mirror of
https://github.com/ansible-collections/community.general.git
synced 2026-05-07 05:42:50 +00:00
Refactor common network shared and platform utils code into package (#33452)
* Refactor common network shared and platform specific code into package (part-1) As per proposal #76 refactor common network shared and platform specific code into sub-package. https://github.com/ansible/proposals/issues/76 * ansible.module_utils.network.common - command shared functions * ansible.module_utils.network.{{ platform }} - where platform is platform specific shared functions * Fix review comments * Fix review comments
This commit is contained in:
0
lib/ansible/module_utils/network/__init__.py
Normal file
0
lib/ansible/module_utils/network/__init__.py
Normal file
0
lib/ansible/module_utils/network/a10/__init__.py
Normal file
0
lib/ansible/module_utils/network/a10/__init__.py
Normal file
153
lib/ansible/module_utils/network/a10/a10.py
Normal file
153
lib/ansible/module_utils/network/a10/a10.py
Normal file
@@ -0,0 +1,153 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import json
|
||||
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
# Mapping of real-port protocol names to the integer ids the AXAPI expects.
AXAPI_PORT_PROTOCOLS = {
    'tcp': 2,
    'udp': 3,
}

# Mapping of virtual-port protocol names to the integer ids the AXAPI expects.
AXAPI_VPORT_PROTOCOLS = {
    'tcp': 2,
    'udp': 3,
    'fast-http': 9,
    'http': 11,
    'https': 12,
}
|
||||
|
||||
|
||||
def a10_argument_spec():
    '''Return the argument spec shared by all A10 modules.'''
    return {
        'host': dict(type='str', required=True),
        'username': dict(type='str', aliases=['user', 'admin'], required=True),
        'password': dict(type='str', aliases=['pass', 'pwd'], required=True, no_log=True),
        'write_config': dict(type='bool', default=False),
    }
|
||||
|
||||
|
||||
def axapi_failure(result):
    '''Return True when an AXAPI response dict reports a failed call.'''
    return 'response' in result and result['response'].get('status') == 'fail'
|
||||
|
||||
|
||||
def axapi_call(module, url, post=None):
    '''
    Send a request to the AXAPI (v2) endpoint and return the decoded response.

    :param module: AnsibleModule instance (used for fetch_url and fail_json).
    :param url: full AXAPI URL including session/method query parameters.
    :param post: optional request body; when given, the request is a POST.
    :return: dict decoded from the JSON response, or a synthesized
        ``{"response": {"status": ...}}`` dict for XML responses.
    '''
    rsp, info = fetch_url(module, url, data=post)
    if not rsp or info['status'] >= 400:
        module.fail_json(msg="failed to connect (status code %s), error was %s" % (info['status'], info.get('msg', 'no error given')))
    try:
        raw_data = rsp.read()
        data = json.loads(raw_data)
    except ValueError:
        # at least one API call (system.action.write_config) returns
        # XML even when JSON is requested, so do some minimal handling
        # here to prevent failing even when the call succeeded
        # BUGFIX: on Python 3 rsp.read() yields bytes, and a str-in-bytes
        # test raises TypeError; decode before the substring check.
        text = raw_data.decode('utf-8', 'replace') if isinstance(raw_data, bytes) else raw_data
        if 'status="ok"' in text.lower():
            data = {"response": {"status": "OK"}}
        else:
            data = {"response": {"status": "fail", "err": {"msg": raw_data}}}
    except Exception:
        # BUGFIX: narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any other read error
        # still fails the module with a clear message.
        module.fail_json(msg="could not read the result from the host")
    finally:
        rsp.close()
    return data
|
||||
|
||||
|
||||
def axapi_authenticate(module, base_url, username, password):
    '''Log in to the AXAPI (v2) and return base_url with the session id appended.'''
    login_url = '%s&method=authenticate&username=%s&password=%s' % (base_url, username, password)
    result = axapi_call(module, login_url)
    if axapi_failure(result):
        return module.fail_json(msg=result['response']['err']['msg'])
    # Subsequent calls must carry the session id on the query string.
    return base_url + '&session_id=' + result['session_id']
|
||||
|
||||
|
||||
def axapi_authenticate_v3(module, base_url, username, password):
    '''Log in to the AXAPI (v3) and return the auth signature for later calls.'''
    credentials = {"credentials": {"username": username, "password": password}}
    result = axapi_call_v3(module, base_url, method='POST', body=json.dumps(credentials))
    if axapi_failure(result):
        return module.fail_json(msg=result['response']['err']['msg'])
    # v3 auth returns a signature that is sent in the Authorization header.
    return result['authresponse']['signature']
|
||||
|
||||
|
||||
def axapi_call_v3(module, url, method=None, body=None, signature=None):
    '''
    Send a request to the AXAPI (v3) endpoint and return the decoded response.

    :param module: AnsibleModule instance (used for fetch_url and fail_json).
    :param url: full AXAPI URL.
    :param method: HTTP method (e.g. 'POST'); None lets fetch_url decide.
    :param body: optional JSON-encoded request body.
    :param signature: optional auth signature from axapi_authenticate_v3,
        sent as an ``Authorization: A10 <signature>`` header.
    :return: dict decoded from the JSON response, or a synthesized
        ``{"response": {"status": ...}}`` dict for XML responses.
    '''
    if signature:
        headers = {'content-type': 'application/json', 'Authorization': 'A10 %s' % signature}
    else:
        headers = {'content-type': 'application/json'}
    rsp, info = fetch_url(module, url, method=method, data=body, headers=headers)
    if not rsp or info['status'] >= 400:
        module.fail_json(msg="failed to connect (status code %s), error was %s" % (info['status'], info.get('msg', 'no error given')))
    try:
        raw_data = rsp.read()
        data = json.loads(raw_data)
    except ValueError:
        # at least one API call (system.action.write_config) returns
        # XML even when JSON is requested, so do some minimal handling
        # here to prevent failing even when the call succeeded
        # BUGFIX: on Python 3 rsp.read() yields bytes, and a str-in-bytes
        # test raises TypeError; decode before the substring check.
        text = raw_data.decode('utf-8', 'replace') if isinstance(raw_data, bytes) else raw_data
        if 'status="ok"' in text.lower():
            data = {"response": {"status": "OK"}}
        else:
            data = {"response": {"status": "fail", "err": {"msg": raw_data}}}
    except Exception:
        # BUGFIX: narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        module.fail_json(msg="could not read the result from the host")
    finally:
        rsp.close()
    return data
|
||||
|
||||
|
||||
def axapi_enabled_disabled(flag):
    '''
    The axapi uses 0/1 integer values for flags, rather than strings
    or booleans, so convert the given flag to a 0 or 1. For now, params
    are specified as strings only so thats what we check.
    '''
    return 1 if flag == 'enabled' else 0
|
||||
|
||||
|
||||
def axapi_get_port_protocol(protocol):
    '''Map a (case-insensitive) port protocol name to its AXAPI id, or None.'''
    return AXAPI_PORT_PROTOCOLS.get(protocol.lower())
|
||||
|
||||
|
||||
def axapi_get_vport_protocol(protocol):
    '''Map a (case-insensitive) virtual-port protocol name to its AXAPI id, or None.'''
    return AXAPI_VPORT_PROTOCOLS.get(protocol.lower())
|
||||
0
lib/ansible/module_utils/network/aci/__init__.py
Normal file
0
lib/ansible/module_utils/network/aci/__init__.py
Normal file
758
lib/ansible/module_utils/network/aci/aci.py
Normal file
758
lib/ansible/module_utils/network/aci/aci.py
Normal file
@@ -0,0 +1,758 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This code is part of Ansible, but is an independent component
|
||||
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
|
||||
# Copyright 2017 Dag Wieers <dag@wieers.com>
|
||||
# Copyright 2017 Swetha Chunduri (@schunduri)
|
||||
# All rights reserved.
|
||||
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import json
|
||||
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
from ansible.module_utils._text import to_bytes
|
||||
|
||||
# Optional, only used for XML payload
|
||||
try:
|
||||
import lxml.etree
|
||||
HAS_LXML_ETREE = True
|
||||
except ImportError:
|
||||
HAS_LXML_ETREE = False
|
||||
|
||||
# Optional, only used for XML payload
|
||||
try:
|
||||
from xmljson import cobra
|
||||
HAS_XMLJSON_COBRA = True
|
||||
except ImportError:
|
||||
HAS_XMLJSON_COBRA = False
|
||||
|
||||
|
||||
# Connection argument spec shared by all ACI modules; individual modules
# extend this dict with their own object-specific parameters.
aci_argument_spec = dict(
    hostname=dict(type='str', required=True, aliases=['host']),
    username=dict(type='str', default='admin', aliases=['user']),
    password=dict(type='str', required=True, no_log=True),
    protocol=dict(type='str', removed_in_version='2.6'),  # Deprecated in v2.6
    timeout=dict(type='int', default=30),
    use_proxy=dict(type='bool', default=True),
    use_ssl=dict(type='bool', default=True),
    validate_certs=dict(type='bool', default=True),
)
|
||||
|
||||
'''
|
||||
URL_MAPPING = dict(
|
||||
action_rule=dict(aci_class='rtctrlAttrP', mo='attr-', key='name'),
|
||||
aep=dict(aci_class='infraAttEntityP', mo='infra/attentp-', key='name'),
|
||||
ap=dict(aci_class='fvAp', mo='ap-', key='name'),
|
||||
bd=dict(aci_class='fvBD', mo='BD-', key='name'),
|
||||
bd_l3out=dict(aci_class='fvRsBDToOut', mo='rsBDToOut-', key='tnL3extOutName'),
|
||||
contract=dict(aci_class='vzBrCP', mo='brc-', key='name'),
|
||||
entry=dict(aci_class='vzEntry', mo='e-', key='name'),
|
||||
epg=dict(aci_class='fvAEPg', mo='epg-', key='name'),
|
||||
epg_consumer=dict(aci_class='fvRsCons', mo='rscons-', key='tnVzBrCPName'),
|
||||
epg_domain=dict(aci_class='fvRsDomAtt', mo='rsdomAtt-', key='tDn'),
|
||||
epg_provider=dict(aci_class='fvRsProv', mo='rsprov-', key='tnVzBrCPName'),
|
||||
epr_policy=dict(aci_class='fvEpRetPol', mo='epRPol-', key='name'),
|
||||
export_policy=dict(aci_class='configExportP', mo='fabric/configexp-', key='name'),
|
||||
fc_policy=dict(aci_class='fcIfPol', mo='infra/fcIfPol-', key='name'),
|
||||
filter=dict(aci_class='vzFilter', mo='flt-', key='name'),
|
||||
gateway_addr=dict(aci_class='fvSubnet', mo='subnet-', key='ip'),
|
||||
import_policy=dict(aci_class='configImportP', mo='fabric/configimp-', key='name'),
|
||||
l2_policy=dict(aci_class='l2IfPol', mo='infra/l2IfP-', key='name'),
|
||||
lldp_policy=dict(aci_class='lldpIfPol', mo='infra/lldpIfP-', key='name'),
|
||||
mcp=dict(aci_class='mcpIfPol', mo='infra/mcpIfP-', key='name'),
|
||||
monitoring_policy=dict(aci_class='monEPGPol', mo='monepg-', key='name'),
|
||||
port_channel=dict(aci_class='lacpLagPol', mo='infra/lacplagp-', key='name'),
|
||||
port_security=dict(aci_class='l2PortSecurityPol', mo='infra/portsecurityP-', key='name'),
|
||||
rtp=dict(aci_class='l3extRouteTagPol', mo='rttag-', key='name'),
|
||||
snapshot=dict(aci_class='configSnapshot', mo='snapshot-', key='name'),
|
||||
snapshot_container=dict(aci_class='configSnapshotCont', mo='backupst/snapshots-', key='name'),
|
||||
subject=dict(aci_class='vzSubj', mo='subj-', key='name'),
|
||||
subject_filter=dict(aci_class='vzRsSubjFiltAtt', mo='rssubjFiltAtt-', key='tnVzFilterName'),
|
||||
taboo_contract=dict(aci_class='vzTaboo', mo='taboo-', key='name'),
|
||||
tenant=dict(aci_class='fvTenant', mo='tn-', key='name'),
|
||||
tenant_span_dst_grp=dict(aci_class='spanDestGrp', mo='destgrp-', key='name'),
|
||||
tenant_span_src_grp=dict(aci_class='spanSrcGrp', mo='srcgrp-', key='name'),
|
||||
tenant_span_src_grp_dst_grp=dict(aci_class='spanSpanLbl', mo='spanlbl-', key='name'),
|
||||
vrf=dict(aci_class='fvCtx', mo='ctx-', key='name'),
|
||||
)
|
||||
'''
|
||||
|
||||
|
||||
def aci_response_error(result):
    ''' Populate error_code/error_text on *result*, defaulting to success. '''
    # Assume success; overwritten below if the APIC reported an error.
    result['error_code'] = 0
    result['error_text'] = 'Success'
    # A non-zero totalCount may carry APIC error details in the first imdata
    # entry; missing keys simply leave the success defaults in place.
    if result['totalCount'] != '0':
        try:
            attributes = result['imdata'][0]['error']['attributes']
            result['error_code'] = attributes['code']
            result['error_text'] = attributes['text']
        except (KeyError, IndexError):
            pass
|
||||
|
||||
|
||||
def aci_response_json(result, rawoutput):
    ''' Merge an APIC JSON payload into *result* and record any APIC error. '''
    try:
        result.update(json.loads(rawoutput))
    except Exception as e:
        # Parsing failed: expose the raw output for troubleshooting instead.
        result['raw'] = rawoutput
        result['error_code'] = -1
        result['error_text'] = "Unable to parse output as JSON, see 'raw' output. %s" % e
        return

    # Translate any APIC-reported error into error_code/error_text.
    aci_response_error(result)
|
||||
|
||||
|
||||
def aci_response_xml(result, rawoutput):
    ''' Convert an APIC XML payload to the JSON layout and merge it into *result*. '''

    # NOTE: The XML-to-JSON conversion is using the "Cobra" convention
    try:
        tree = lxml.etree.fromstring(to_bytes(rawoutput))
        xmldata = cobra.data(tree)
    except Exception as e:
        # Parsing failed: expose the raw output for troubleshooting instead.
        result['raw'] = rawoutput
        result['error_code'] = -1
        result['error_text'] = "Unable to parse output as XML, see 'raw' output. %s" % e
        return

    # Reshape to match what the APIC JSON API would have returned.
    try:
        result['imdata'] = xmldata['imdata']['children']
    except KeyError:
        result['imdata'] = dict()
    result['totalCount'] = xmldata['imdata']['attributes']['totalCount']

    # Translate any APIC-reported error into error_code/error_text.
    aci_response_error(result)
|
||||
|
||||
|
||||
class ACIModule(object):
|
||||
|
||||
def __init__(self, module):
    ''' Store module context, initialize the result dict and log in to the APIC. '''
    self.module = module
    self.params = module.params
    self.headers = None
    self.result = {'changed': False}

    # Authenticate immediately so the instance holds a valid session cookie.
    self.login()
|
||||
|
||||
def define_protocol(self):
    ''' Derive the transport protocol (http/https) from module parameters. '''
    proto = self.params['protocol']

    # An explicit http/https value is honored but deprecated in favor of use_ssl.
    if proto in ('http', 'https'):
        self.module.deprecate("Parameter 'protocol' is deprecated, please use 'use_ssl' instead.", '2.6')
    elif proto is None:
        # Normal path: derive the protocol from use_ssl (defaults to https).
        self.params['protocol'] = 'https' if self.params.get('use_ssl', True) else 'http'
    else:
        self.module.fail_json(msg="Parameter 'protocol' needs to be one of ( http, https )")
|
||||
|
||||
def define_method(self):
    ''' Derive the HTTP method from state (or the deprecated method/action parameter). '''
    if not self.params['method']:
        # Normal path: map state to the HTTP verb.
        state_map = dict(absent='delete', present='post', query='get')
        self.params['method'] = state_map[self.params['state']]
        return

    # Deprecated method/action parameter was given; warn only when the module
    # actually supports 'state' (aci_rest does not).
    if 'state' in self.module.argument_spec:
        self.module.deprecate("Parameter 'method' or 'action' is deprecated, please use 'state' instead", '2.6')
    method_map = dict(delete='absent', get='query', post='present')
    self.params['state'] = method_map[self.params['method']]
|
||||
|
||||
def login(self):
    ''' Log in to APIC.

    Posts the aaaLogin payload and, on success, retains the APIC session
    cookie in self.headers for all subsequent requests. On failure the
    module exits via fail_json.
    '''

    # Ensure protocol is set (only do this once)
    self.define_protocol()

    # Perform login request
    url = '%(protocol)s://%(hostname)s/api/aaaLogin.json' % self.params
    payload = {'aaaUser': {'attributes': {'name': self.params['username'], 'pwd': self.params['password']}}}
    resp, auth = fetch_url(self.module, url,
                           data=json.dumps(payload),
                           method='POST',
                           timeout=self.params['timeout'],
                           use_proxy=self.params['use_proxy'])

    # Handle APIC response
    if auth['status'] != 200:
        self.result['response'] = auth['msg']
        self.result['status'] = auth['status']
        try:
            # APIC error: auth['body'] carries an error payload we can parse
            aci_response_json(self.result, auth['body'])
            self.module.fail_json(msg='Authentication failed: %(error_code)s %(error_text)s' % self.result, **self.result)
        except KeyError:
            # Connection error: no 'body' key means we never reached the APIC
            self.module.fail_json(msg='Authentication failed for %(url)s. %(msg)s' % auth)

    # Retain cookie for later use
    self.headers = dict(Cookie=resp.headers['Set-Cookie'])
|
||||
|
||||
def request(self, path, payload=None):
    ''' Perform a REST request.

    :param path: URL path relative to the APIC host (leading '/' optional).
    :param payload: optional request body to send.

    The parsed response is merged into self.result; on HTTP failure the
    module exits via fail_json.
    '''

    # Ensure method is set (only do this once)
    self.define_method()

    # Perform request
    self.result['url'] = '%(protocol)s://%(hostname)s/' % self.params + path.lstrip('/')
    resp, info = fetch_url(self.module, self.result['url'],
                           data=payload,
                           headers=self.headers,
                           method=self.params['method'].upper(),
                           timeout=self.params['timeout'],
                           use_proxy=self.params['use_proxy'])

    self.result['response'] = info['msg']
    self.result['status'] = info['status']

    # Handle APIC response
    if info['status'] != 200:
        try:
            # APIC error: info['body'] carries an error payload we can parse
            aci_response_json(self.result, info['body'])
            self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result)
        except KeyError:
            # Connection error: no 'body' key means we never reached the APIC
            self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info)

    aci_response_json(self.result, resp.read())
|
||||
|
||||
def query(self, path):
    ''' Perform a query with no payload.

    :param path: URL path relative to the APIC host (leading '/' optional).
    :return: the response's 'imdata' list pretty-printed as a JSON string
        (sorted keys, 2-space indent, trailing newline).
    '''
    url = '%(protocol)s://%(hostname)s/' % self.params + path.lstrip('/')
    resp, query = fetch_url(self.module, url,
                            data=None,
                            headers=self.headers,
                            method='GET',
                            timeout=self.params['timeout'],
                            use_proxy=self.params['use_proxy'])

    # Handle APIC response
    if query['status'] != 200:
        self.result['response'] = query['msg']
        self.result['status'] = query['status']
        try:
            # APIC error: query['body'] carries an error payload we can parse
            aci_response_json(self.result, query['body'])
            self.module.fail_json(msg='Query failed: %(error_code)s %(error_text)s' % self.result, **self.result)
        except KeyError:
            # Connection error: no 'body' key means we never reached the APIC
            self.module.fail_json(msg='Query failed for %(url)s. %(msg)s' % query)

    query = json.loads(resp.read())

    return json.dumps(query['imdata'], sort_keys=True, indent=2) + '\n'
|
||||
|
||||
def request_diff(self, path, payload=None):
    ''' Perform a request, including a proper diff output '''
    diff = dict()
    self.result['diff'] = diff
    # Snapshot the object before and after the change so callers get a diff.
    diff['before'] = self.query(path)
    self.request(path, payload=payload)
    # TODO: Check if we can use the request output for the 'after' diff
    diff['after'] = self.query(path)

    if diff['before'] != diff['after']:
        self.result['changed'] = True
|
||||
|
||||
def construct_url(self, root_class, subclass_1=None, subclass_2=None, subclass_3=None, child_classes=None):
    """
    Build self.result['url'] and self.result['filter_string'] for the APIC request.

    :param root_class: The top-level class dictionary containing aci_class, aci_rn, filter_target, and module_object keys.
    :param subclass_1: The second-level class dictionary (same keys).
    :param subclass_2: The third-level class dictionary (same keys).
    :param subclass_3: The fourth-level class dictionary (same keys).
    :param child_classes: The list of child classes that the module supports along with the object.
    :type root_class: dict
    :type subclass_1: dict
    :type subclass_2: dict
    :type subclass_3: dict
    :type child_classes: list
    """
    if child_classes is None:
        child_includes = ''
    else:
        child_includes = '&rsp-subtree=full&rsp-subtree-class=' + ','.join(child_classes)

    # Dispatch on how deep the target object sits in the class hierarchy.
    if subclass_3 is not None:
        path, filter_string = self._construct_url_4(root_class, subclass_1, subclass_2, subclass_3, child_includes)
    elif subclass_2 is not None:
        path, filter_string = self._construct_url_3(root_class, subclass_1, subclass_2, child_includes)
    elif subclass_1 is not None:
        path, filter_string = self._construct_url_2(root_class, subclass_1, child_includes)
    else:
        path, filter_string = self._construct_url_1(root_class, child_includes)

    self.result['url'] = '{}://{}/{}'.format(self.module.params['protocol'], self.module.params['hostname'], path)
    self.result['filter_string'] = filter_string
|
||||
|
||||
def _construct_url_1(self, obj, child_includes):
    """
    Build the path and filter string when the target object is the top-level class.

    BUGFIX: the original checked ``obj is None`` even though ``obj`` is the
    class dictionary (always truthy, and already dereferenced above), so the
    "query all objects of the class" branch was unreachable. We now check the
    dict's ``module_object`` entry, consistent with _construct_url_2/3.
    """
    obj_class = obj['aci_class']
    obj_rn = obj['aci_rn']
    # .get() keeps this backward-compatible with callers that omit the key.
    obj_name = obj.get('module_object')

    # State is present or absent
    if self.module.params['state'] != 'query':
        path = 'api/mo/uni/{}.json'.format(obj_rn)
        filter_string = '?rsp-prop-include=config-only' + child_includes
    # Query for all objects of the module's class
    elif obj_name is None:
        path = 'api/class/{}.json'.format(obj_class)
        filter_string = ''
    # Query for a specific object in the module's class
    else:
        path = 'api/mo/uni/{}.json'.format(obj_rn)
        filter_string = ''

    # Append child_includes to filter_string if filter string is empty
    if child_includes is not None and filter_string == '':
        filter_string = child_includes.replace('&', '?', 1)

    return path, filter_string
|
||||
|
||||
def _construct_url_2(self, parent, obj, child_includes):
    """
    This method is used by get_url when the object is the second-level class.

    :param parent: dict with aci_rn and module_object for the parent object.
    :param obj: dict with aci_class, aci_rn, filter_target and module_object
        for the target object.
    :param child_includes: pre-built '&rsp-subtree=...' fragment ('' if none).
    :return: (path, filter_string) tuple for the APIC request.
    """
    parent_rn = parent['aci_rn']
    parent_obj = parent['module_object']
    obj_class = obj['aci_class']
    obj_rn = obj['aci_rn']
    obj_filter = obj['filter_target']
    obj = obj['module_object']

    # Subtree includes that also pull in the target class itself (used when
    # querying via the parent object).
    if not child_includes:
        self_child_includes = '?rsp-subtree=full&rsp-subtree-class=' + obj_class
    else:
        self_child_includes = child_includes.replace('&', '?', 1) + ',' + obj_class

    # State is present or absent
    if self.module.params['state'] != 'query':
        path = 'api/mo/uni/{}/{}.json'.format(parent_rn, obj_rn)
        filter_string = '?rsp-prop-include=config-only' + child_includes
    # Query for all objects of the module's class
    elif obj is None and parent_obj is None:
        path = 'api/class/{}.json'.format(obj_class)
        filter_string = ''
    # Queries when parent object is provided
    elif parent_obj is not None:
        # Query for specific object in the module's class
        if obj is not None:
            path = 'api/mo/uni/{}/{}.json'.format(parent_rn, obj_rn)
            filter_string = ''
        # Query for all object's of the module's class that belong to a specific parent object
        else:
            path = 'api/mo/uni/{}.json'.format(parent_rn)
            filter_string = self_child_includes
    # Query for all objects of the module's class that match the provided ID value
    else:
        path = 'api/class/{}.json'.format(obj_class)
        filter_string = '?query-target-filter=eq{}'.format(obj_filter) + child_includes

    # Append child_includes to filter_string if filter string is empty
    if child_includes is not None and filter_string == '':
        filter_string = child_includes.replace('&', '?', 1)

    return path, filter_string
|
||||
|
||||
def _construct_url_3(self, root, parent, obj, child_includes):
    """
    This method is used by get_url when the object is the third-level class.

    :param root: dict with aci_rn and module_object for the root object.
    :param parent: dict with aci_class, aci_rn, filter_target and
        module_object for the parent object.
    :param obj: dict with the same keys for the target object.
    :param child_includes: pre-built '&rsp-subtree=...' fragment ('' if none).
    :return: (path, filter_string) tuple for the APIC request.
    """
    root_rn = root['aci_rn']
    root_obj = root['module_object']
    parent_class = parent['aci_class']
    parent_rn = parent['aci_rn']
    parent_filter = parent['filter_target']
    parent_obj = parent['module_object']
    obj_class = obj['aci_class']
    obj_rn = obj['aci_rn']
    obj_filter = obj['filter_target']
    obj = obj['module_object']

    # Subtree includes that also pull in the target class itself.
    if not child_includes:
        self_child_includes = '&rsp-subtree=full&rsp-subtree-class=' + obj_class
    else:
        self_child_includes = '{},{}'.format(child_includes, obj_class)

    # Subtree includes that pull in both the parent class and the target class.
    if not child_includes:
        parent_self_child_includes = '&rsp-subtree=full&rsp-subtree-class={},{}'.format(parent_class, obj_class)
    else:
        parent_self_child_includes = '{},{},{}'.format(child_includes, parent_class, obj_class)

    # State is absent or present
    if self.module.params['state'] != 'query':
        path = 'api/mo/uni/{}/{}/{}.json'.format(root_rn, parent_rn, obj_rn)
        filter_string = '?rsp-prop-include=config-only' + child_includes
    # Query for all objects of the module's class
    elif obj is None and parent_obj is None and root_obj is None:
        path = 'api/class/{}.json'.format(obj_class)
        filter_string = ''
    # Queries when root object is provided
    elif root_obj is not None:
        # Queries when parent object is provided
        if parent_obj is not None:
            # Query for a specific object of the module's class
            if obj is not None:
                path = 'api/mo/uni/{}/{}/{}.json'.format(root_rn, parent_rn, obj_rn)
                filter_string = ''
            # Query for all objects of the module's class that belong to a specific parent object
            else:
                path = 'api/mo/uni/{}/{}.json'.format(root_rn, parent_rn)
                filter_string = self_child_includes.replace('&', '?', 1)
        # Query for all objects of the module's class that match the provided ID value and belong to a specific root object
        elif obj is not None:
            path = 'api/mo/uni/{}.json'.format(root_rn)
            filter_string = '?rsp-subtree-filter=eq{}{}'.format(obj_filter, self_child_includes)
        # Query for all objects of the module's class that belong to a specific root object
        else:
            path = 'api/mo/uni/{}.json'.format(root_rn)
            filter_string = '?' + parent_self_child_includes
    # Queries when parent object is provided but root object is not provided
    elif parent_obj is not None:
        # Query for all objects of the module's class that belong to any parent class
        # matching the provided ID values for both object and parent object
        if obj is not None:
            path = 'api/class/{}.json'.format(parent_class)
            filter_string = '?query-target-filter=eq{}{}&rsp-subtree-filter=eq{}'.format(
                parent_filter, self_child_includes, obj_filter)
        # Query for all objects of the module's class that belong to any parent class
        # matching the provided ID value for the parent object
        else:
            path = 'api/class/{}.json'.format(parent_class)
            filter_string = '?query-target-filter=eq{}{}'.format(parent_filter, self_child_includes)
    # Query for all objects of the module's class matching the provided ID value of the object
    else:
        path = 'api/class/{}.json'.format(obj_class)
        filter_string = '?query-target-filter=eq{}'.format(obj_filter) + child_includes

    # append child_includes to filter_string if filter string is empty
    if child_includes is not None and filter_string == '':
        filter_string = child_includes.replace('&', '?', 1)

    return path, filter_string
|
||||
|
||||
def _construct_url_4(self, root, sec, parent, obj, child_includes):
    """
    This method is used by get_url when the object is the fourth-level class.

    NOTE(review): only the present/absent path and the plain class query are
    implemented here; the commented-out dereferences below suggest richer
    query handling (as in _construct_url_2/3) was planned but not finished.
    """
    # root_class = root['aci_class']
    root_rn = root['aci_rn']
    # root_filter = root['filter_target']
    # root_obj = root['module_object']
    # sec_class = sec['aci_class']
    sec_rn = sec['aci_rn']
    # sec_filter = sec['filter_target']
    # sec_obj = sec['module_object']
    # parent_class = parent['aci_class']
    parent_rn = parent['aci_rn']
    # parent_filter = parent['filter_target']
    # parent_obj = parent['module_object']
    obj_class = obj['aci_class']
    obj_rn = obj['aci_rn']
    # obj_filter = obj['filter_target']
    # obj = obj['module_object']

    # State is absent or present
    if self.module.params['state'] != 'query':
        path = 'api/mo/uni/{}/{}/{}/{}.json'.format(root_rn, sec_rn, parent_rn, obj_rn)
        filter_string = '?rsp-prop-include=config-only' + child_includes
    else:
        path = 'api/class/{}.json'.format(obj_class)
        filter_string = child_includes

    return path, filter_string
|
||||
|
||||
def delete_config(self):
    """
    This method is used to handle the logic when the modules state is equal to absent. The method only pushes a change if
    the object exists, and if check_mode is False. A successful change will mark the module as changed.
    """
    self.result['proposed'] = {}

    # Nothing to delete if the object does not exist.
    if not self.result['existing']:
        return

    elif not self.module.check_mode:
        # Issue the DELETE against the URL built earlier by construct_url().
        resp, info = fetch_url(self.module, self.result['url'],
                               headers=self.headers,
                               method='DELETE',
                               timeout=self.params['timeout'],
                               use_proxy=self.params['use_proxy'])

        self.result['response'] = info['msg']
        self.result['status'] = info['status']
        self.result['method'] = 'DELETE'

        # Handle APIC response
        if info['status'] == 200:
            self.result['changed'] = True
            aci_response_json(self.result, resp.read())
        else:
            try:
                # APIC error: info['body'] carries an error payload we can parse
                aci_response_json(self.result, info['body'])
                self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result)
            except KeyError:
                # Connection error: no 'body' key means we never reached the APIC
                self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info)
    else:
        # check_mode: report the change without sending the DELETE.
        self.result['changed'] = True
        self.result['method'] = 'DELETE'
||||
|
||||
def get_diff(self, aci_class):
    """
    This method is used to get the difference between the proposed and existing configurations. Each module
    should call the get_existing method before this method, and add the proposed config to the module results
    using the module's config parameters. The new config will be added to the self.result dictionary.

    :param aci_class: Type str.
        This is the root dictionary key for the MO's configuration body, or the ACI class of the MO.
    """
    proposed_config = self.result['proposed'][aci_class]['attributes']
    if self.result['existing']:
        existing_config = self.result['existing'][0][aci_class]['attributes']
        config = {}

        # values are strings, so any diff between proposed and existing can be a straight replace
        for key, value in proposed_config.items():
            existing_field = existing_config.get(key)
            if value != existing_field:
                config[key] = value

        # add name back to config only if the configs do not match
        if config:
            # TODO: If URLs are built with the object's name, then we should be able to leave off adding the name back
            # config["name"] = proposed_config["name"]
            config = {aci_class: {'attributes': config}}

        # check for updates to child configs and update new config dictionary
        children = self.get_diff_children(aci_class)
        if children and config:
            config[aci_class].update({'children': children})
        elif children:
            config = {aci_class: {'attributes': {}, 'children': children}}

    else:
        # No existing object: everything proposed is new configuration.
        config = self.result['proposed']

    self.result['config'] = config
|
||||
|
||||
@staticmethod
|
||||
def get_diff_child(child_class, proposed_child, existing_child):
|
||||
"""
|
||||
This method is used to get the difference between a proposed and existing child configs. The get_nested_config()
|
||||
method should be used to return the proposed and existing config portions of child.
|
||||
|
||||
:param child_class: Type str.
|
||||
The root class (dict key) for the child dictionary.
|
||||
:param proposed_child: Type dict.
|
||||
The config portion of the proposed child dictionary.
|
||||
:param existing_child: Type dict.
|
||||
The config portion of the existing child dictionary.
|
||||
:return: The child config with only values that are updated. If the proposed dictionary has no updates to make
|
||||
to what exists on the APIC, then None is returned.
|
||||
"""
|
||||
update_config = {child_class: {'attributes': {}}}
|
||||
for key, value in proposed_child.items():
|
||||
if value != existing_child[key]:
|
||||
update_config[child_class]['attributes'][key] = value
|
||||
|
||||
if not update_config[child_class]['attributes']:
|
||||
return None
|
||||
|
||||
return update_config
|
||||
|
||||
def get_diff_children(self, aci_class):
|
||||
"""
|
||||
This method is used to retrieve the updated child configs by comparing the proposed children configs
|
||||
agains the objects existing children configs.
|
||||
|
||||
:param aci_class: Type str.
|
||||
This is the root dictionary key for the MO's configuration body, or the ACI class of the MO.
|
||||
:return: The list of updated child config dictionaries. None is returned if there are no changes to the child
|
||||
configurations.
|
||||
"""
|
||||
proposed_children = self.result['proposed'][aci_class].get('children')
|
||||
if proposed_children:
|
||||
child_updates = []
|
||||
existing_children = self.result['existing'][0][aci_class].get('children', [])
|
||||
|
||||
# Loop through proposed child configs and compare against existing child configuration
|
||||
for child in proposed_children:
|
||||
child_class, proposed_child, existing_child = self.get_nested_config(child, existing_children)
|
||||
|
||||
if existing_child is None:
|
||||
child_update = child
|
||||
else:
|
||||
child_update = self.get_diff_child(child_class, proposed_child, existing_child)
|
||||
|
||||
# Update list of updated child configs only if the child config is different than what exists
|
||||
if child_update:
|
||||
child_updates.append(child_update)
|
||||
else:
|
||||
return None
|
||||
|
||||
return child_updates
|
||||
|
||||
    def get_existing(self):
        """
        This method is used to get the existing object(s) based on the path specified in the module. Each module should
        build the URL so that if the object's name is supplied, then it will retrieve the configuration for that particular
        object, but if no name is supplied, then it will retrieve all MOs for the class. Following this method will ensure
        that this method can be used to supply the existing configuration when using the get_diff method. The response, status,
        and existing configuration will be added to the self.result dictionary.
        """
        # URL was prepared earlier (see construct_url/path building); just append the filter.
        uri = self.result['url'] + self.result['filter_string']

        resp, info = fetch_url(self.module, uri,
                               headers=self.headers,
                               method='GET',
                               timeout=self.params['timeout'],
                               use_proxy=self.params['use_proxy'])
        self.result['response'] = info['msg']
        self.result['status'] = info['status']
        self.result['method'] = 'GET'

        # Handle APIC response
        if info['status'] == 200:
            # APIC wraps results in an 'imdata' list.
            self.result['existing'] = json.loads(resp.read())['imdata']
        else:
            try:
                # APIC error: body contains an error document
                aci_response_json(self.result, info['body'])
                self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result)
            except KeyError:
                # Connection error: info has no 'body' key
                self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info)
|
||||
|
||||
@staticmethod
|
||||
def get_nested_config(proposed_child, existing_children):
|
||||
"""
|
||||
This method is used for stiping off the outer layers of the child dictionaries so only the configuration
|
||||
key, value pairs are returned.
|
||||
|
||||
:param proposed_child: Type dict.
|
||||
The dictionary that represents the child config.
|
||||
:param existing_children: Type list.
|
||||
The list of existing child config dictionaries.
|
||||
:return: The child's class as str (root config dict key), the child's proposed config dict, and the child's
|
||||
existing configuration dict.
|
||||
"""
|
||||
for key in proposed_child.keys():
|
||||
child_class = key
|
||||
proposed_config = proposed_child[key]['attributes']
|
||||
existing_config = None
|
||||
|
||||
# get existing dictionary from the list of existing to use for comparison
|
||||
for child in existing_children:
|
||||
if child.get(child_class):
|
||||
existing_config = child[key]['attributes']
|
||||
break
|
||||
|
||||
return child_class, proposed_config, existing_config
|
||||
|
||||
def payload(self, aci_class, class_config, child_configs=None):
|
||||
"""
|
||||
This method is used to dynamically build the proposed configuration dictionary from the config related parameters
|
||||
passed into the module. All values that were not passed values from the playbook task will be removed so as to not
|
||||
inadvertently change configurations.
|
||||
|
||||
:param aci_class: Type str
|
||||
This is the root dictionary key for the MO's configuration body, or the ACI class of the MO.
|
||||
:param class_config: Type dict
|
||||
This is the configuration of the MO using the dictionary keys expected by the API
|
||||
:param child_configs: Type list
|
||||
This is a list of child dictionaries associated with the MOs config. The list should only
|
||||
include child objects that are used to associate two MOs together. Children that represent
|
||||
MOs should have their own module.
|
||||
"""
|
||||
proposed = dict((k, str(v)) for k, v in class_config.items() if v is not None)
|
||||
self.result['proposed'] = {aci_class: {'attributes': proposed}}
|
||||
|
||||
# add child objects to proposed
|
||||
if child_configs:
|
||||
children = []
|
||||
for child in child_configs:
|
||||
child_copy = child.copy()
|
||||
has_value = False
|
||||
for root_key in child_copy.keys():
|
||||
for final_keys, values in child_copy[root_key]['attributes'].items():
|
||||
if values is None:
|
||||
child[root_key]['attributes'].pop(final_keys)
|
||||
else:
|
||||
child[root_key]['attributes'][final_keys] = str(values)
|
||||
has_value = True
|
||||
if has_value:
|
||||
children.append(child)
|
||||
|
||||
if children:
|
||||
self.result['proposed'][aci_class].update(dict(children=children))
|
||||
|
||||
    def post_config(self):
        """
        This method is used to handle the logic when the modules state is equal to present. The method only pushes a change if
        the object has differences than what exists on the APIC, and if check_mode is False. A successful change will mark the
        module as changed.
        """
        # An empty diff (see get_diff) means the device already matches: no-op.
        if not self.result['config']:
            return
        elif not self.module.check_mode:
            resp, info = fetch_url(self.module, self.result['url'],
                                   data=json.dumps(self.result['config']),
                                   headers=self.headers,
                                   method='POST',
                                   timeout=self.params['timeout'],
                                   use_proxy=self.params['use_proxy'])

            self.result['response'] = info['msg']
            self.result['status'] = info['status']
            self.result['method'] = 'POST'

            # Handle APIC response
            if info['status'] == 200:
                self.result['changed'] = True
                aci_response_json(self.result, resp.read())
            else:
                try:
                    # APIC error: body carries an error document
                    aci_response_json(self.result, info['body'])
                    self.module.fail_json(msg='Request failed: %(error_code)s %(error_text)s' % self.result, **self.result)
                except KeyError:
                    # Connection error: no 'body' key in info
                    self.module.fail_json(msg='Request failed for %(url)s. %(msg)s' % info)
        else:
            # check_mode with a pending diff: report the change without sending it.
            self.result['changed'] = True
            self.result['method'] = 'POST'
|
||||
0
lib/ansible/module_utils/network/aireos/__init__.py
Normal file
0
lib/ansible/module_utils/network/aireos/__init__.py
Normal file
129
lib/ansible/module_utils/network/aireos/aireos.py
Normal file
129
lib/ansible/module_utils/network/aireos/aireos.py
Normal file
@@ -0,0 +1,129 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
|
||||
# Per-process cache of device configs, keyed by the exact CLI command used.
_DEVICE_CONFIGS = {}

# Connection options accepted under the nested 'provider' dictionary.
aireos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'timeout': dict(type='int'),
}
aireos_argument_spec = {
    'provider': dict(type='dict', options=aireos_provider_spec)
}

# Deprecated top-level duplicates of the provider options (removed in Ansible 2.9).
aireos_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'timeout': dict(removed_in_version=2.9, type='int'),
}
aireos_argument_spec.update(aireos_top_spec)
|
||||
|
||||
|
||||
def sanitize(resp):
    """Strip surrounding whitespace from every line of a device response.

    AireOS prefixes extra whitespace that netcfg would misparse as
    parent/child structure, and appends unused trailing whitespace;
    both are removed here.
    """
    stripped = (line.strip() for line in resp.splitlines())
    return '\n'.join(stripped).strip()
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the AireOS 'provider' portion of the module argument spec."""
    return aireos_provider_spec
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """Platform hook for extra argument validation; AireOS has none."""
    pass
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the sanitized running config, cached per exact command."""
    flag_text = ' '.join(flags or [])
    cmd = ('show run-config commands ' + flag_text).strip()

    # Serve from the per-command cache when available.
    if cmd in _DEVICE_CONFIGS:
        return _DEVICE_CONFIGS[cmd]

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_then_replace'))
    cfg = sanitize(to_text(out, errors='surrogate_then_replace').strip())
    _DEVICE_CONFIGS[cmd] = cfg
    return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalize plain command strings into command/prompt/answer dicts."""
    command_spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict(),
    }
    return ComplexList(command_spec, module)(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Run each command on the controller and return sanitized output.

    Fails the module on a non-zero return code when check_rc is True.
    """
    responses = []
    for cmd in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(cmd))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), rc=rc)
        responses.append(sanitize(to_text(out, errors='surrogate_then_replace')))
    return responses
|
||||
|
||||
|
||||
def load_config(module, commands):
    """Apply configuration commands to the controller.

    Enters config mode, sends each command (skipping a literal 'end',
    which is sent once at the close), and fails on any non-zero rc.
    """

    rc, out, err = exec_command(module, 'config')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode', err=to_text(out, errors='surrogate_then_replace'))

    for command in to_list(commands):
        if command == 'end':
            continue
        rc, out, err = exec_command(module, command)
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), command=command, rc=rc)

    # Always leave configuration mode.
    exec_command(module, 'end')
|
||||
0
lib/ansible/module_utils/network/aos/__init__.py
Normal file
0
lib/ansible/module_utils/network/aos/__init__.py
Normal file
180
lib/ansible/module_utils/network/aos/aos.py
Normal file
180
lib/ansible/module_utils/network/aos/aos.py
Normal file
@@ -0,0 +1,180 @@
|
||||
#
|
||||
# Copyright (c) 2017 Apstra Inc, <community@apstra.com>
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
|
||||
"""
|
||||
This module adds shared support for Apstra AOS modules
|
||||
|
||||
In order to use this module, include it as part of your module
|
||||
|
||||
from ansible.module_utils.network.aos.aos import (check_aos_version, get_aos_session, find_collection_item,
|
||||
content_to_dict, do_load_resource)
|
||||
|
||||
"""
|
||||
import json
|
||||
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
try:
|
||||
import yaml
|
||||
HAS_YAML = True
|
||||
except ImportError:
|
||||
HAS_YAML = False
|
||||
|
||||
try:
|
||||
from apstra.aosom.session import Session
|
||||
|
||||
HAS_AOS_PYEZ = True
|
||||
except ImportError:
|
||||
HAS_AOS_PYEZ = False
|
||||
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
def check_aos_version(module, min=False):
    """
    Check if the library aos-pyez is present.
    If provided, also check if the minimum version requirement is met.
    """
    if not HAS_AOS_PYEZ:
        module.fail_json(msg='aos-pyez is not installed. Please see details '
                             'here: https://github.com/Apstra/aos-pyez')
    elif min:
        import apstra.aosom
        # Fail when the installed version is below the requested minimum.
        if LooseVersion(apstra.aosom.__version__) < LooseVersion(min):
            module.fail_json(msg='aos-pyez >= %s is required for this module' % min)

    return True
|
||||
|
||||
|
||||
def get_aos_session(module, auth):
    """
    Resume an existing session and return an AOS object.

    Args:
        auth (dict): An AOS session as obtained by aos_login module blocks::

            dict( token=<token>,
                  server=<ip>,
                  port=<port>
                )

    Return:
        Aos object
    """

    # Verify the aos-pyez dependency before touching Session.
    check_aos_version(module)

    aos = Session()
    # Resume the previously-established session rather than logging in again.
    aos.session = auth

    return aos
|
||||
|
||||
|
||||
def find_collection_item(collection, item_name=False, item_id=False):
    """
    Locate a collection item by label (item_name) or uid (item_id).
    Collection and collection-item objects come from the aos-pyez library.

    Return
        collection_item: object corresponding to the collection type;
        when nothing matches, the collection's empty item (collection['']).
    """
    found = None
    if item_name:
        found = collection.find(label=item_name)
    elif item_id:
        found = collection.find(uid=item_id)

    if found is None:
        return collection['']
    return found
|
||||
|
||||
|
||||
def content_to_dict(module, content):
    """
    Convert 'content' (a YAML or JSON string) into a Python dict.

    Fails the module when the content does not parse to a non-empty dict.
    On success, also replaces module.params['content'] with the dict and
    returns it.
    """
    content_dict = None

    try:
        content_dict = yaml.safe_load(content)

        # Must be a dict, and a non-empty one.
        if not isinstance(content_dict, dict) or not content_dict:
            raise TypeError()

    # 'except Exception' instead of a bare 'except:': a bare clause would also
    # swallow SystemExit/KeyboardInterrupt, masking module exits.
    except Exception:
        module.fail_json(msg="Unable to convert 'content' to a dict, please check if valid")

    # replace the string with the dict
    module.params['content'] = content_dict

    return content_dict
|
||||
|
||||
|
||||
def do_load_resource(module, collection, name):
    """
    Create a new object (collection.item) by loading a datastructure directly.

    Exits the module unchanged when the item already exists; otherwise writes
    module.params['content'] into the item (unless in check mode) and exits
    with changed=True.
    """

    try:
        item = find_collection_item(collection, name, '')
    # Narrowed from a bare 'except:': a bare clause would also swallow
    # SystemExit raised by fail_json/exit_json inside the lookup.
    except Exception:
        module.fail_json(msg="An error occurred while running 'find_collection_item'")

    if item.exists:
        module.exit_json(changed=False, name=item.name, id=item.id, value=item.value)

    # If not in check mode, apply the changes
    if not module.check_mode:
        try:
            item.datum = module.params['content']
            item.write()
        except Exception as e:
            module.fail_json(msg="Unable to write item content : %r" % to_native(e))

    module.exit_json(changed=True, name=item.name, id=item.id, value=item.value)
|
||||
0
lib/ansible/module_utils/network/aruba/__init__.py
Normal file
0
lib/ansible/module_utils/network/aruba/__init__.py
Normal file
120
lib/ansible/module_utils/network/aruba/aruba.py
Normal file
120
lib/ansible/module_utils/network/aruba/aruba.py
Normal file
@@ -0,0 +1,120 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
|
||||
# Per-process cache of device configs, keyed by the exact CLI command used.
_DEVICE_CONFIGS = {}

# Connection options accepted under the nested 'provider' dictionary.
aruba_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'timeout': dict(type='int'),
}
aruba_argument_spec = {
    'provider': dict(type='dict', options=aruba_provider_spec)
}

# Deprecated top-level duplicates of the provider options (removed in Ansible 2.9).
aruba_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'timeout': dict(removed_in_version=2.9, type='int'),
}

aruba_argument_spec.update(aruba_top_spec)
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the Aruba 'provider' portion of the module argument spec."""
    return aruba_provider_spec
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """Platform hook for extra argument validation; Aruba has none."""
    pass
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the device running config, cached per exact command."""
    flag_text = ' '.join(flags or [])
    cmd = ('show running-config ' + flag_text).strip()

    # Serve from the per-command cache when available.
    if cmd in _DEVICE_CONFIGS:
        return _DEVICE_CONFIGS[cmd]

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_then_replace'))
    cfg = to_text(out, errors='surrogate_then_replace').strip()
    _DEVICE_CONFIGS[cmd] = cfg
    return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalize plain command strings into command/prompt/answer dicts."""
    command_spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict(),
    }
    return ComplexList(command_spec, module)(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Run each command on the device and return the raw text output.

    Fails the module on a non-zero return code when check_rc is True.
    """
    responses = []
    for cmd in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(cmd))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), rc=rc)
        responses.append(to_text(out, errors='surrogate_then_replace'))
    return responses
|
||||
|
||||
|
||||
def load_config(module, commands):
    """Apply configuration commands to the device.

    Enters 'configure terminal', sends each command (skipping a literal
    'end', which is sent once at the close), and fails on any non-zero rc.
    """

    rc, out, err = exec_command(module, 'configure terminal')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode', err=to_text(out, errors='surrogate_then_replace'))

    for command in to_list(commands):
        if command == 'end':
            continue
        rc, out, err = exec_command(module, command)
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), command=command, rc=rc)

    # Always leave configuration mode.
    exec_command(module, 'end')
|
||||
0
lib/ansible/module_utils/network/asa/__init__.py
Normal file
0
lib/ansible/module_utils/network/asa/__init__.py
Normal file
171
lib/ansible/module_utils/network/asa/asa.py
Normal file
171
lib/ansible/module_utils/network/asa/asa.py
Normal file
@@ -0,0 +1,171 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, EntityCollection
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.connection import Connection, ConnectionError
|
||||
|
||||
# Per-process cache of device configs, keyed by the exact CLI command used.
_DEVICE_CONFIGS = {}
# Lazily-created persistent connection shared across calls (see get_connection).
_CONNECTION = None

# Connection options accepted under the nested 'provider' dictionary.
asa_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
    'context': dict(),
    'passwords': dict()
}

asa_argument_spec = {
    'provider': dict(type='dict', options=asa_provider_spec),
}

# Top-level duplicates of the provider options; most deprecated in Ansible 2.9.
asa_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),
    'timeout': dict(removed_in_version=2.9, type='int'),
    'context': dict(),
    'passwords': dict()
}
asa_argument_spec.update(asa_top_spec)

# Shape of a single command entry for to_commands()/run_commands().
command_spec = {
    'command': dict(key=True),
    'prompt': dict(),
    'answer': dict()
}
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the ASA 'provider' portion of the module argument spec."""
    return asa_provider_spec
|
||||
|
||||
|
||||
def check_args(module):
    """Platform hook for extra argument validation; ASA has none."""
    pass
|
||||
|
||||
|
||||
def get_connection(module):
    """
    Return the module-level cached persistent Connection, creating it on
    first use. When the 'context' parameter is set, switches the ASA into
    that context ('changeto system' or 'changeto context <name>') right
    after connecting, so later commands run in the requested context.
    """
    global _CONNECTION
    if _CONNECTION:
        return _CONNECTION
    _CONNECTION = Connection(module._socket_path)

    context = module.params['context']

    if context:
        if context == 'system':
            command = 'changeto system'
        else:
            command = 'changeto context %s' % context
        # Context switch only happens once, when the connection is created.
        _CONNECTION.get(command)

    return _CONNECTION
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalize *commands* into command dicts according to ``command_spec``.

    In check mode, non-``show`` commands are only warned about, mirroring
    the fact that they will not be executed.
    """
    if not isinstance(commands, list):
        raise AssertionError('argument must be of type <list>')

    normalized = EntityCollection(module, command_spec)(commands)

    if module.check_mode:
        for entry in normalized:
            if not entry['command'].startswith('show'):
                module.warn('only show commands are supported when using check '
                            'mode, not executing `%s`' % entry['command'])

    return normalized
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute each command on the device and return the text responses."""
    connection = get_connection(module)

    responses = []
    for entry in to_commands(module, to_list(commands)):
        output = connection.get(**entry)
        responses.append(to_text(output, errors='surrogate_then_replace'))

    return responses
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the device running configuration, cached per command string.

    When the ``passwords`` module param is set, ``more system:running-config``
    is used so password material appears in the output; otherwise a plain
    ``show running-config`` (plus any *flags*) is issued.
    """
    flags = [] if flags is None else flags

    passwords = module.params['passwords']
    if passwords:
        cmd = 'more system:running-config'
    else:
        cmd = 'show running-config '
        cmd += ' '.join(flags)
        cmd = cmd.strip()

    try:
        # serve from the module-level cache when this exact command was
        # already fetched during this run
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        conn = get_connection(module)
        out = conn.get(cmd)
        cfg = to_text(out, errors='surrogate_then_replace').strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
|
||||
|
||||
|
||||
def load_config(module, config):
    """Apply *config* on the device, failing the module on ConnectionError."""
    try:
        get_connection(module).edit_config(config)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
|
||||
|
||||
|
||||
def get_defaults_flag(module):
    """Return the keyword ('all' or 'full') that exposes default config values.

    Probes the CLI help for ``show running-config ?`` and picks whichever
    variant the platform supports.
    """
    rc, out, err = exec_command(module, 'show running-config ?')
    help_text = to_text(out, errors='surrogate_then_replace')

    keywords = {line.strip().split()[0]
                for line in help_text.splitlines() if line}

    return 'all' if 'all' in keywords else 'full'
|
||||
0
lib/ansible/module_utils/network/avi/__init__.py
Normal file
0
lib/ansible/module_utils/network/avi/__init__.py
Normal file
82
lib/ansible/module_utils/network/avi/avi.py
Normal file
82
lib/ansible/module_utils/network/avi/avi.py
Normal file
@@ -0,0 +1,82 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c), Gaurav Rastogi <grastogi@avinetworks.com>, 2017
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# This module initially matched the namespace of network module avi. However,
|
||||
# that causes namespace import error when other modules from avi namespaces
|
||||
# are imported. Added import of absolute_import to avoid import collisions for
|
||||
# avi.sdk.
|
||||
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
HAS_AVI = True
|
||||
try:
|
||||
import avi.sdk
|
||||
sdk_version = getattr(avi.sdk, '__version__', None)
|
||||
if ((sdk_version is None) or (sdk_version and (LooseVersion(sdk_version) < LooseVersion('17.1')))):
|
||||
# It allows the __version__ to be '' as that value is used in development builds
|
||||
raise ImportError
|
||||
from avi.sdk.utils.ansible_utils import avi_ansible_api
|
||||
except ImportError:
|
||||
HAS_AVI = False
|
||||
|
||||
|
||||
def avi_common_argument_spec():
    """
    Returns common arguments for all Avi modules.

    Controller coordinates fall back to the AVI_CONTROLLER / AVI_USERNAME /
    AVI_PASSWORD environment variables when unset.
    :return: dict
    """
    env = os.environ.get
    return {
        'controller': dict(default=env('AVI_CONTROLLER', '')),
        'username': dict(default=env('AVI_USERNAME', '')),
        'password': dict(default=env('AVI_PASSWORD', ''), no_log=True),
        'tenant': dict(default='admin'),
        'tenant_uuid': dict(default=''),
        'api_version': dict(default='16.4'),
    }
|
||||
|
||||
|
||||
def ansible_return(module, rsp, changed, req=None, existing_obj=None):
    """
    Helper function to return the right ansible exit based on the HTTP
    status code and changed status.

    :param module: AnsibleModule
    :param rsp: ApiResponse object returned from ApiSession.
    :param changed: Whether something changed in this module.
    :param req: Dict data for Avi API call.
    :param existing_obj: Dict representing current HTTP resource in Avi Controller.

    Returns: specific ansible module exit function
    """
    if rsp.status_code > 299:
        return module.fail_json(msg='Error %d Msg %s req: %s' % (
            rsp.status_code, rsp.text, req))

    result = {'changed': changed, 'obj': rsp.json()}
    if changed and existing_obj:
        # include the pre-change object so callers can diff old vs new
        result['old_obj'] = existing_obj
    return module.exit_json(**result)
|
||||
91
lib/ansible/module_utils/network/bigswitch/bigswitch.py
Normal file
91
lib/ansible/module_utils/network/bigswitch/bigswitch.py
Normal file
@@ -0,0 +1,91 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016, Ted Elhourani <ted@bigswitch.com>
|
||||
#
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import json
|
||||
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
|
||||
class Response(object):
    """Wrap a ``fetch_url`` (resp, info) pair with convenient accessors."""

    def __init__(self, resp, info):
        # a failed request passes resp=None; the payload then lives in info
        self.body = resp.read() if resp else None
        self.info = info

    @property
    def json(self):
        """Parsed JSON payload, or None when nothing parseable is present."""
        if self.body:
            try:
                return json.loads(self.body)
            except ValueError:
                return None
        if "body" in self.info:
            # error responses carry their body inside the info dict
            return json.loads(self.info["body"])
        return None

    @property
    def status_code(self):
        """HTTP status code reported by fetch_url."""
        return self.info["status"]
|
||||
|
||||
|
||||
class Rest(object):
    """Thin REST helper over ``fetch_url`` for Big Switch controllers."""

    def __init__(self, module, headers, baseurl):
        self.module = module
        self.headers = headers
        self.baseurl = baseurl

    def _url_builder(self, path):
        """Join *path* onto the base URL, tolerating one leading slash."""
        if path[0] == '/':
            path = path[1:]
        return '%s/%s' % (self.baseurl, path)

    def send(self, method, path, data=None, headers=None):
        """Issue an HTTP request and wrap the result in a Response."""
        payload = self.module.jsonify(data)
        resp, info = fetch_url(self.module, self._url_builder(path),
                               data=payload, headers=self.headers,
                               method=method)
        return Response(resp, info)

    # Convenience wrappers, one per HTTP verb.
    def get(self, path, data=None, headers=None):
        return self.send('GET', path, data, headers)

    def put(self, path, data=None, headers=None):
        return self.send('PUT', path, data, headers)

    def post(self, path, data=None, headers=None):
        return self.send('POST', path, data, headers)

    def patch(self, path, data=None, headers=None):
        return self.send('PATCH', path, data, headers)

    def delete(self, path, data=None, headers=None):
        return self.send('DELETE', path, data, headers)
|
||||
474
lib/ansible/module_utils/network/cloudengine/ce.py
Normal file
474
lib/ansible/module_utils/network/cloudengine/ce.py
Normal file
@@ -0,0 +1,474 @@
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
#
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.six import iteritems
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
try:
|
||||
from ncclient import manager, xml_
|
||||
from ncclient.operations.rpc import RPCError
|
||||
from ncclient.transport.errors import AuthenticationError
|
||||
from ncclient.operations.errors import TimeoutExpiredError
|
||||
HAS_NCCLIENT = True
|
||||
except ImportError:
|
||||
HAS_NCCLIENT = False
|
||||
|
||||
|
||||
# Module-level connection caches, one per transport, shared by the helpers.
_DEVICE_CLI_CONNECTION = None
_DEVICE_NC_CONNECTION = None

# Connection options accepted under the 'provider' key.
ce_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'use_ssl': dict(type='bool'),
    'validate_certs': dict(type='bool'),
    'timeout': dict(type='int'),
    'transport': dict(default='cli', choices=['cli']),
}
# Argument spec exposing the options via 'provider'.
ce_argument_spec = {
    'provider': dict(type='dict', options=ce_provider_spec),
}
# Flat top-level equivalents, deprecated and removed in Ansible 2.9.
ce_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'use_ssl': dict(removed_in_version=2.9, type='bool'),
    'validate_certs': dict(removed_in_version=2.9, type='bool'),
    'timeout': dict(removed_in_version=2.9, type='int'),
    'transport': dict(removed_in_version=2.9, choices=['cli']),
}
ce_argument_spec.update(ce_top_spec)
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """No cloudengine-specific argument validation; kept for API parity."""
|
||||
|
||||
|
||||
def load_params(module):
    """Promote provider sub-options into top-level module params.

    A provider value is only used when the matching top-level parameter
    is unset, so explicit top-level values always win.
    """
    provider = module.params.get('provider') or dict()
    for key, value in iteritems(provider):
        if key in ce_argument_spec and value is not None and module.params.get(key) is None:
            module.params[key] = value
|
||||
|
||||
|
||||
def get_connection(module):
    """Return the module-wide cached Cli connection, creating it on first use."""
    global _DEVICE_CLI_CONNECTION
    if not _DEVICE_CLI_CONNECTION:
        # merge provider options into top-level params before connecting
        load_params(module)
        conn = Cli(module)
        _DEVICE_CLI_CONNECTION = conn
    return _DEVICE_CLI_CONNECTION
|
||||
|
||||
|
||||
def rm_config_prefix(cfg):
    """Strip the leading '~' marker that flags default configuration lines."""
    if not cfg:
        return cfg

    lines = cfg.split("\n")
    for pos, line in enumerate(lines):
        if not line:
            continue
        if '~' in line:
            tilde_at = line.index('~')
            # only treat '~' as a marker when everything before it is spaces
            if line[:tilde_at] == ' ' * tilde_at:
                lines[pos] = line.replace("~", "", 1)
    return '\n'.join(lines)
|
||||
|
||||
|
||||
class Cli:
    """CLI transport for CloudEngine devices, with a per-instance config cache."""

    def __init__(self, module):
        self._module = module
        # cache of command string -> config text, valid for this run only
        self._device_configs = {}

    def exec_command(self, command):
        """Run a single command; dict commands are serialized to JSON first."""
        if isinstance(command, dict):
            command = self._module.jsonify(command)

        return exec_command(self._module, command)

    def get_config(self, flags=None):
        """Retrieves the current config from the device or cache
        """
        flags = [] if flags is None else flags

        cmd = 'display current-configuration '
        cmd += ' '.join(flags)
        cmd = cmd.strip()

        try:
            return self._device_configs[cmd]
        except KeyError:
            rc, out, err = self.exec_command(cmd)
            if rc != 0:
                self._module.fail_json(msg=err)
            cfg = str(out).strip()
            # remove default configuration prefix '~'
            for flag in flags:
                if "include-default" in flag:
                    cfg = rm_config_prefix(cfg)
                    break

            self._device_configs[cmd] = cfg
            return cfg

    def run_commands(self, commands, check_rc=True):
        """Run list of commands on remote device and return results
        """
        responses = list()

        for item in to_list(commands):
            cmd = item['command']

            rc, out, err = self.exec_command(cmd)

            if check_rc and rc != 0:
                self._module.fail_json(msg=cli_err_msg(cmd.strip(), err))

            try:
                # prefer structured output when the device returns JSON
                out = self._module.from_json(out)
            except ValueError:
                out = str(out).strip()

            responses.append(out)
        return responses

    def load_config(self, config):
        """Sends configuration commands to the remote device
        """
        # mmi-mode suppresses interactive confirmation prompts
        rc, out, err = self.exec_command('mmi-mode enable')
        if rc != 0:
            self._module.fail_json(msg='unable to set mmi-mode enable', output=err)
        rc, out, err = self.exec_command('system-view immediately')
        if rc != 0:
            self._module.fail_json(msg='unable to enter system-view', output=err)

        for cmd in config:
            rc, out, err = self.exec_command(cmd)
            if rc != 0:
                self._module.fail_json(msg=cli_err_msg(cmd.strip(), err))

        # leave system-view when done
        self.exec_command('return')
|
||||
|
||||
|
||||
def cli_err_msg(cmd, err):
    """Build a readable error message from raw CLI error output.

    Filters out the echo of the command itself, caret position markers,
    device prompts (``<...>`` / ``[...]``) and blank fragments, then joins
    what remains into a single capitalized, period-terminated sentence
    prefixed with the failing command.

    :param cmd: the command that failed (may be empty/None)
    :param err: raw error text from the device
    :return: summary string suitable for fail_json
    """
    if not err:
        return "Error: Fail to get cli exception message."

    msg = list()
    # use a distinct loop variable; the original shadowed the 'err' parameter
    for line in str(err).split("\r\n"):
        line = line.strip('.,\r\n\t ')
        if not line:
            continue
        # drop the echoed command itself
        if cmd and cmd == line:
            continue
        if " at '^' position" in line:
            line = line.replace(" at '^' position", "").strip()
            line = line.strip('.,\r\n\t ')
        if line == "^":
            continue
        # drop device prompts such as <Huawei> or [Huawei]
        if len(line) > 2 and line[0] in ["<", "["] and line[-1] in [">", "]"]:
            continue
        # bugfix: the original called .strip() here and discarded the
        # result; keep the assignment so the intent is actually applied
        line = line.strip('.,\r\n\t ')
        if line:
            msg.append(line)

    if cmd:
        msg.insert(0, "Command: %s" % cmd)

    return ", ".join(msg).capitalize() + "."
|
||||
|
||||
|
||||
def to_command(module, commands):
    """Normalize *commands* into structured dicts, defaulting output to 'text'."""
    transform = ComplexList(dict(
        command=dict(key=True),
        output=dict(default='text'),
        prompt=dict(),
        response=dict()
    ), module)

    return transform(to_list(commands))
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the device running configuration via the cached CLI connection."""
    return get_connection(module).get_config([] if flags is None else flags)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Normalize and execute *commands*, returning per-command output."""
    normalized = to_command(module, commands)
    return get_connection(module).run_commands(normalized, check_rc)
|
||||
|
||||
|
||||
def load_config(module, config):
    """Send configuration lines to the device through the CLI connection."""
    return get_connection(module).load_config(config)
|
||||
|
||||
|
||||
def ce_unknown_host_cb(host, fingerprint):
    """ncclient unknown-host callback: accept any host key (checking disabled)."""
    return True
|
||||
|
||||
|
||||
def get_nc_set_id(xml_str):
    """Return the NETCONF pagination set-id found in *xml_str*, or None."""
    match = re.search(r'<rpc-reply.+?set-id=\"(\d+)\"', xml_str)
    return match.group(1) if match else None
|
||||
|
||||
|
||||
def get_xml_line(xml_list, index):
    """Return the entry at *index* that has visible (non-space) content.

    Whitespace-only entries encountered at *index* are removed from
    *xml_list* in place until content is found; returns None when *index*
    falls outside the (shrinking) list.
    """
    while xml_list:
        if index >= 0:
            if index >= len(xml_list):
                return None
        elif abs(index) > len(xml_list):
            return None

        candidate = xml_list[index]
        if candidate.replace(" ", ""):
            return candidate
        # drop the blank entry and look again at the same position
        xml_list.pop(index)
    return None
|
||||
|
||||
|
||||
def merge_nc_xml(xml1, xml2):
    """merge xml1 and xml2

    Joins two NETCONF reply documents produced by paginated get-next
    fetches: closing tags at the tail of *xml1* that mirror the opening
    tags at the head of *xml2*'s <data> section are dropped pairwise so
    the payloads concatenate into one document.
    """
    # lines before xml1's closing </data>, and lines after xml2's <data>
    xml1_list = xml1.split("</data>")[0].split("\n")
    xml2_list = xml2.split("<data>")[1].split("\n")

    while True:
        # compare the last two non-blank lines of xml1 with the first two
        # non-blank lines of xml2 (get_xml_line also drops blanks in place)
        xml1_ele1 = get_xml_line(xml1_list, -1)
        xml1_ele2 = get_xml_line(xml1_list, -2)
        xml2_ele1 = get_xml_line(xml2_list, 0)
        xml2_ele2 = get_xml_line(xml2_list, 1)
        if not xml1_ele1 or not xml1_ele2 or not xml2_ele1 or not xml2_ele2:
            return xml1

        # ignore namespace declarations when comparing tag names
        if "xmlns" in xml2_ele1:
            xml2_ele1 = xml2_ele1.lstrip().split(" ")[0] + ">"
        if "xmlns" in xml2_ele2:
            xml2_ele2 = xml2_ele2.lstrip().split(" ")[0] + ">"
        # drop matching closing/opening tag pairs at the seam
        if xml1_ele1.replace(" ", "").replace("/", "") == xml2_ele1.replace(" ", "").replace("/", ""):
            if xml1_ele2.replace(" ", "").replace("/", "") == xml2_ele2.replace(" ", "").replace("/", ""):
                xml1_list.pop()
                xml2_list.pop(0)
            else:
                break
        else:
            break

    return "\n".join(xml1_list + xml2_list)
|
||||
|
||||
|
||||
class Netconf(object):
    """ Netconf

    NETCONF session to a CloudEngine device via ncclient; any RPC failure
    is reported through the module's fail_json.
    """

    def __init__(self, module):

        self._module = module

        if not HAS_NCCLIENT:
            self._module.fail_json(msg='Error: The ncclient library is required.')

        try:
            self.mc = manager.connect(host=module.params["host"], port=module.params["port"],
                                      username=module.params["username"],
                                      password=module.params["password"],
                                      unknown_host_cb=ce_unknown_host_cb,
                                      allow_agent=False,
                                      look_for_keys=False,
                                      hostkey_verify=False,
                                      device_params={'name': 'huawei'},
                                      timeout=30)
        except AuthenticationError:
            self._module.fail_json(msg='Error: Authentication failed while connecting to device.')
        except Exception as err:
            self._module.fail_json(msg='Error: %s' % to_native(err).replace("\r\n", ""),
                                   exception=traceback.format_exc())
            # NOTE(review): fail_json normally exits, so this raise only
            # fires if fail_json returns — confirm it is intentional
            raise

    def __del__(self):

        # close the NETCONF session when the wrapper is garbage-collected
        self.mc.close_session()

    def set_config(self, xml_str):
        """ set_config: apply *xml_str* to the running datastore """

        con_obj = None

        try:
            con_obj = self.mc.edit_config(target='running', config=xml_str)
        except RPCError as err:
            self._module.fail_json(msg='Error: %s' % to_native(err).replace("\r\n", ""))

        return con_obj.xml

    def get_config(self, xml_str):
        """ get_config: fetch data matching the *xml_str* filter.

        Follows huawei set-id pagination, merging each get-next reply
        into the accumulated document until no set-id remains.
        """

        con_obj = None
        try:
            con_obj = self.mc.get(filter=xml_str)
        except RPCError as err:
            self._module.fail_json(msg='Error: %s' % to_native(err).replace("\r\n", ""))

        set_id = get_nc_set_id(con_obj.xml)
        if not set_id:
            return con_obj.xml

        # continue to get next
        xml_str = con_obj.xml
        while set_id:
            set_attr = dict()
            set_attr["set-id"] = str(set_id)
            xsd_fetch = xml_.new_ele_ns('get-next', "http://www.huawei.com/netconf/capability/base/1.0", set_attr)
            # get next data
            try:
                con_obj_next = self.mc.dispatch(xsd_fetch)
            except RPCError as err:
                self._module.fail_json(msg='Error: %s' % to_native(err).replace("\r\n", ""))

            if "<data/>" in con_obj_next.xml:
                # empty page: no more data to fetch
                break

            # merge two xml data
            xml_str = merge_nc_xml(xml_str, con_obj_next.xml)
            set_id = get_nc_set_id(con_obj_next.xml)

        return xml_str

    def execute_action(self, xml_str):
        """huawei execute-action"""

        con_obj = None

        try:
            con_obj = self.mc.action(action=xml_str)
        except RPCError as err:
            self._module.fail_json(msg='Error: %s' % to_native(err).replace("\r\n", ""))
        except TimeoutExpiredError:
            # let the caller decide how to handle RPC timeouts
            raise

        return con_obj.xml

    def execute_cli(self, xml_str):
        """huawei execute-cli"""

        con_obj = None

        try:
            con_obj = self.mc.cli(command=xml_str)
        except RPCError as err:
            self._module.fail_json(msg='Error: %s' % to_native(err).replace("\r\n", ""))

        return con_obj.xml
|
||||
|
||||
|
||||
def get_nc_connection(module):
    """Return the module-wide cached Netconf connection, creating it once."""
    global _DEVICE_NC_CONNECTION
    if not _DEVICE_NC_CONNECTION:
        # merge provider options into top-level params before connecting
        load_params(module)
        conn = Netconf(module)
        _DEVICE_NC_CONNECTION = conn
    return _DEVICE_NC_CONNECTION
|
||||
|
||||
|
||||
def set_nc_config(module, xml_str):
    """Apply *xml_str* to the running datastore over NETCONF."""
    return get_nc_connection(module).set_config(xml_str)
|
||||
|
||||
|
||||
def get_nc_config(module, xml_str):
    """Fetch configuration matching the *xml_str* filter over NETCONF."""
    return get_nc_connection(module).get_config(xml_str)
|
||||
|
||||
|
||||
def execute_nc_action(module, xml_str):
    """Run a huawei execute-action RPC over the cached NETCONF session."""
    return get_nc_connection(module).execute_action(xml_str)
|
||||
|
||||
|
||||
def execute_nc_cli(module, xml_str):
    """Run a huawei execute-cli RPC over the cached NETCONF session."""
    return get_nc_connection(module).execute_cli(xml_str)
|
||||
|
||||
|
||||
def check_ip_addr(ipaddr):
    """Return True when *ipaddr* is a literal IPv4 or IPv6 address.

    Uses getaddrinfo with AI_NUMERICHOST so no DNS lookup is performed.
    """
    if not ipaddr or '\x00' in ipaddr:
        return False

    try:
        info = socket.getaddrinfo(ipaddr, 0, socket.AF_UNSPEC,
                                  socket.SOCK_STREAM, 0,
                                  socket.AI_NUMERICHOST)
    except socket.gaierror as exc:
        if exc.args[0] == socket.EAI_NONAME:
            # not a numeric address
            return False
        raise
    return bool(info)
|
||||
0
lib/ansible/module_utils/network/cnos/__init__.py
Normal file
0
lib/ansible/module_utils/network/cnos/__init__.py
Normal file
3516
lib/ansible/module_utils/network/cnos/cnos.py
Normal file
3516
lib/ansible/module_utils/network/cnos/cnos.py
Normal file
File diff suppressed because it is too large
Load Diff
1729
lib/ansible/module_utils/network/cnos/cnos_devicerules.py
Normal file
1729
lib/ansible/module_utils/network/cnos/cnos_devicerules.py
Normal file
File diff suppressed because it is too large
Load Diff
256
lib/ansible/module_utils/network/cnos/cnos_errorcodes.py
Normal file
256
lib/ansible/module_utils/network/cnos/cnos_errorcodes.py
Normal file
@@ -0,0 +1,256 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by
|
||||
# Ansible still belong to the author of the module, and may assign their own
|
||||
# license to the complete work.
|
||||
#
|
||||
# Copyright (C) 2017 Lenovo, Inc.
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Contains error codes and methods
|
||||
# Lenovo Networking
|
||||
|
||||
errorDict = {0: 'Success',
|
||||
1: 'NOK',
|
||||
101: 'Device Response Timed out',
|
||||
102: 'Command Not supported - Use CLI command',
|
||||
103: 'Invalid Context',
|
||||
104: 'Command Value Not Supported as of Now. Use vlan Id only',
|
||||
105: 'Invalid interface Range',
|
||||
106: 'Please provide Enable Password.',
|
||||
108: '',
|
||||
109: '',
|
||||
110: 'Invalid protocol option',
|
||||
111: 'The Value is not Integer',
|
||||
112: 'The Value is not Float',
|
||||
113: 'Value is not in Range',
|
||||
114: 'Range value is not Integer',
|
||||
115: 'Value is not in Options',
|
||||
116: 'The Value is not Long',
|
||||
117: 'Range value is not Long',
|
||||
118: 'The Value cannot be empty',
|
||||
119: 'The Value is not String',
|
||||
120: 'The Value is not Matching',
|
||||
121: 'The Value is not IPV4 Address',
|
||||
122: 'The Value is not IPV6 Address',
|
||||
123: '',
|
||||
124: '',
|
||||
125: '',
|
||||
126: '',
|
||||
127: '',
|
||||
128: '',
|
||||
129: '',
|
||||
130: 'Invalid Access Map Name',
|
||||
131: 'Invalid Vlan Dot1q Tag',
|
||||
132: 'Invalid Vlan filter value',
|
||||
133: 'Invalid Vlan Range Value',
|
||||
134: 'Invalid Vlan Id',
|
||||
135: 'Invalid Vlan Access Map Action',
|
||||
136: 'Invalid Vlan Access Map Name',
|
||||
137: 'Invalid Access List',
|
||||
138: 'Invalid Vlan Access Map parameter',
|
||||
139: 'Invalid Vlan Name',
|
||||
140: 'Invalid Vlan Flood value,',
|
||||
141: 'Invalid Vlan State Value',
|
||||
142: 'Invalid Vlan Last Member query Interval',
|
||||
143: 'Invalid Querier IP address',
|
||||
144: 'Invalid Querier Time out',
|
||||
145: 'Invalid Query Interval',
|
||||
146: 'Invalid Vlan query max response time',
|
||||
147: 'Invalid vlan robustness variable',
|
||||
148: 'Invalid Vlan Startup Query count',
|
||||
149: 'Invalid vlan Startup Query Interval',
|
||||
150: 'Invalid Vlan snooping version',
|
||||
151: 'Invalid Vlan Ethernet Interface',
|
||||
152: 'Invalid Vlan Port Tag Number',
|
||||
153: 'Invalid mrouter option',
|
||||
154: 'Invalid Vlan Option',
|
||||
155: '',
|
||||
156: '',
|
||||
157: '',
|
||||
158: '',
|
||||
159: '',
|
||||
160: 'Invalid Vlag Auto Recovery Value',
|
||||
161: 'Invalid Vlag Config Consistency Value',
|
||||
162: 'Invalid Vlag Port Aggregation Number',
|
||||
163: 'Invalid Vlag Priority Value',
|
||||
164: 'Invalid Vlag Startup delay value',
|
||||
165: 'Invalid Vlag Trie Id',
|
||||
166: 'Invalid Vlag Instance Option',
|
||||
167: 'Invalid Vlag Keep Alive Attempts',
|
||||
168: 'Invalid Vlag Keep Alive Interval',
|
||||
169: 'Invalid Vlag Retry Interval',
|
||||
170: 'Invalid Vlag Peer Ip VRF Value',
|
||||
171: 'Invalid Vlag Health Check Options',
|
||||
172: 'Invalid Vlag Option',
|
||||
173: '',
|
||||
174: '',
|
||||
175: '',
|
||||
176: 'Invalid BGP As Number',
|
||||
177: 'Invalid Routing protocol option',
|
||||
178: 'Invalid BGP Address Family',
|
||||
179: 'Invalid AS Path options',
|
||||
180: 'Invalid BGP med options',
|
||||
181: 'Invalid Best Path option',
|
||||
182: 'Invalid BGP Local count number',
|
||||
183: 'Cluster Id has to either IP or AS Number',
|
||||
184: 'Invalid confederation identifier',
|
||||
185: 'Invalid Confederation Peer AS Value',
|
||||
186: 'Invalid Confederation Option',
|
||||
187: 'Invalid state path relay value',
|
||||
188: 'Invalid Maxas Limit AS Value',
|
||||
189: 'Invalid Neighbor IP Address or Neighbor AS Number',
|
||||
190: 'Invalid Router Id',
|
||||
191: 'Invalid BGP Keep Alive Interval',
|
||||
192: 'Invalid BGP Hold time',
|
||||
193: 'Invalid BGP Option',
|
||||
194: 'Invalid BGP Address Family option',
|
||||
195: 'Invalid BGP Address Family Redistribution option. ',
|
||||
196: 'Invalid BGP Address Family Route Map Name',
|
||||
197: 'Invalid Next Hop Critical Delay',
|
||||
198: 'Invalid Next Hop Non Critical Delay',
|
||||
199: 'Invalid Multipath Number Value',
|
||||
200: 'Invalid Aggegation Group Mode',
|
||||
201: 'Invalid Aggregation Group No',
|
||||
202: 'Invalid BFD Access Vlan',
|
||||
203: 'Invalid CFD Bridgeport Mode',
|
||||
204: 'Invalid Trunk Option',
|
||||
205: 'Invalid BFD Option',
|
||||
206: 'Invalid Portchannel description',
|
||||
207: 'Invalid Portchannel duplex option',
|
||||
208: 'Invalid Flow control option state',
|
||||
209: 'Invalid Flow control option',
|
||||
210: 'Invalid LACP Port priority',
|
||||
211: 'Invalid LACP Time out options',
|
||||
212: 'Invalid LACP Command options',
|
||||
213: 'Invalid LLDP TLV Option',
|
||||
214: 'Invalid LLDP Option',
|
||||
215: 'Invalid Load interval delay',
|
||||
216: 'Invalid Load interval Counter Number',
|
||||
217: 'Invalid Load Interval option',
|
||||
218: 'Invalid Mac Access Group Name',
|
||||
219: 'Invalid Mac Address',
|
||||
220: 'Invalid Microburst threshold value',
|
||||
221: 'Invalid MTU Value',
|
||||
222: 'Invalid Service instance value',
|
||||
223: 'Invalid service policy name',
|
||||
224: 'Invalid service policy options',
|
||||
225: 'Invalid Interface speed value',
|
||||
226: 'Invalid Storm control level value',
|
||||
227: 'Invalid Storm control option',
|
||||
228: 'Invalid Portchannel dot1q tag',
|
||||
229: 'Invalid VRRP Id Value',
|
||||
230: 'Invalid VRRP Options',
|
||||
231: 'Invalid portchannel source interface option',
|
||||
232: 'Invalid portchannel load balance options',
|
||||
233: 'Invalid Portchannel configuration attribute',
|
||||
234: 'Invalid BFD Interval Value',
|
||||
235: 'Invalid BFD minrx Value',
|
||||
236: 'Invalid BFD multiplier Value',
|
||||
237: 'Invalid Key Chain Value',
|
||||
238: 'Invalid key name option',
|
||||
239: 'Invalid key id value',
|
||||
240: 'Invalid Key Option',
|
||||
241: 'Invalid authentication option',
|
||||
242: 'Invalid destination Ip',
|
||||
243: 'Invalid source Ip',
|
||||
244: 'Invalid IP Option',
|
||||
245: 'Invalid Access group option',
|
||||
246: 'Invalid Access group name',
|
||||
247: 'Invalid ARP MacAddress Value',
|
||||
248: 'Invalid ARP timeout value',
|
||||
249: 'Invalid ARP Option',
|
||||
250: 'Invalid dhcp request option',
|
||||
251: 'Invalid dhcp Client option',
|
||||
252: 'Invalid relay Ip Address',
|
||||
253: 'Invalid dhcp Option',
|
||||
254: 'Invalid OSPF Option',
|
||||
255: 'Invalid OSPF Id IP Address Value',
|
||||
256: 'Invalid Ip Router Option',
|
||||
257: 'Invalid Spanning tree bpdufilter Options',
|
||||
258: 'Invalid Spanning tree bpduguard Options',
|
||||
259: 'Invalid Spanning tree cost Options',
|
||||
260: 'Invalid Spanning tree guard Options',
|
||||
261: 'Invalid Spanning tree link-type Options',
|
||||
262: 'Invalid Spanning tree link-type Options',
|
||||
263: 'Invalid Spanning tree options',
|
||||
264: 'Port-priority in increments of 32 is required',
|
||||
265: 'Invalid Spanning tree vlan options',
|
||||
266: 'Invalid IPv6 option',
|
||||
267: 'Invalid IPV6 neighbor IP Address',
|
||||
268: 'Invalid IPV6 neighbor mac address',
|
||||
269: 'Invalid IPV6 dhcp option',
|
||||
270: 'Invalid IPV6 relay address option',
|
||||
271: 'Invalid IPV6 Ethernet option',
|
||||
272: 'Invalid IPV6 Vlan option',
|
||||
273: 'Invalid IPV6 Link Local option',
|
||||
274: 'Invalid IPV6 dhcp option',
|
||||
275: 'Invalid IPV6 Address',
|
||||
276: 'Invalid IPV6 Address option',
|
||||
277: 'Invalid BFD neighbor options',
|
||||
278: 'Invalid Secondary option',
|
||||
289: 'Invalid PortChannel IPV4 address',
|
||||
290: 'Invalid Max Path Options',
|
||||
291: 'Invalid Distance Local Route value',
|
||||
292: 'Invalid Distance Internal AS value',
|
||||
293: 'Invalid Distance External AS value',
|
||||
294: 'Invalid BGP Reachability Half Life',
|
||||
295: 'Invalid BGP Dampening parameter',
|
||||
296: 'Invalid BGP Aggregate Prefix value',
|
||||
297: 'Invalid BGP Aggregate Prefix Option',
|
||||
298: 'Invalid BGP Address Family Route Map Name',
|
||||
299: 'Invalid BGP Net IP Mask Value',
|
||||
300: 'Invalid BGP Net IP Prefix Value',
|
||||
301: 'Invalid BGP Neighbor configuration option',
|
||||
302: 'Invalid BGP Neighbor Weight Value',
|
||||
303: 'Invalid Neigbor update source option',
|
||||
304: 'Invalid Ethernet slot/chassis number',
|
||||
305: 'Invalid Loopback Interface number',
|
||||
306: 'Invalid vlan id',
|
||||
307: 'Invalid Number of hops',
|
||||
308: 'Invalid Neighbor Keepalive interval',
|
||||
309: 'Invalid Neighbor timer hold time',
|
||||
310: 'Invalid neighbor password ',
|
||||
311: 'Invalid Max peer limit',
|
||||
312: 'Invalid Local AS Number',
|
||||
313: 'Invalid maximum hop count',
|
||||
314: 'Invalid neighbor description',
|
||||
315: 'Invalid Neighbor connect timer value',
|
||||
316: 'Invalid Neighbor address family option',
|
||||
317: 'Invalid neighbor address family option',
|
||||
318: 'Invalid route-map name',
|
||||
319: 'Invalid route-map',
|
||||
320: 'Invalid Name of a prefix list',
|
||||
321: 'Invalid Filter incoming option',
|
||||
322: 'Invalid AS path access-list name',
|
||||
323: 'Invalid Filter route option',
|
||||
324: 'Invalid route-map name',
|
||||
325: 'Invalid Number of occurrences of AS number',
|
||||
326: 'Invalid Prefix Limit'}
|
||||
|
||||
|
||||
def getErrorString(errorCode):
    """Translate a numeric device error code into its message text.

    Raises KeyError when the code is not present in errorDict.
    """
    return errorDict[int(errorCode)]
|
||||
# EOM
|
||||
0
lib/ansible/module_utils/network/common/__init__.py
Normal file
0
lib/ansible/module_utils/network/common/__init__.py
Normal file
444
lib/ansible/module_utils/network/common/config.py
Normal file
444
lib/ansible/module_utils/network/common/config.py
Normal file
@@ -0,0 +1,444 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import re
|
||||
import hashlib
|
||||
|
||||
from ansible.module_utils.six.moves import zip
|
||||
from ansible.module_utils._text import to_bytes, to_native
|
||||
from ansible.module_utils.network.common.utils import to_list
|
||||
|
||||
# Prefixes that mark a config line as a comment on most network devices.
DEFAULT_COMMENT_TOKENS = ['#', '!', '/*', '*/', 'echo']

# Informational banner lines some devices emit inside `show running-config`
# output; these are noise, not configuration, and are dropped during parse.
# NOTE: this set is module-global and NetworkConfig.__init__ adds to it.
DEFAULT_IGNORE_LINES_RE = set([
    re.compile(r"Using \d+ out of \d+ bytes"),
    re.compile(r"Building configuration"),
    re.compile(r"Current configuration : \d+ bytes")
])
|
||||
|
||||
|
||||
class ConfigLine(object):
    """One line of device configuration with parent/child hierarchy links.

    ``raw`` keeps the original text (indentation included) while ``text``
    is the stripped form used for lookups and comparisons.
    """

    def __init__(self, raw):
        self.raw = raw
        self.text = str(raw).strip()
        self._children = list()
        self._parents = list()

    def __str__(self):
        return self.raw

    def __eq__(self, other):
        # two lines are equal when their fully-qualified paths match
        return self.line == other.line

    def __ne__(self, other):
        return not self.__eq__(other)

    def __getitem__(self, key):
        for child in self._children:
            if child.text == key:
                return child
        raise KeyError(key)

    @property
    def line(self):
        """The line prefixed by all of its parent lines, space-joined."""
        parts = self.parents
        parts.append(self.text)
        return ' '.join(parts)

    @property
    def children(self):
        # stripped text of each direct child
        return _obj_to_text(self._children)

    @property
    def child_objs(self):
        return self._children

    @property
    def parents(self):
        # stripped text of each ancestor, outermost first
        return _obj_to_text(self._parents)

    @property
    def path(self):
        """Raw ancestor lines plus this line, newline-joined."""
        lines = _obj_to_raw(self._parents)
        lines.append(self.raw)
        return '\n'.join(lines)

    @property
    def has_children(self):
        return len(self._children) > 0

    @property
    def has_parents(self):
        return len(self._parents) > 0

    def add_child(self, obj):
        """Append *obj* as a direct child; it must be a ConfigLine."""
        if not isinstance(obj, ConfigLine):
            raise AssertionError('child must be of type `ConfigLine`')
        self._children.append(obj)
|
||||
|
||||
|
||||
def ignore_line(text, tokens=None):
    """Return True when *text* is a comment or device banner to skip.

    :param text: stripped config line
    :param tokens: comment prefixes; defaults to DEFAULT_COMMENT_TOKENS
    """
    comment_tokens = tokens or DEFAULT_COMMENT_TOKENS
    if any(text.startswith(tok) for tok in comment_tokens):
        return True
    if any(pattern.match(text) for pattern in DEFAULT_IGNORE_LINES_RE):
        return True
|
||||
|
||||
|
||||
def _obj_to_text(x):
|
||||
return [o.text for o in x]
|
||||
|
||||
|
||||
def _obj_to_raw(x):
|
||||
return [o.raw for o in x]
|
||||
|
||||
|
||||
def _obj_to_block(objects, visited=None):
    """Flatten *objects* and their direct children into raw config lines.

    Order is preserved and duplicates are dropped; children are only
    pulled in the first time their parent is seen.  (*visited* is unused
    but kept for interface compatibility.)
    """
    collected = list()
    for parent in objects:
        if parent in collected:
            continue
        collected.append(parent)
        for child in parent._children:
            if child not in collected:
                collected.append(child)
    return _obj_to_raw(collected)
|
||||
|
||||
|
||||
def dumps(objects, output='block', comments=False):
    """Serialize a list of ConfigLine objects to a single string.

    :param objects: ConfigLine objects to render
    :param output: 'block' (lines plus children) or 'commands' (text only)
    :param comments: when rendering a block, insert '!' separators before
        each new top-level section and terminate with '!'/'end'
    :raises TypeError: on an unrecognised *output* value
    """
    if output == 'block':
        lines = _obj_to_block(objects)
    elif output == 'commands':
        lines = _obj_to_text(objects)
    else:
        raise TypeError('unknown value supplied for keyword output')

    if comments and output != 'commands':
        for idx, text in enumerate(lines):
            follower = idx + 1
            # a non-indented line followed by an indented one opens a
            # new section; prefix it with a '!' separator
            if follower < len(lines) and not text.startswith(' ') and lines[follower].startswith(' '):
                lines[idx] = '!\n%s' % text
        lines.append('!')
        lines.append('end')

    return '\n'.join(lines)
|
||||
|
||||
|
||||
class NetworkConfig(object):
    """Indentation-based parser for network device configuration text.

    The running-config is parsed into a flat list of ConfigLine objects
    whose parent/child links mirror the indentation hierarchy; lookup,
    diff and merge helpers operate over that structure.
    """

    def __init__(self, indent=1, contents=None, ignore_lines=None):
        """Create a parser.

        :param indent: number of spaces equal to one nesting level
        :param contents: optional config text to parse immediately
        :param ignore_lines: extra regex patterns (strings or compiled
            patterns) whose matching lines are dropped while parsing
        """
        self._indent = indent
        self._items = list()
        self._config_text = None

        if ignore_lines:
            for item in ignore_lines:
                # Accept both plain strings and pre-compiled patterns.
                # Duck-type on the `match` attribute instead of testing
                # isinstance(item, re._pattern_type): that private name
                # was removed in Python 3.7 and raised AttributeError.
                if not hasattr(item, 'match'):
                    item = re.compile(item)
                DEFAULT_IGNORE_LINES_RE.add(item)

        if contents:
            self.load(contents)

    @property
    def items(self):
        # parsed ConfigLine objects, in file order
        return self._items

    @property
    def config_text(self):
        # the raw text most recently passed to load()
        return self._config_text

    @property
    def sha1(self):
        """Binary SHA-1 digest of the serialized config (change detection)."""
        sha1 = hashlib.sha1()
        sha1.update(to_bytes(str(self), errors='surrogate_or_strict'))
        return sha1.digest()

    def __getitem__(self, key):
        for line in self:
            if line.text == key:
                return line
        raise KeyError(key)

    def __iter__(self):
        return iter(self._items)

    def __str__(self):
        return '\n'.join([c.raw for c in self.items])

    def __len__(self):
        return len(self._items)

    def load(self, s):
        """Parse config text *s*, replacing any previously loaded items."""
        self._config_text = s
        self._items = self.parse(s)

    def loadfp(self, fp):
        """Load config text from the file at path *fp*.

        Uses a context manager so the file handle is always closed; the
        previous implementation leaked the handle.
        """
        with open(fp) as f:
            return self.load(f.read())

    def parse(self, lines, comment_tokens=None):
        """Parse config text into a list of linked ConfigLine objects.

        :param lines: the configuration as a single string
        :param comment_tokens: overrides DEFAULT_COMMENT_TOKENS
        """
        toplevel = re.compile(r'\S')
        childline = re.compile(r'^\s*(.+)$')
        entry_reg = re.compile(r'([{};])')

        ancestors = list()
        config = list()

        curlevel = 0
        prevlevel = 0

        for linenum, line in enumerate(to_native(lines, errors='surrogate_or_strict').split('\n')):
            # strip structural brace/semicolon characters before testing
            text = entry_reg.sub('', line).strip()

            cfg = ConfigLine(line)

            if not text or ignore_line(text, comment_tokens):
                continue

            # handle top level commands
            if toplevel.match(line):
                ancestors = [cfg]
                prevlevel = curlevel
                curlevel = 0

            # handle sub level commands
            else:
                match = childline.match(line)
                line_indent = match.start(1)

                prevlevel = curlevel
                curlevel = int(line_indent / self._indent)

                if (curlevel - 1) > prevlevel:
                    # indentation jumped by more than one level; clamp so
                    # the line still attaches to the nearest known ancestor
                    curlevel = prevlevel + 1

                parent_level = curlevel - 1

                cfg._parents = ancestors[:curlevel]

                if curlevel > len(ancestors):
                    # orphaned sub-line with no visible parent chain
                    config.append(cfg)
                    continue

                # drop ancestors deeper than the current level
                for i in range(curlevel, len(ancestors)):
                    ancestors.pop()

                ancestors.append(cfg)
                ancestors[parent_level].add_child(cfg)

            config.append(cfg)

        return config

    def get_object(self, path):
        """Return the ConfigLine whose full parent path equals *path*."""
        for item in self.items:
            if item.text == path[-1]:
                if item.parents == path[:-1]:
                    return item

    def get_block(self, path):
        """Return the object at *path* expanded with all descendants.

        :raises ValueError: when *path* does not exist in the config
        """
        if not isinstance(path, list):
            raise AssertionError('path argument must be a list object')
        obj = self.get_object(path)
        if not obj:
            raise ValueError('path does not exist in config')
        return self._expand_block(obj)

    def get_block_config(self, path):
        """Return the block at *path* rendered as config text."""
        block = self.get_block(path)
        return dumps(block, 'block')

    def _expand_block(self, configobj, S=None):
        # depth-first expansion of configobj plus all of its descendants
        if S is None:
            S = list()
        S.append(configobj)
        for child in configobj._children:
            if child in S:
                continue
            self._expand_block(child, S)
        return S

    def _diff_line(self, other):
        # lines present here but absent from other, position-independent
        updates = list()
        for item in self.items:
            if item not in other:
                updates.append(item)
        return updates

    def _diff_strict(self, other):
        # positional compare: a line differs if the same index differs
        updates = list()
        for index, line in enumerate(self.items):
            try:
                if str(line).strip() != str(other[index]).strip():
                    updates.append(line)
            except (AttributeError, IndexError):
                updates.append(line)
        return updates

    def _diff_exact(self, other):
        # all-or-nothing: any mismatch returns the entire config
        updates = list()
        if len(other) != len(self.items):
            updates.extend(self.items)
        else:
            for ours, theirs in zip(self.items, other):
                if ours != theirs:
                    updates.extend(self.items)
                    break
        return updates

    def difference(self, other, match='line', path=None, replace=None):
        """Perform a config diff against the another network config

        :param other: instance of NetworkConfig to diff against
        :param match: type of diff to perform. valid values are 'line',
            'strict', 'exact'
        :param path: context in the network config to filter the diff
        :param replace: the method used to generate the replacement lines.
            valid values are 'block', 'line'

        :returns: a string of lines that are different
        """
        if path and match != 'line':
            try:
                other = other.get_block(path)
            except ValueError:
                other = list()
        else:
            other = other.items

        # generate a list of ConfigLines that aren't in other
        meth = getattr(self, '_diff_%s' % match)
        updates = meth(other)

        if replace == 'block':
            # replace whole blocks: collect each update's top-level
            # ancestor and re-expand it with all descendants
            parents = list()
            for item in updates:
                if not item.has_parents:
                    parents.append(item)
                else:
                    for p in item._parents:
                        if p not in parents:
                            parents.append(p)

            updates = list()
            for item in parents:
                updates.extend(self._expand_block(item))

        # emit each line preceded by any parents not already emitted
        visited = set()
        expanded = list()

        for item in updates:
            for p in item._parents:
                if p.line not in visited:
                    visited.add(p.line)
                    expanded.append(p)
            expanded.append(item)
            visited.add(item.line)

        return expanded

    def add(self, lines, parents=None):
        """Add config *lines*, creating any missing *parents* hierarchy."""
        ancestors = list()
        offset = 0
        obj = None

        # global config command
        if not parents:
            for line in lines:
                item = ConfigLine(line)
                item.raw = line
                if item not in self.items:
                    self.items.append(item)

        else:
            for index, p in enumerate(parents):
                try:
                    i = index + 1
                    obj = self.get_block(parents[:i])[0]
                    ancestors.append(obj)

                except ValueError:
                    # add parent to config
                    offset = index * self._indent
                    obj = ConfigLine(p)
                    obj.raw = p.rjust(len(p) + offset)
                    if ancestors:
                        obj._parents = list(ancestors)
                        ancestors[-1]._children.append(obj)
                    self.items.append(obj)
                    ancestors.append(obj)

            # add child objects
            for line in lines:
                # check if child already exists
                for child in ancestors[-1]._children:
                    if child.text == line:
                        break
                else:
                    offset = len(parents) * self._indent
                    item = ConfigLine(line)
                    item.raw = line.rjust(len(line) + offset)
                    item._parents = ancestors
                    ancestors[-1]._children.append(item)
                    self.items.append(item)
|
||||
|
||||
|
||||
class CustomNetworkConfig(NetworkConfig):
    """NetworkConfig variant with section-oriented helper methods."""

    def items_text(self):
        """Stripped text of every parsed line, in order."""
        texts = []
        for entry in self.items:
            texts.append(entry.text)
        return texts

    def expand_section(self, configobj, S=None):
        """Depth-first list of *configobj* and all of its descendants."""
        if S is None:
            S = list()
        S.append(configobj)
        for child in configobj.child_objs:
            if child not in S:
                self.expand_section(child, S)
        return S

    def to_block(self, section):
        """Render a list of ConfigLine objects as raw config text."""
        return '\n'.join(entry.raw for entry in section)

    def get_section(self, path):
        """Section text at *path*, or an empty list when absent."""
        try:
            section = self.get_section_objects(path)
        except ValueError:
            return list()
        return self.to_block(section)

    def get_section_objects(self, path):
        """ConfigLine objects of the section at *path*.

        :raises ValueError: when *path* does not exist in the config
        """
        if not isinstance(path, list):
            path = [path]
        obj = self.get_object(path)
        if not obj:
            raise ValueError('path does not exist in config')
        return self.expand_section(obj)
|
||||
87
lib/ansible/module_utils/network/common/netconf.py
Normal file
87
lib/ansible/module_utils/network/common/netconf.py
Normal file
@@ -0,0 +1,87 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2017 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
from ansible.module_utils._text import to_text, to_native
|
||||
from ansible.module_utils.connection import Connection, ConnectionError
|
||||
|
||||
try:
|
||||
from lxml.etree import Element, fromstring
|
||||
except ImportError:
|
||||
from xml.etree.ElementTree import Element, fromstring
|
||||
|
||||
# XML namespace map used to locate netconf base-1.0 elements in replies.
NS_MAP = {'nc': "urn:ietf:params:xml:ns:netconf:base:1.0"}


def exec_rpc(module, *args, **kwargs):
    # Convenience wrapper: bind a NetconfConnection to the module's
    # persistent-connection socket and execute a single RPC over it.
    connection = NetconfConnection(module._socket_path)
    return connection.execute_rpc(*args, **kwargs)
|
||||
|
||||
|
||||
class NetconfConnection(Connection):
    """Json-rpc proxy that executes netconf RPCs over a persistent socket."""

    def __init__(self, socket_path):
        super(NetconfConnection, self).__init__(socket_path)

    def __rpc__(self, name, *args, **kwargs):
        """Executes the json-rpc and returns the output received
        from remote device.
        :name: rpc method to be executed over connection plugin that implements jsonrpc 2.0
        :args: Ordered list of params passed as arguments to rpc method
        :kwargs: Dict of valid key, value pairs passed as arguments to rpc method

        For usage refer the respective connection plugin docs.
        """
        # NOTE(review): check_rc / ignore_warning are stored on the
        # instance as a side effect so parse_rpc_error() can read them.
        self.check_rc = kwargs.pop('check_rc', True)
        self.ignore_warning = kwargs.pop('ignore_warning', True)

        response = self._exec_jsonrpc(name, *args, **kwargs)
        if 'error' in response:
            rpc_error = response['error'].get('data')
            return self.parse_rpc_error(to_native(rpc_error, errors='surrogate_then_replace'))

        # successful replies carry the XML payload under 'result'
        return fromstring(to_native(response['result'], errors='surrogate_then_replace'))

    def parse_rpc_error(self, rpc_error):
        """Inspect an rpc-error payload and return warnings or raise.

        When check_rc is false the error is ignored (returns None).
        Otherwise warning-severity errors are collected (if
        ignore_warning is set) and anything else raises ConnectionError.
        """
        if self.check_rc:
            error_root = fromstring(rpc_error)
            # wrap in a synthetic root so findall() can search the tree
            root = Element('root')
            root.append(error_root)

            error_list = root.findall('.//nc:rpc-error', NS_MAP)
            if not error_list:
                # payload did not contain rpc-error elements; surface raw
                raise ConnectionError(to_text(rpc_error, errors='surrogate_then_replace'))

            warnings = []
            for error in error_list:
                message = error.find('./nc:error-message', NS_MAP).text
                severity = error.find('./nc:error-severity', NS_MAP).text

                if severity == 'warning' and self.ignore_warning:
                    warnings.append(message)
                else:
                    raise ConnectionError(to_text(rpc_error, errors='surrogate_then_replace'))
            return warnings
|
||||
203
lib/ansible/module_utils/network/common/network.py
Normal file
203
lib/ansible/module_utils/network/common/network.py
Normal file
@@ -0,0 +1,203 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c) 2015 Peter Sprygada, <psprygada@ansible.com>
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
from ansible.module_utils.network.common.parsing import Cli
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.six import iteritems
|
||||
|
||||
|
||||
# Argument spec shared by all legacy network transports; credentials fall
# back to ANSIBLE_NET_* environment variables when not given explicitly.
NET_TRANSPORT_ARGS = dict(
    host=dict(required=True),
    port=dict(type='int'),

    username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    password=dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD'])),
    ssh_keyfile=dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),

    authorize=dict(default=False, fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    auth_pass=dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS'])),

    provider=dict(type='dict', no_log=True),
    transport=dict(choices=list()),

    timeout=dict(default=10, type='int')
)

# Transport-specific arguments, populated via add_argument().
NET_CONNECTION_ARGS = dict()

# Registry of transport name -> connection class, populated via
# register_transport(); the '__default__' key holds the fallback class.
NET_CONNECTIONS = dict()
|
||||
|
||||
|
||||
def _transitional_argument_spec():
    """Return NET_TRANSPORT_ARGS with every argument made optional.

    Each per-argument spec is copied before setting ``required`` so that
    the shared module-level NET_TRANSPORT_ARGS dicts are not mutated as
    a side effect (the previous version flipped ``host`` to optional for
    every later consumer of the constant).
    """
    argument_spec = {}
    for key, value in iteritems(NET_TRANSPORT_ARGS):
        value = dict(value)
        value['required'] = False
        argument_spec[key] = value
    return argument_spec
|
||||
|
||||
|
||||
def to_list(val):
    """Coerce *val* to a list.

    Lists/tuples are shallow-copied, None becomes [] and any other
    value is wrapped in a single-element list.
    """
    if isinstance(val, (list, tuple)):
        return list(val)
    return [] if val is None else [val]
|
||||
|
||||
|
||||
class ModuleStub(object):
    """Minimal AnsibleModule stand-in exposing params and fail_json."""

    def __init__(self, argument_spec, fail_json):
        # seed params with each argument's declared default (or None)
        self.params = dict(
            (name, spec.get('default'))
            for name, spec in argument_spec.items()
        )
        self.fail_json = fail_json
|
||||
|
||||
|
||||
class NetworkError(Exception):
    """Transport-level failure; extra context is retained in ``kwargs``."""

    def __init__(self, msg, **kwargs):
        super(NetworkError, self).__init__(msg)
        # keep keyword context so callers can report connection details
        self.kwargs = kwargs
|
||||
|
||||
|
||||
class Config(object):
    """Thin facade over a transport connection's configuration methods."""

    def __init__(self, connection):
        self.connection = connection

    def __call__(self, commands, **kwargs):
        # push one or more configuration commands to the device
        return self.connection.configure(to_list(commands), **kwargs)

    def load_config(self, commands, **kwargs):
        """Load *commands* into the device configuration."""
        return self.connection.load_config(to_list(commands), **kwargs)

    def get_config(self, **kwargs):
        """Retrieve the device configuration."""
        return self.connection.get_config(**kwargs)

    def save_config(self):
        """Persist the running configuration on the device."""
        return self.connection.save_config()
|
||||
|
||||
|
||||
class NetworkModule(AnsibleModule):
    """AnsibleModule subclass for legacy network transports.

    Merges the shared transport argument spec into the module's own
    spec, instantiates the registered connection class for the selected
    transport and (optionally) connects during construction.
    """

    def __init__(self, *args, **kwargs):
        # pop our private kwarg before AnsibleModule sees it
        connect_on_load = kwargs.pop('connect_on_load', True)

        argument_spec = NET_TRANSPORT_ARGS.copy()
        argument_spec['transport']['choices'] = NET_CONNECTIONS.keys()
        argument_spec.update(NET_CONNECTION_ARGS.copy())

        # the caller's own spec wins over the shared transport spec
        if kwargs.get('argument_spec'):
            argument_spec.update(kwargs['argument_spec'])
        kwargs['argument_spec'] = argument_spec

        super(NetworkModule, self).__init__(*args, **kwargs)

        self.connection = None
        self._cli = None
        self._config = None

        try:
            # fall back to the transport registered as '__default__'
            # when the user did not specify one
            transport = self.params['transport'] or '__default__'
            cls = NET_CONNECTIONS[transport]
            self.connection = cls()
        except KeyError:
            self.fail_json(msg='Unknown transport or no default transport specified')
        except (TypeError, NetworkError) as exc:
            self.fail_json(msg=to_native(exc), exception=traceback.format_exc())

        if connect_on_load:
            self.connect()

    @property
    def cli(self):
        """Lazily created Cli helper bound to the active connection."""
        if not self.connected:
            self.connect()
        if self._cli:
            return self._cli
        self._cli = Cli(self.connection)
        return self._cli

    @property
    def config(self):
        """Lazily created Config helper bound to the active connection."""
        if not self.connected:
            self.connect()
        if self._config:
            return self._config
        self._config = Config(self.connection)
        return self._config

    @property
    def connected(self):
        # delegates to the underlying connection's private state flag
        return self.connection._connected

    def _load_params(self):
        """Backfill top-level params from the ``provider`` dict."""
        super(NetworkModule, self)._load_params()
        provider = self.params.get('provider') or dict()
        for key, value in provider.items():
            for args in [NET_TRANSPORT_ARGS, NET_CONNECTION_ARGS]:
                if key in args:
                    # provider values only fill params the user left unset
                    if self.params.get(key) is None and value is not None:
                        self.params[key] = value

    def connect(self):
        """Open the transport connection, authorizing when requested."""
        try:
            if not self.connected:
                self.connection.connect(self.params)
                if self.params['authorize']:
                    self.connection.authorize(self.params)
                self.log('connected to %s:%s using %s' % (self.params['host'],
                                                          self.params['port'], self.params['transport']))
        except NetworkError as exc:
            self.fail_json(msg=to_native(exc), exception=traceback.format_exc())

    def disconnect(self):
        """Close the transport connection when it is open."""
        try:
            if self.connected:
                self.connection.disconnect()
                self.log('disconnected from %s' % self.params['host'])
        except NetworkError as exc:
            self.fail_json(msg=to_native(exc), exception=traceback.format_exc())
|
||||
|
||||
|
||||
def register_transport(transport, default=False):
    """Class decorator that registers a connection under *transport*.

    When *default* is true the class also becomes the fallback used
    when no transport is specified.
    """
    def decorator(cls):
        NET_CONNECTIONS[transport] = cls
        if default:
            NET_CONNECTIONS['__default__'] = cls
        return cls
    return decorator
|
||||
|
||||
|
||||
def add_argument(key, value):
    # Register a transport-specific module argument so NetworkModule
    # includes it in its merged argument spec.
    NET_CONNECTION_ARGS[key] = value
|
||||
295
lib/ansible/module_utils/network/common/parsing.py
Normal file
295
lib/ansible/module_utils/network/common/parsing.py
Normal file
@@ -0,0 +1,295 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c) 2015 Peter Sprygada, <psprygada@ansible.com>
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import re
|
||||
import shlex
|
||||
import time
|
||||
|
||||
from ansible.module_utils.parsing.convert_bool import BOOLEANS_TRUE, BOOLEANS_FALSE
|
||||
from ansible.module_utils.six import string_types, text_type
|
||||
from ansible.module_utils.six.moves import zip
|
||||
|
||||
|
||||
def to_list(val):
    """Coerce *val* into a list.

    Lists and tuples are copied into a new list, ``None`` becomes an
    empty list, and any other value is wrapped in a one-element list.
    """
    if val is None:
        return []
    if isinstance(val, (list, tuple)):
        return list(val)
    return [val]
|
||||
|
||||
|
||||
class FailedConditionsError(Exception):
    """Raised when one or more wait-for conditions never became true.

    The unmet raw conditional strings are exposed on
    ``failed_conditions`` so callers can report them.
    """

    def __init__(self, msg, failed_conditions):
        self.failed_conditions = failed_conditions
        super(FailedConditionsError, self).__init__(msg)
|
||||
|
||||
|
||||
class FailedConditionalError(Exception):
    """Raised when a conditional could not be applied to a result.

    The offending raw conditional string is exposed on
    ``failed_conditional``.
    """

    def __init__(self, msg, failed_conditional):
        self.failed_conditional = failed_conditional
        super(FailedConditionalError, self).__init__(msg)
|
||||
|
||||
|
||||
class AddCommandError(Exception):
    """Raised when a command cannot be queued (e.g. a duplicate).

    The rejected command is exposed on ``command``.
    """

    def __init__(self, msg, command):
        self.command = command
        super(AddCommandError, self).__init__(msg)
|
||||
|
||||
|
||||
class AddConditionError(Exception):
    """Raised when a conditional string cannot be parsed and registered.

    The rejected conditional is exposed on ``condition``.
    """

    def __init__(self, msg, condition):
        self.condition = condition
        super(AddConditionError, self).__init__(msg)
|
||||
|
||||
|
||||
class Cli(object):
    """Convenience wrapper that builds ``Command`` objects and executes
    them over an open network connection, either immediately or from an
    internal queue."""

    def __init__(self, connection):
        self.connection = connection
        # fall back to 'text' when the connection declares no default output
        self.default_output = connection.default_output or 'text'
        self._commands = list()

    @property
    def commands(self):
        """String form of every queued command."""
        return [str(item) for item in self._commands]

    def __call__(self, commands, output=None):
        """Normalize *commands* and run them immediately, returning responses."""
        objects = [self.to_command(cmd, output) for cmd in to_list(commands)]
        return self.connection.run_commands(objects)

    def to_command(self, command, output=None, prompt=None, response=None, **kwargs):
        """Return *command* as a ``Command``; passthrough if it already is one.

        A plain-string *prompt* is escaped and compiled to a regex.
        """
        if isinstance(command, Command):
            return command
        output = output or self.default_output
        if isinstance(prompt, string_types):
            prompt = re.compile(re.escape(prompt))
        return Command(command, output, prompt=prompt, response=response, **kwargs)

    def add_commands(self, commands, output=None, **kwargs):
        """Queue commands for a later ``run_commands()`` call."""
        for cmd in commands:
            self._commands.append(self.to_command(cmd, output, **kwargs))

    def run_commands(self):
        """Run the queued commands, attach each response to its command,
        clear the queue, and return the responses."""
        responses = self.connection.run_commands(self._commands)
        for response, cmd in zip(responses, self._commands):
            cmd.response = response
        # wipe the queue so stale commands are not re-sent on a later call
        self._commands = list()
        return responses
|
||||
|
||||
|
||||
class Command(object):
    """Value object describing one CLI command to send to a device.

    Holds the command text, the desired output format, an optional
    prompt/response pair for interactive commands, and any extra
    keyword arguments in ``args``. ``response`` is filled in after
    execution.
    """

    def __init__(self, command, output=None, prompt=None, response=None,
                 **kwargs):
        self.command = command
        # kept separately so __str__ always reflects the original text
        self.command_string = command
        self.output = output
        self.prompt = prompt
        self.response = response
        self.args = kwargs

    def __str__(self):
        return self.command_string
|
||||
|
||||
|
||||
class CommandRunner(object):
    """Runs a batch of commands repeatedly until registered conditionals
    are satisfied (or retries are exhausted).

    Typical use: add_command() / add_conditional() several times, then
    run(). With match='all' every conditional must pass; with
    match='any' the first passing conditional ends the run.
    """

    def __init__(self, module):
        self.module = module

        self.items = list()
        # Conditional objects still waiting to be satisfied
        self.conditionals = set()

        # Command objects, in the order they will be executed
        self.commands = list()

        # retry policy: up to `retries` passes, sleeping `interval`
        # seconds between passes
        self.retries = 10
        self.interval = 1

        # 'all' or 'any' -- see class docstring
        self.match = 'all'

        self._default_output = module.connection.default_output

    def add_command(self, command, output=None, prompt=None, response=None,
                    **kwargs):
        """Queue a command; raises AddCommandError on a duplicate."""
        if command in [str(c) for c in self.commands]:
            raise AddCommandError('duplicated command detected', command=command)
        cmd = self.module.cli.to_command(command, output=output, prompt=prompt,
                                         response=response, **kwargs)
        self.commands.append(cmd)

    def get_command(self, command, output=None):
        """Return the recorded response for *command*; raises ValueError
        if the command was never queued. `output` is accepted but unused."""
        for cmd in self.commands:
            if cmd.command == command:
                return cmd.response
        raise ValueError("command '%s' not found" % command)

    def get_responses(self):
        """Responses of all queued commands, in queue order."""
        return [cmd.response for cmd in self.commands]

    def add_conditional(self, condition):
        """Parse and register a conditional string; raises
        AddConditionError when the string cannot be parsed."""
        try:
            self.conditionals.add(Conditional(condition))
        except AttributeError as exc:
            raise AddConditionError(msg=str(exc), condition=condition)

    def run(self):
        """Execute the command batch until the conditionals pass.

        Raises FailedConditionsError listing the unmet conditionals when
        retries are exhausted (the while/else fires only when the loop
        was never broken out of).
        """
        while self.retries > 0:
            self.module.cli.add_commands(self.commands)
            responses = self.module.cli.run_commands()

            # iterate over a copy: passing conditionals are removed
            for item in list(self.conditionals):
                if item(responses):
                    if self.match == 'any':
                        return item
                    self.conditionals.remove(item)

            if not self.conditionals:
                break

            time.sleep(self.interval)
            self.retries -= 1
        else:
            failed_conditions = [item.raw for item in self.conditionals]
            errmsg = 'One or more conditional statements have not been satisfied'
            raise FailedConditionsError(errmsg, failed_conditions)
|
||||
|
||||
|
||||
class Conditional(object):
    """Evaluate a single wait-for conditional against command results.

    A conditional is a three-token string ``"<key> <op> <value>"``, e.g.
    ``"result[0] contains Ethernet"``. The key is a dotted/bracketed path
    into the result structure, op is one of the OPERATORS aliases, and
    value is auto-cast to bool/int/float/text.
    """

    # canonical operator name -> accepted spellings in conditional strings
    OPERATORS = {
        'eq': ['eq', '=='],
        'neq': ['neq', 'ne', '!='],
        'gt': ['gt', '>'],
        'ge': ['ge', '>='],
        'lt': ['lt', '<'],
        'le': ['le', '<='],
        'contains': ['contains'],
        'matches': ['matches']
    }

    def __init__(self, conditional, encoding=None):
        # `encoding` is kept for backwards compatibility but is unused.
        self.raw = conditional

        try:
            key, op, val = shlex.split(conditional)
        except ValueError:
            raise ValueError('failed to parse conditional')

        self.key = key
        self.func = self._func(op)
        self.value = self._cast_value(val)

    def __call__(self, data):
        """Apply the conditional to *data* (a list of command responses)."""
        value = self.get_value(dict(result=data))
        return self.func(value)

    def _cast_value(self, value):
        """Cast the string comparison value to bool, float, int, or text."""
        if value in BOOLEANS_TRUE:
            return True
        elif value in BOOLEANS_FALSE:
            return False
        # BUGFIX: pattern was r'^\d+\.d+$' (missing backslash before the
        # second \d), so values like "1.5" were never cast to float and
        # fell through to text comparison.
        elif re.match(r'^\d+\.\d+$', value):
            return float(value)
        elif re.match(r'^\d+$', value):
            return int(value)
        else:
            return text_type(value)

    def _func(self, oper):
        """Map an operator spelling to the bound comparison method."""
        for func, operators in self.OPERATORS.items():
            if oper in operators:
                return getattr(self, func)
        raise AttributeError('unknown operator: %s' % oper)

    def get_value(self, result):
        """Resolve self.key inside *result*; raises FailedConditionalError
        when the path cannot be applied to the structure."""
        try:
            return self.get_json(result)
        except (IndexError, TypeError, AttributeError):
            msg = 'unable to apply conditional to result'
            raise FailedConditionalError(msg, self.raw)

    def get_json(self, result):
        """Walk a dotted/bracketed path (e.g. result[0].interfaces['eth0'])
        through nested dicts and lists."""
        # normalize quoted bracket access to dotted access
        string = re.sub(r"\[[\'|\"]", ".", self.key)
        string = re.sub(r"[\'|\"]\]", ".", string)
        # split on dots that are not inside a bracket expression
        parts = re.split(r'\.(?=[^\]]*(?:\[|$))', string)
        for part in parts:
            match = re.findall(r'\[(\S+?)\]', part)
            if match:
                key = part[:part.find('[')]
                result = result[key]
                for m in match:
                    # numeric subscripts index lists, others index dicts
                    try:
                        m = int(m)
                    except ValueError:
                        m = str(m)
                    result = result[m]
            else:
                result = result.get(part)
        return result

    def number(self, value):
        """Coerce *value* to int or float for ordered comparisons."""
        if '.' in str(value):
            return float(value)
        else:
            return int(value)

    def eq(self, value):
        return value == self.value

    def neq(self, value):
        return value != self.value

    def gt(self, value):
        return self.number(value) > self.value

    def ge(self, value):
        return self.number(value) >= self.value

    def lt(self, value):
        return self.number(value) < self.value

    def le(self, value):
        return self.number(value) <= self.value

    def contains(self, value):
        return str(self.value) in value

    def matches(self, value):
        match = re.search(self.value, value, re.M)
        return match is not None
|
||||
428
lib/ansible/module_utils/network/common/utils.py
Normal file
428
lib/ansible/module_utils/network/common/utils.py
Normal file
@@ -0,0 +1,428 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import re
|
||||
import ast
|
||||
import operator
|
||||
import socket
|
||||
|
||||
from itertools import chain
|
||||
|
||||
from ansible.module_utils.six import iteritems, string_types
|
||||
from ansible.module_utils.basic import AnsibleFallbackNotFound
|
||||
|
||||
try:
|
||||
from jinja2 import Environment, StrictUndefined
|
||||
from jinja2.exceptions import UndefinedError
|
||||
HAS_JINJA2 = True
|
||||
except ImportError:
|
||||
HAS_JINJA2 = False
|
||||
|
||||
|
||||
# Comparison names accepted by conditional() even when the `operator`
# module has no attribute of that name.
OPERATORS = frozenset(['ge', 'gt', 'eq', 'neq', 'lt', 'le'])
# (alias, canonical) pairs: user-facing spellings mapped onto the
# `operator` module's function names.
ALIASES = frozenset([('min', 'ge'), ('max', 'le'), ('exactly', 'eq'), ('neq', 'ne')])
|
||||
|
||||
|
||||
def to_list(val):
    """Coerce *val* into a list.

    Lists, tuples and sets are copied into a new list, ``None`` becomes
    an empty list, and any other value is wrapped in a one-element list.
    """
    if val is None:
        return []
    if isinstance(val, (list, tuple, set)):
        return list(val)
    return [val]
|
||||
|
||||
|
||||
def sort_list(val):
    """Return a sorted copy of *val* if it is a list; otherwise return
    *val* unchanged (used to compare lists order-insensitively)."""
    return sorted(val) if isinstance(val, list) else val
|
||||
|
||||
|
||||
class Entity(object):
    """Apply an Ansible-style argument spec to a dict (or scalar).

    Takes a value and normalizes it against the configured attribute
    spec: scalars are expanded via the 'key' attribute, defaults and
    fallbacks are applied, required/choices constraints are enforced,
    and values are type-checked with the module's checkers.

    Example::

        argument_spec = dict(
            command=dict(key=True),
            display=dict(default='text', choices=['text', 'json']),
            validate=dict(type='bool')
        )
        transform = Entity(module, argument_spec)
        result = transform(dict(command='foo'))
        # {'command': 'foo', 'display': 'text', 'validate': None}

    Supported argument spec keys:
        * key - marks the attribute a bare scalar maps onto (implies required)
        * read_from - copy the spec for this attribute from the module's
          argument_spec
        * required - a value must be present
        * type - type name resolved through the module's type checkers
        * fallback - (callable, args..., kwargs) fallback strategy
        * choices - set of valid values
        * default - default value
    """

    def __init__(self, module, attrs=None, args=None, keys=None, from_argspec=False):
        # `args` lists attribute names to create with empty specs;
        # `keys` marks which of those are key attributes.
        args = [] if args is None else args

        self._attributes = attrs or {}
        self._module = module

        for arg in args:
            self._attributes[arg] = dict()
            if from_argspec:
                self._attributes[arg]['read_from'] = arg
            if keys and arg in keys:
                self._attributes[arg]['key'] = True

        self.attr_names = frozenset(self._attributes.keys())

        _has_key = False

        for name, attr in iteritems(self._attributes):
            if attr.get('read_from'):
                # pull missing spec entries from the module argument_spec
                if attr['read_from'] not in self._module.argument_spec:
                    module.fail_json(msg='argument %s does not exist' % attr['read_from'])
                spec = self._module.argument_spec.get(attr['read_from'])
                for key, value in iteritems(spec):
                    if key not in attr:
                        attr[key] = value

            if attr.get('key'):
                # at most one attribute may be the key; a key is implicitly
                # required
                if _has_key:
                    module.fail_json(msg='only one key value can be specified')
                _has_key = True
                attr['required'] = True

    def serialize(self):
        # Expose the (possibly augmented) attribute spec.
        return self._attributes

    def to_dict(self, value):
        # Expand a scalar into a full dict: the scalar lands on the key
        # attribute, every other attribute gets its default.
        obj = {}
        for name, attr in iteritems(self._attributes):
            if attr.get('key'):
                obj[name] = value
            else:
                obj[name] = attr.get('default')
        return obj

    def __call__(self, value, strict=True):
        # Normalize *value* in place and return it. With strict=True,
        # keys outside the spec cause a failure.
        if not isinstance(value, dict):
            value = self.to_dict(value)

        if strict:
            unknown = set(value).difference(self.attr_names)
            if unknown:
                self._module.fail_json(msg='invalid keys: %s' % ','.join(unknown))

        for name, attr in iteritems(self._attributes):
            if value.get(name) is None:
                value[name] = attr.get('default')

            # fallback fires when no (truthy) value was supplied
            if attr.get('fallback') and not value.get(name):
                fallback = attr.get('fallback', (None,))
                fallback_strategy = fallback[0]
                fallback_args = []
                fallback_kwargs = {}
                if fallback_strategy is not None:
                    for item in fallback[1:]:
                        if isinstance(item, dict):
                            fallback_kwargs = item
                        else:
                            fallback_args = item
                    try:
                        value[name] = fallback_strategy(*fallback_args, **fallback_kwargs)
                    except AnsibleFallbackNotFound:
                        continue

            if attr.get('required') and value.get(name) is None:
                self._module.fail_json(msg='missing required attribute %s' % name)

            if 'choices' in attr:
                if value[name] not in attr['choices']:
                    self._module.fail_json(msg='%s must be one of %s, got %s' % (name, ', '.join(attr['choices']), value[name]))

            if value[name] is not None:
                # validate via the module's type checker (raises on mismatch)
                value_type = attr.get('type', 'str')
                type_checker = self._module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
                type_checker(value[name])
            elif value.get(name):
                # NOTE(review): this branch appears unreachable -- value[name]
                # is None here, so value.get(name) is falsy; verify intent.
                value[name] = self._module.params[name]

        return value
|
||||
|
||||
|
||||
class EntityCollection(Entity):
    """Extends ``Entity`` to normalize a list of dicts instead of one."""

    def __call__(self, iterable, strict=True):
        # A None iterable means "use the module's own params as the one
        # and only item".
        if iterable is None:
            iterable = [super(EntityCollection, self).__call__(self._module.params, strict)]

        if not isinstance(iterable, (list, tuple)):
            self._module.fail_json(msg='value must be an iterable')

        # normalize each item through Entity.__call__
        return [(super(EntityCollection, self).__call__(i, strict)) for i in iterable]
|
||||
|
||||
|
||||
# these two are for backwards compatibility and can be removed once all of the
|
||||
# modules that use them are updated
|
||||
class ComplexDict(Entity):
    # Backwards-compatible alias for Entity with the (attrs, module)
    # argument order swapped; prefer Entity in new code.
    def __init__(self, attrs, module, *args, **kwargs):
        super(ComplexDict, self).__init__(module, attrs, *args, **kwargs)
|
||||
|
||||
|
||||
class ComplexList(EntityCollection):
    # Backwards-compatible alias for EntityCollection with the
    # (attrs, module) argument order swapped; prefer EntityCollection.
    def __init__(self, attrs, module, *args, **kwargs):
        super(ComplexList, self).__init__(module, attrs, *args, **kwargs)
|
||||
|
||||
|
||||
def dict_diff(base, comparable):
    """Generate a dict of the differences between two dicts.

    Scalar keys appear with their updated value when they differ
    (lists compare order-insensitively and replace wholesale); nested
    dicts are diffed recursively; keys only present in *comparable*
    are included as-is. Keys missing from *comparable* are omitted.

    :param base: dict object to base the diff on
    :param comparable: dict object to compare against base
    :returns: new dict object with differences
    """
    if not isinstance(base, dict):
        raise AssertionError("`base` must be of type <dict>")
    if not isinstance(comparable, dict):
        raise AssertionError("`comparable` must be of type <dict>")

    def _normalized(value):
        # lists compare order-insensitively
        return sorted(value) if isinstance(value, list) else value

    updates = dict()

    for key, value in base.items():
        if isinstance(value, dict):
            if comparable.get(key) is not None:
                updates[key] = dict_diff(value, comparable[key])
        else:
            other_value = comparable.get(key)
            if other_value is not None and _normalized(value) != _normalized(other_value):
                updates[key] = other_value

    # keys that only exist in comparable are always part of the diff
    for key in set(comparable) - set(base):
        updates[key] = comparable.get(key)

    return updates
|
||||
|
||||
|
||||
def dict_merge(base, other):
    """Return a new dict combining *base* and *other*.

    When a key exists in both: nested dicts are merged recursively,
    lists are unioned (duplicates removed, order unspecified), and
    scalars take the value from *other*. Keys only in one input are
    copied through.

    :param base: dict object to serve as base
    :param other: dict object to combine with base
    :returns: new combined dict object
    """
    if not isinstance(base, dict):
        raise AssertionError("`base` must be of type <dict>")
    if not isinstance(other, dict):
        raise AssertionError("`other` must be of type <dict>")

    def _normalized(value):
        # lists compare order-insensitively
        return sorted(value) if isinstance(value, list) else value

    combined = dict()

    for key, value in base.items():
        if key not in other:
            combined[key] = value
            continue

        item = other.get(key)
        if isinstance(value, dict):
            combined[key] = dict_merge(value, item) if item is not None else item
        elif isinstance(value, list):
            combined[key] = list(set(chain(value, item))) if item is not None else item
        elif item is not None:
            # keep the base value when equivalent, else take the override
            combined[key] = item if _normalized(value) != _normalized(item) else value
        else:
            combined[key] = item

    # keys that only exist in other are copied through
    for key in set(other) - set(base):
        combined[key] = other.get(key)

    return combined
|
||||
|
||||
|
||||
def conditional(expr, val, cast=None):
    """Evaluate a comparison expression against *val*.

    *expr* is either "op(arg)" -- e.g. "ge(2)", "min(5)" -- or a bare
    value, which is compared for equality. The argument is cast to the
    type of *val* (or via *cast* when given) before comparing.
    """
    # operator names allowed even though they match no `operator` attr,
    # and (alias, canonical) spellings -- mirror module-level
    # OPERATORS/ALIASES
    known_ops = frozenset(['ge', 'gt', 'eq', 'neq', 'lt', 'le'])
    alias_map = frozenset([('min', 'ge'), ('max', 'le'), ('exactly', 'eq'), ('neq', 'ne')])

    match = re.match(r'^(.+)\((.+)\)$', str(expr), re.I)
    if match:
        op, arg = match.groups()
    else:
        # bare value means an equality test; spaces would be ambiguous
        if ' ' in str(expr):
            raise AssertionError('invalid expression: cannot contain spaces')
        op, arg = 'eq', expr

    if cast is None and val is not None:
        arg = type(val)(arg)
    elif callable(cast):
        arg = cast(arg)
        val = cast(val)

    for alias, canonical in alias_map:
        if op == alias:
            op = canonical
            break

    if not hasattr(operator, op) and op not in known_ops:
        raise ValueError('unknown operator: %s' % op)

    return getattr(operator, op)(val, arg)
|
||||
|
||||
|
||||
def ternary(value, true_val, false_val):
    """Jinja-style ternary: return *true_val* when *value* is truthy,
    otherwise *false_val*."""
    return true_val if value else false_val
|
||||
|
||||
|
||||
def remove_default_spec(spec):
    """Strip the 'default' entry from every option dict in *spec*, in
    place. Used when a provider spec is reused at the top level so that
    defaults are not applied twice."""
    for options in spec.values():
        options.pop('default', None)
|
||||
|
||||
|
||||
def validate_ip_address(address):
    """Return True when *address* is a valid dotted-quad IPv4 address.

    The explicit dot-count check rejects shorthand forms such as
    "10.1" that socket.inet_aton would otherwise accept.
    """
    try:
        socket.inet_aton(address)
    except socket.error:
        return False
    else:
        return address.count('.') == 3
|
||||
|
||||
|
||||
def validate_prefix(prefix):
    """Return True when *prefix* is empty/None or a valid IPv4 prefix
    length (0-32)."""
    return not prefix or 0 <= int(prefix) <= 32
|
||||
|
||||
|
||||
def load_provider(spec, args):
    """Build the provider dict for a network module.

    For every key in *spec* missing from the provider dict, the value is
    resolved in priority order: top-level *args*, the spec's 'fallback'
    strategy, the spec's 'default', then None. The result is stored back
    on args['provider'] and returned.
    """
    provider = args.get('provider', {})
    for key, options in spec.items():
        if key in provider:
            continue
        if key in args:
            provider[key] = args[key]
        elif 'fallback' in options:
            provider[key] = _fallback(options['fallback'])
        elif 'default' in options:
            provider[key] = options['default']
        else:
            provider[key] = None
    args['provider'] = provider
    return provider
|
||||
|
||||
|
||||
def _fallback(fallback):
|
||||
strategy = fallback[0]
|
||||
args = []
|
||||
kwargs = {}
|
||||
|
||||
for item in fallback[1:]:
|
||||
if isinstance(item, dict):
|
||||
kwargs = item
|
||||
else:
|
||||
args = item
|
||||
try:
|
||||
return strategy(*args, **kwargs)
|
||||
except AnsibleFallbackNotFound:
|
||||
pass
|
||||
|
||||
|
||||
class Template:
    """Render Jinja2 template strings against a variable dict.

    Non-template input is returned untouched; rendered output is
    round-tripped through ast.literal_eval so native Python values
    (lists, dicts, numbers) come back as such rather than strings.
    Requires jinja2.
    """

    def __init__(self):
        if not HAS_JINJA2:
            raise ImportError("jinja2 is required but does not appear to be installed.  "
                              "It can be installed using `pip install jinja2`")

        self.env = Environment(undefined=StrictUndefined)
        self.env.filters.update({'ternary': ternary})

    def __call__(self, value, variables=None, fail_on_undefined=True):
        """Render *value* with *variables*.

        Returns None (instead of raising) for undefined variables when
        fail_on_undefined is False; an empty rendering also yields None.
        """
        variables = variables or {}

        if not self.contains_vars(value):
            return value

        try:
            value = self.env.from_string(value).render(variables)
        except UndefinedError:
            if not fail_on_undefined:
                return None
            raise

        if value:
            try:
                # recover native python types from the rendered string
                return ast.literal_eval(value)
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt raised during evaluation
            except Exception:
                return str(value)
        else:
            return None

    def contains_vars(self, data):
        """Return True when *data* contains any Jinja2 block, variable,
        or comment start marker."""
        if isinstance(data, string_types):
            for marker in (self.env.block_start_string, self.env.variable_start_string, self.env.comment_start_string):
                if marker in data:
                    return True
        return False
|
||||
153
lib/ansible/module_utils/network/dellos10/dellos10.py
Normal file
153
lib/ansible/module_utils/network/dellos10/dellos10.py
Normal file
@@ -0,0 +1,153 @@
|
||||
#
|
||||
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
|
||||
# (c) 2017 Red Hat, Inc
|
||||
#
|
||||
# Copyright (c) 2016 Dell Inc.
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import re
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.network.common.config import NetworkConfig, ConfigLine
|
||||
|
||||
# Per-run cache of device output keyed by the exact 'show running-config'
# command string; populated by get_config().
_DEVICE_CONFIGS = {}

# Confirmation prompts the device may present while configuration is
# applied; load_config() answers each with 'yes'.
WARNING_PROMPTS_RE = [
    r"[\r\n]?\[confirm yes/no\]:\s?$",
    r"[\r\n]?\[y/n\]:\s?$",
    r"[\r\n]?\[yes/no\]:\s?$"
]

# Connection options accepted under the 'provider' argument; several
# fall back to ANSIBLE_NET_* environment variables.
dellos10_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
}
# Argument spec shared by all dellos10 modules.
dellos10_argument_spec = {
    'provider': dict(type='dict', options=dellos10_provider_spec),
}
# Deprecated top-level copies of the provider options, scheduled for
# removal in Ansible 2.9.
dellos10_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(removed_in_version=2.9, type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),
    'timeout': dict(removed_in_version=2.9, type='int'),
}
dellos10_argument_spec.update(dellos10_top_spec)
|
||||
|
||||
|
||||
def check_args(module, warnings):
    # Placeholder kept for API parity with other network platforms;
    # dellos10 performs no extra argument validation.
    pass
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the device running-config, memoized per command string.

    *flags* are appended to 'show running-config'. Results are cached in
    the module-level _DEVICE_CONFIGS so repeated calls within one run do
    not hit the device again. Fails the module when the command errors.
    """
    flags = [] if flags is None else flags

    cmd = ('show running-config ' + ' '.join(flags)).strip()

    if cmd in _DEVICE_CONFIGS:
        return _DEVICE_CONFIGS[cmd]

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_or_strict'))
    cfg = to_text(out, errors='surrogate_or_strict').strip()
    _DEVICE_CONFIGS[cmd] = cfg
    return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalize raw commands into dicts with 'command', 'prompt' and
    'answer' keys, validated against the module's argument checkers."""
    transform = ComplexList(dict(
        command=dict(key=True),
        prompt=dict(),
        answer=dict()
    ), module)
    return transform(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute *commands* on the device and return their text responses.

    Each command is normalized via to_commands() and sent as JSON.
    With check_rc=True a non-zero return code fails the module.
    """
    responses = list()
    for cmd in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(cmd))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
        responses.append(to_text(out, errors='surrogate_or_strict'))
    return responses
|
||||
|
||||
|
||||
def load_config(module, commands):
    """Apply configuration *commands* to the device and commit them.

    Enters configuration mode, sends each command (answering any
    confirmation prompts with 'yes'), issues 'commit', and returns to
    exec mode. 'end' entries are skipped because the function exits
    configuration mode itself. Fails the module on any command error.
    """
    rc, out, err = exec_command(module, 'configure terminal')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode', err=to_text(err, errors='surrogate_or_strict'))

    # BUGFIX: previously 'commit' was appended directly to the caller's
    # list (mutating the caller's data, and crashing on a bare string);
    # build a fresh list instead.
    commands = to_list(commands) + ['commit']

    for command in commands:
        if command == 'end':
            continue
        cmd = {'command': command, 'prompt': WARNING_PROMPTS_RE, 'answer': 'yes'}
        rc, out, err = exec_command(module, module.jsonify(cmd))
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), command=command, rc=rc)

    exec_command(module, 'end')
|
||||
|
||||
|
||||
def get_sublevel_config(running_config, module):
    """Extract the config section addressed by module.params['parents']
    from *running_config* and return it as newline-joined text."""
    contents = list()
    current_config_contents = list()
    # parse the raw text so sections can be addressed hierarchically
    running_config = NetworkConfig(contents=running_config, indent=1)
    obj = running_config.get_object(module.params['parents'])
    if obj:
        contents = obj.children
    # prepend the parent lines so the result is a complete sub-tree
    contents[:0] = module.params['parents']

    indent = 0
    for c in contents:
        if isinstance(c, str):
            # parent lines are plain strings; indent all but the first
            current_config_contents.append(c.rjust(len(c) + indent, ' '))
        if isinstance(c, ConfigLine):
            # child entries keep their original raw text
            current_config_contents.append(c.raw)
        # NOTE(review): indent becomes 1 after the first element and never
        # grows -- presumably intended for one level of nesting; verify
        indent = 1
    sublevel_config = '\n'.join(current_config_contents)

    return sublevel_config
|
||||
252
lib/ansible/module_utils/network/dellos6/dellos6.py
Normal file
252
lib/ansible/module_utils/network/dellos6/dellos6.py
Normal file
@@ -0,0 +1,252 @@
|
||||
#
|
||||
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
|
||||
#
|
||||
# Copyright (c) 2016 Dell Inc.
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import re
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.network.common.config import NetworkConfig, ConfigLine, ignore_line, DEFAULT_COMMENT_TOKENS
|
||||
|
||||
# Cache of device running-config output, keyed by the exact command string used.
_DEVICE_CONFIGS = {}


# Confirmation prompts the device may emit mid-command; load_config answers 'yes'.
WARNING_PROMPTS_RE = [
    r"[\r\n]?\[confirm yes/no\]:\s?$",
    r"[\r\n]?\[y/n\]:\s?$",
    r"[\r\n]?\[yes/no\]:\s?$"
]


# Connection options accepted under the 'provider' dict argument.
dellos6_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
}
dellos6_argument_spec = {
    'provider': dict(type='dict', options=dellos6_provider_spec),
}
# The same options accepted at top level; deprecated (removal scheduled for 2.9).
dellos6_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(removed_in_version=2.9, type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),
    'timeout': dict(removed_in_version=2.9, type='int'),
}
dellos6_argument_spec.update(dellos6_top_spec)
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """No extra argument validation is required for dellos6 modules."""
    return None
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the device running-config, using the module-level cache.

    The full command string (``show running-config`` plus any flags) is the
    cache key; on a miss the config is fetched from the device, stripped,
    and stored before being returned.
    """
    flags = flags or []
    cmd = ('show running-config ' + ' '.join(flags)).strip()

    if cmd in _DEVICE_CONFIGS:
        return _DEVICE_CONFIGS[cmd]

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        module.fail_json(msg='unable to retrieve current config',
                         stderr=to_text(err, errors='surrogate_or_strict'))
    cfg = to_text(out, errors='surrogate_or_strict').strip()
    _DEVICE_CONFIGS[cmd] = cfg
    return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalise *commands* into command/prompt/answer dicts via ComplexList."""
    command_spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict()
    }
    return ComplexList(command_spec, module)(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute each command on the device and return the textual responses.

    When *check_rc* is true a non-zero return code aborts the module with
    the device's stderr as the failure message.
    """
    responses = list()
    for item in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(item))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
        responses.append(to_text(out, errors='surrogate_or_strict'))
    return responses
|
||||
|
||||
|
||||
def load_config(module, commands):
    """Enter configuration mode, push *commands*, then exit with 'end'.

    Any warning prompt the device raises is answered 'yes'.  Literal 'end'
    entries are skipped because a single 'end' is sent after the loop.
    """
    rc, out, err = exec_command(module, 'configure terminal')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode',
                         err=to_text(err, errors='surrogate_or_strict'))

    for command in to_list(commands):
        if command == 'end':
            continue
        wrapped = {'command': command, 'prompt': WARNING_PROMPTS_RE, 'answer': 'yes'}
        rc, out, err = exec_command(module, module.jsonify(wrapped))
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'),
                             command=command, rc=rc)
    exec_command(module, 'end')
|
||||
|
||||
|
||||
def get_sublevel_config(running_config, module):
    """Return a Dellos6NetworkConfig containing the raw lines under ``parents``."""
    parents = module.params['parents']
    sublevel_config = Dellos6NetworkConfig(indent=0)

    raw_lines = []
    obj = running_config.get_object(parents)
    if obj:
        for child in obj._children:
            if isinstance(child, ConfigLine):
                raw_lines.append(child.raw)

    sublevel_config.add(raw_lines, parents)
    return sublevel_config
|
||||
|
||||
|
||||
def os6_parse(lines, indent=None, comment_tokens=None):
    """Parse dellos6 CLI text into a flat list of ConfigLine objects.

    Unlike the generic NetworkConfig parser, dellos6 output is not
    indentation based: sub-mode blocks are opened by one of the commands
    below and closed by a literal ``exit`` line, so parent/child links are
    tracked explicitly while scanning.
    """
    # Commands that open a sub-configuration level (block parents).
    sublevel_cmds = [
        re.compile(r'^vlan.*$'),
        re.compile(r'^stack.*$'),
        re.compile(r'^interface.*$'),
        re.compile(r'datacenter-bridging.*$'),
        re.compile(r'line (console|telnet|ssh).*$'),
        re.compile(r'ip ssh !(server).*$'),
        re.compile(r'ip (dhcp|vrf).*$'),
        re.compile(r'(ip|mac|management|arp) access-list.*$'),
        re.compile(r'ipv6 (dhcp|router).*$'),
        re.compile(r'mail-server.*$'),
        re.compile(r'vpc domain.*$'),
        re.compile(r'router.*$'),
        re.compile(r'route-map.*$'),
        re.compile(r'policy-map.*$'),
        re.compile(r'class-map match-all.*$'),
        re.compile(r'captive-portal.*$'),
        re.compile(r'admin-profile.*$'),
        re.compile(r'link-dependency group.*$'),
        re.compile(r'banner motd.*$'),
        re.compile(r'openflow.*$'),
        re.compile(r'support-assist.*$'),
        re.compile(r'template.*$'),
        re.compile(r'address-family.*$'),
        re.compile(r'spanning-tree mst configuration.*$'),
        re.compile(r'logging.*$'),
        re.compile(r'(radius-server|tacacs-server) host.*$')]

    childline = re.compile(r'^exit$')  # closes the current sub-level
    config = list()        # flat output list of every ConfigLine seen
    parent = list()        # stack of currently-open parent ConfigLines
    children = []          # per-depth lists of child ConfigLines
    parent_match = False
    for line in str(lines).split('\n'):
        # strip brace/semicolon noise before matching
        text = str(re.sub(r'([{};])', '', line)).strip()
        cfg = ConfigLine(text)
        cfg.raw = line
        if not text or ignore_line(text, comment_tokens):
            # blank/comment line resets the open block state
            parent = list()
            children = []
            continue

        else:
            parent_match = False
            # handle sublevel parent
            for pr in sublevel_cmds:
                if pr.match(line):
                    if len(parent) != 0:
                        cfg._parents.extend(parent)
                    parent.append(cfg)
                    config.append(cfg)
                    if children:
                        children.insert(len(parent) - 1, [])
                        children[len(parent) - 2].append(cfg)
                    parent_match = True
                    # NOTE(review): this `continue` only advances the inner
                    # regex loop, so remaining patterns are still tested;
                    # `break` may have been intended — verify.
                    continue
            # handle exit
            if childline.match(line):
                if children:
                    # fold the collected children into the closing parent,
                    # then propagate them one level up when nested
                    parent[len(children) - 1]._children.extend(children[len(children) - 1])
                    if len(children) > 1:
                        parent[len(children) - 2]._children.extend(parent[len(children) - 1]._children)
                    cfg._parents.extend(parent)
                    children.pop()
                    parent.pop()
                if not children:
                    # outermost exit: close every remaining parent
                    children = list()
                    if parent:
                        cfg._parents.extend(parent)
                    parent = list()
                config.append(cfg)
            # handle sublevel children
            elif parent_match is False and len(parent) > 0:
                if not children:
                    cfglist = [cfg]
                    children.append(cfglist)
                else:
                    children[len(parent) - 1].append(cfg)
                cfg._parents.extend(parent)
                config.append(cfg)
            # handle global commands
            elif not parent:
                config.append(cfg)
    return config
|
||||
|
||||
|
||||
class Dellos6NetworkConfig(NetworkConfig):
    """NetworkConfig specialised for dellos6's exit-delimited CLI syntax."""

    def load(self, contents):
        """Parse *contents* with the dellos6-specific os6_parse parser."""
        self._items = os6_parse(contents, self._indent, DEFAULT_COMMENT_TOKENS)

    def _diff_line(self, other, path=None):
        """Return the lines of self missing from *other*.

        An ``exit`` line is only kept when the diff already contains a line
        from the same block (same parent chain), so block terminators are
        emitted exactly for the blocks being changed.
        """
        diff = list()
        for item in self.items:
            if str(item) == "exit":
                for diff_item in diff:
                    if diff_item._parents:
                        if item._parents == diff_item._parents:
                            diff.append(item)
                            break
                    else:
                        # a top-level changed line: keep the exit as well
                        diff.append(item)
                        break
            elif item not in other:
                diff.append(item)
        return diff
|
||||
152
lib/ansible/module_utils/network/dellos9/dellos9.py
Normal file
152
lib/ansible/module_utils/network/dellos9/dellos9.py
Normal file
@@ -0,0 +1,152 @@
|
||||
#
|
||||
# (c) 2015 Peter Sprygada, <psprygada@ansible.com>
|
||||
# (c) 2017 Red Hat, Inc
|
||||
#
|
||||
# Copyright (c) 2016 Dell Inc.
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import re
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.network.common.config import NetworkConfig, ConfigLine
|
||||
|
||||
# Cache of device running-config output, keyed by the exact command string used.
_DEVICE_CONFIGS = {}

# Confirmation prompts the device may emit mid-command; load_config answers 'yes'.
WARNING_PROMPTS_RE = [
    r"[\r\n]?\[confirm yes/no\]:\s?$",
    r"[\r\n]?\[y/n\]:\s?$",
    r"[\r\n]?\[yes/no\]:\s?$"
]

# Connection options accepted under the 'provider' dict argument.
dellos9_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
}
dellos9_argument_spec = {
    'provider': dict(type='dict', options=dellos9_provider_spec),
}
# The same options accepted at top level; deprecated (removal scheduled for 2.9).
dellos9_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(removed_in_version=2.9, type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),
    'timeout': dict(removed_in_version=2.9, type='int'),
}
dellos9_argument_spec.update(dellos9_top_spec)
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """Hook for extra argument validation; dellos9 needs none."""
    return None
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Fetch the running-config, caching per exact command string."""
    parts = ['show running-config']
    parts.extend(flags or [])
    cmd = ' '.join(parts).strip()

    cached = _DEVICE_CONFIGS.get(cmd)
    if cached is not None:
        return cached

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        module.fail_json(msg='unable to retrieve current config',
                         stderr=to_text(err, errors='surrogate_or_strict'))
    cfg = to_text(out, errors='surrogate_or_strict').strip()
    _DEVICE_CONFIGS[cmd] = cfg
    return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Coerce *commands* into command/prompt/answer entries using ComplexList."""
    entry_spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict()
    }
    normalise = ComplexList(entry_spec, module)
    return normalise(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Run each normalised command; fail the module on non-zero rc if *check_rc*."""
    results = []
    for entry in to_commands(module, to_list(commands)):
        rc, out, err = exec_command(module, module.jsonify(entry))
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
        results.append(to_text(out, errors='surrogate_or_strict'))
    return results
|
||||
|
||||
|
||||
def load_config(module, commands):
    """Enter config mode, push *commands* answering warning prompts, then 'end'."""
    rc, out, err = exec_command(module, 'configure terminal')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode',
                         err=to_text(err, errors='surrogate_or_strict'))

    for command in to_list(commands):
        if command == 'end':
            # a single 'end' is issued after the loop
            continue
        prompt_entry = {'command': command, 'prompt': WARNING_PROMPTS_RE, 'answer': 'yes'}
        rc, out, err = exec_command(module, module.jsonify(prompt_entry))
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'),
                             command=command, rc=rc)

    exec_command(module, 'end')
|
||||
|
||||
|
||||
def get_sublevel_config(running_config, module):
    """Return the config section under ``parents`` as a newline-joined string.

    The first parent line is emitted flush-left; each subsequent plain-string
    line gets a one-space indent, and ConfigLine children keep their raw text.
    """
    contents = list()
    current_config_contents = list()
    running_config = NetworkConfig(contents=running_config, indent=1)
    obj = running_config.get_object(module.params['parents'])
    if obj:
        contents = obj.children
    # prepend the parent command lines ahead of the child lines
    contents[:0] = module.params['parents']

    indent = 0
    for c in contents:
        if isinstance(c, str):
            # NOTE(review): on Python 2, unicode parent strings would not
            # match `str` here — verify against supported interpreters.
            current_config_contents.append(c.rjust(len(c) + indent, ' '))
        if isinstance(c, ConfigLine):
            current_config_contents.append(c.raw)
        indent = 1
    sublevel_config = '\n'.join(current_config_contents)

    return sublevel_config
|
||||
0
lib/ansible/module_utils/network/enos/__init__.py
Normal file
0
lib/ansible/module_utils/network/enos/__init__.py
Normal file
167
lib/ansible/module_utils/network/enos/enos.py
Normal file
167
lib/ansible/module_utils/network/enos/enos.py
Normal file
@@ -0,0 +1,167 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by
|
||||
# Ansible still belong to the author of the module, and may assign their own
|
||||
# license to the complete work.
|
||||
#
|
||||
# Copyright (C) 2017 Lenovo, Inc.
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Contains utility methods
|
||||
# Lenovo Networking
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, EntityCollection
|
||||
from ansible.module_utils.connection import Connection, exec_command
|
||||
from ansible.module_utils.connection import ConnectionError
|
||||
|
||||
# Cache of device running-config output, keyed by the exact command string used.
_DEVICE_CONFIGS = {}
# Lazily created persistent Connection shared by all helpers in this module.
_CONNECTION = None

# Connection options accepted under the 'provider' dict argument.
enos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
    'context': dict(),
    'passwords': dict()
}

enos_argument_spec = {
    'provider': dict(type='dict', options=enos_provider_spec),
}

# Shape of each entry accepted by to_commands/run_commands.
command_spec = {
    'command': dict(key=True),
    'prompt': dict(),
    'answer': dict()
}
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the ``provider`` option spec for enos modules."""
    return enos_provider_spec
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """No additional argument checks are needed for enos modules."""
    return None
|
||||
|
||||
|
||||
def get_connection(module):
    """Return the cached persistent Connection, creating it on first use.

    On creation, if the module supplies a ``context`` parameter the
    connection is switched to it ('changeto system' or
    'changeto context <name>') before being returned.
    """
    global _CONNECTION
    if _CONNECTION:
        return _CONNECTION
    _CONNECTION = Connection(module._socket_path)

    context = None
    try:
        context = module.params['context']
    except KeyError:
        context = None

    if context:
        if context == 'system':
            command = 'changeto system'
        else:
            command = 'changeto context %s' % context
        _CONNECTION.get(command)

    return _CONNECTION
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Fetch (and cache) the device configuration.

    When the ``passwords`` parameter is set the full system running-config
    is requested; otherwise ``show running-config`` plus any flags is used.
    """
    if flags is None:
        flags = []

    if module.params['passwords']:
        cmd = 'more system:running-config'
    else:
        cmd = ('show running-config ' + ' '.join(flags)).strip()

    if cmd not in _DEVICE_CONFIGS:
        conn = get_connection(module)
        out = conn.get(cmd)
        _DEVICE_CONFIGS[cmd] = to_text(out, errors='surrogate_then_replace').strip()
    return _DEVICE_CONFIGS[cmd]
|
||||
|
||||
|
||||
def to_commands(module, commands):
|
||||
if not isinstance(commands, list):
|
||||
raise AssertionError('argument must be of type <list>')
|
||||
|
||||
transform = EntityCollection(module, command_spec)
|
||||
commands = transform(commands)
|
||||
|
||||
for index, item in enumerate(commands):
|
||||
if module.check_mode and not item['command'].startswith('show'):
|
||||
module.warn('only show commands are supported when using check '
|
||||
'mode, not executing `%s`' % item['command'])
|
||||
|
||||
return commands
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Run commands over the persistent connection and return text responses."""
    connection = get_connection(module)
    normalised = to_commands(module, to_list(commands))
    return [to_text(connection.get(**cmd), errors='surrogate_then_replace')
            for cmd in normalised]
|
||||
|
||||
|
||||
def load_config(module, config):
    """Apply *config* through the connection; fail the module on ConnectionError."""
    try:
        get_connection(module).edit_config(config)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
|
||||
|
||||
|
||||
def get_defaults_flag(module):
    """Return the flag that shows default config values on this device.

    Probes ``show running-config ?`` and returns 'all' if the device
    advertises it, otherwise 'full'.
    """
    rc, out, err = exec_command(module, 'show running-config ?')
    out = to_text(out, errors='surrogate_then_replace')

    commands = set()
    for line in out.splitlines():
        # BUG FIX: the previous `if line:` guard let whitespace-only lines
        # through, and `line.strip().split()[0]` then raised IndexError.
        parts = line.strip().split()
        if parts:
            commands.add(parts[0])

    if 'all' in commands:
        return 'all'
    else:
        return 'full'
|
||||
0
lib/ansible/module_utils/network/eos/__init__.py
Normal file
0
lib/ansible/module_utils/network/eos/__init__.py
Normal file
462
lib/ansible/module_utils/network/eos/eos.py
Normal file
462
lib/ansible/module_utils/network/eos/eos.py
Normal file
@@ -0,0 +1,462 @@
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
#
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import os
|
||||
import time
|
||||
|
||||
from ansible.module_utils._text import to_text, to_native
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.six import iteritems
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
# Lazily created transport object (Cli or Eapi) shared by this module's helpers.
_DEVICE_CONNECTION = None

# Connection options accepted under the 'provider' dict argument.
eos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),

    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(no_log=True, fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS'])),

    # eAPI-only options
    'use_ssl': dict(default=True, type='bool'),
    'validate_certs': dict(default=True, type='bool'),
    'timeout': dict(type='int'),

    'transport': dict(default='cli', choices=['cli', 'eapi'])
}
eos_argument_spec = {
    'provider': dict(type='dict', options=eos_provider_spec),
}
# The same options accepted at top level; deprecated (removal scheduled for 2.9).
eos_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),

    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),

    'use_ssl': dict(removed_in_version=2.9, type='bool'),
    'validate_certs': dict(removed_in_version=2.9, type='bool'),
    'timeout': dict(removed_in_version=2.9, type='int'),

    'transport': dict(removed_in_version=2.9, choices=['cli', 'eapi'])
}
eos_argument_spec.update(eos_top_spec)
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the ``provider`` option spec for eos modules."""
    return eos_provider_spec
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """No additional argument checks are needed for eos modules."""
    return None
|
||||
|
||||
|
||||
def load_params(module):
    """Promote provider sub-options to top-level module params.

    Only keys known to ``eos_argument_spec`` are copied, and only when the
    top-level value is unset while the provider supplies a non-None value.
    """
    provider = module.params.get('provider') or dict()
    for key, value in iteritems(provider):
        if key not in eos_argument_spec:
            continue
        if value is not None and module.params.get(key) is None:
            module.params[key] = value
|
||||
|
||||
|
||||
def get_connection(module):
    """Return the cached transport, creating an Eapi or Cli instance on first use.

    NOTE(review): ``is_eapi`` is not defined in this part of the file —
    presumably a sibling helper that inspects the transport setting; verify.
    """
    global _DEVICE_CONNECTION
    if not _DEVICE_CONNECTION:
        # merge provider sub-options into top-level params before connecting
        load_params(module)
        if is_eapi(module):
            conn = Eapi(module)
        else:
            conn = Cli(module)
        _DEVICE_CONNECTION = conn
    return _DEVICE_CONNECTION
|
||||
|
||||
|
||||
class Cli:
    """CLI (SSH) transport for EOS devices.

    Wraps ``exec_command`` on the Ansible module and adds config-session
    detection, a per-instance running-config cache, and config load helpers.
    """

    def __init__(self, module):
        self._module = module
        # cache of running-config output keyed by the exact command string
        self._device_configs = {}
        # tri-state: None = not probed yet, True/False once detected
        self._session_support = None

    @property
    def supports_sessions(self):
        """True if the device supports configuration sessions (probed once)."""
        if self._session_support is not None:
            return self._session_support
        rc, out, err = self.exec_command('show configuration sessions')
        self._session_support = rc == 0
        return self._session_support

    def exec_command(self, command):
        """Run one command (str or command-dict) through the module connection."""
        if isinstance(command, dict):
            command = self._module.jsonify(command)
        return exec_command(self._module, command)

    def get_config(self, flags=None):
        """Retrieves the current config from the device or cache"""
        flags = [] if flags is None else flags

        cmd = 'show running-config '
        cmd += ' '.join(flags)
        cmd = cmd.strip()

        try:
            return self._device_configs[cmd]
        except KeyError:
            # BUG FIX: removed a stray `get_connection(self)` call here; it
            # passed a Cli instance where an AnsibleModule is expected and
            # its result was never used.
            rc, out, err = self.exec_command(cmd)
            out = to_text(out, errors='surrogate_then_replace')
            if rc != 0:
                self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'))
            cfg = str(out).strip()
            self._device_configs[cmd] = cfg
            return cfg

    def run_commands(self, commands, check_rc=True):
        """Run list of commands on remote device and return results"""
        responses = list()

        for cmd in to_list(commands):
            rc, out, err = self.exec_command(cmd)
            out = to_text(out, errors='surrogate_then_replace')
            if check_rc and rc != 0:
                self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'))

            # JSON-formatted output is decoded; everything else is returned as text
            try:
                out = self._module.from_json(out)
            except ValueError:
                out = str(out).strip()

            responses.append(out)
        return responses

    def send_config(self, commands):
        """Send config lines one by one; banner blocks (up to 'EOF') are sent raw."""
        multiline = False
        rc = 0
        for command in to_list(commands):
            if command == 'end':
                continue

            if command.startswith('banner') or multiline:
                multiline = True
                command = self._module.jsonify({'command': command, 'sendonly': True})
            elif command == 'EOF' and multiline:
                multiline = False

            rc, out, err = self.exec_command(command)
            if rc != 0:
                return (rc, out, to_text(err, errors='surrogate_then_replace'))

        return (rc, 'ok', '')

    def configure(self, commands):
        """Sends configuration commands to the remote device"""
        # BUG FIX: removed a stray `get_connection(self)` call (wrong argument
        # type, unused result) that previously preceded this.
        rc, out, err = self.exec_command('configure')
        if rc != 0:
            self._module.fail_json(msg='unable to enter configuration mode',
                                   output=to_text(err, errors='surrogate_then_replace'))

        rc, out, err = self.send_config(commands)
        if rc != 0:
            self.exec_command('abort')
            self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'))

        self.exec_command('end')
        return {}

    def load_config(self, commands, commit=False, replace=False):
        """Loads the config commands onto the remote device"""
        use_session = os.getenv('ANSIBLE_EOS_USE_SESSIONS', True)
        try:
            use_session = int(use_session)
        except ValueError:
            pass

        if not all((bool(use_session), self.supports_sessions)):
            # BUG FIX: was `self.configure(self, commands)`, which passed the
            # instance as the commands argument and raised TypeError.
            return self.configure(commands)

        session = 'ansible_%s' % int(time.time())
        result = {'session': session}

        rc, out, err = self.exec_command('configure session %s' % session)
        if rc != 0:
            self._module.fail_json(msg='unable to enter configuration mode',
                                   output=to_text(err, errors='surrogate_then_replace'))

        if replace:
            self.exec_command('rollback clean-config')

        rc, out, err = self.send_config(commands)
        if rc != 0:
            self.exec_command('abort')
            self._module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), commands=commands)

        rc, out, err = self.exec_command('show session-config diffs')
        if rc == 0 and out:
            result['diff'] = to_text(out, errors='surrogate_then_replace').strip()

        if commit:
            self.exec_command('commit')
        else:
            self.exec_command('abort')

        return result
|
||||
|
||||
|
||||
class Eapi:
|
||||
|
||||
def __init__(self, module):
|
||||
self._module = module
|
||||
self._enable = None
|
||||
self._session_support = None
|
||||
self._device_configs = {}
|
||||
|
||||
host = module.params['provider']['host']
|
||||
port = module.params['provider']['port']
|
||||
|
||||
self._module.params['url_username'] = self._module.params['username']
|
||||
self._module.params['url_password'] = self._module.params['password']
|
||||
|
||||
if module.params['provider']['use_ssl']:
|
||||
proto = 'https'
|
||||
else:
|
||||
proto = 'http'
|
||||
|
||||
module.params['validate_certs'] = module.params['provider']['validate_certs']
|
||||
|
||||
self._url = '%s://%s:%s/command-api' % (proto, host, port)
|
||||
|
||||
if module.params['auth_pass']:
|
||||
self._enable = {'cmd': 'enable', 'input': module.params['auth_pass']}
|
||||
else:
|
||||
self._enable = 'enable'
|
||||
|
||||
@property
|
||||
def supports_sessions(self):
|
||||
if self._session_support:
|
||||
return self._session_support
|
||||
response = self.send_request(['show configuration sessions'])
|
||||
self._session_support = 'error' not in response
|
||||
return self._session_support
|
||||
|
||||
def _request_builder(self, commands, output, reqid=None):
|
||||
params = dict(version=1, cmds=commands, format=output)
|
||||
return dict(jsonrpc='2.0', id=reqid, method='runCmds', params=params)
|
||||
|
||||
def send_request(self, commands, output='text'):
|
||||
commands = to_list(commands)
|
||||
|
||||
if self._enable:
|
||||
commands.insert(0, self._enable)
|
||||
|
||||
body = self._request_builder(commands, output)
|
||||
data = self._module.jsonify(body)
|
||||
|
||||
headers = {'Content-Type': 'application/json-rpc'}
|
||||
timeout = self._module.params['timeout']
|
||||
|
||||
response, headers = fetch_url(
|
||||
self._module, self._url, data=data, headers=headers,
|
||||
method='POST', timeout=timeout
|
||||
)
|
||||
|
||||
if headers['status'] != 200:
|
||||
self._module.fail_json(**headers)
|
||||
|
||||
try:
|
||||
data = response.read()
|
||||
response = self._module.from_json(to_text(data, errors='surrogate_then_replace'))
|
||||
except ValueError:
|
||||
self._module.fail_json(msg='unable to load response from device', data=data)
|
||||
|
||||
if self._enable and 'result' in response:
|
||||
response['result'].pop(0)
|
||||
|
||||
return response
|
||||
|
||||
    def run_commands(self, commands):
        """Runs list of commands on remote device and returns results.

        Commands are batched into queues of consecutive entries that share
        the same output format, since a single eAPI request supports only
        one format.  Each *commands* item is a dict with at least a
        'command' key and optionally an 'output' key.
        """
        output = None
        queue = list()
        responses = list()

        def _send(commands, output):
            # Execute one batch and fail the module on any eAPI error.
            response = self.send_request(commands, output=output)
            if 'error' in response:
                err = response['error']
                self._module.fail_json(msg=err['message'], code=err['code'])
            return response['result']

        for item in to_list(commands):
            if is_json(item['command']):
                # '| json' is a CLI convenience; translate it into the
                # native eAPI json output format instead.
                item['command'] = str(item['command']).replace('| json', '')
                item['output'] = 'json'

            if output and output != item['output']:
                # Output format changed: flush the current batch first.
                responses.extend(_send(queue, output))
                queue = list()

            output = item['output'] or 'json'
            queue.append(item['command'])

        if queue:
            responses.extend(_send(queue, output))

        for index, item in enumerate(commands):
            try:
                # Text-format results carry the payload under 'output';
                # json-format results are left as-is (KeyError path).
                responses[index] = responses[index]['output'].strip()
            except KeyError:
                pass

        return responses
|
||||
|
||||
    def get_config(self, flags=None):
        """Retrieves the current config from the device or cache.

        :param flags: optional list of flags appended to
            'show running-config'; each distinct flag combination is
            cached separately on the instance.
        """
        flags = [] if flags is None else flags

        cmd = 'show running-config '
        cmd += ' '.join(flags)
        cmd = cmd.strip()

        try:
            # EAFP: serve from the per-instance cache when present.
            return self._device_configs[cmd]
        except KeyError:
            out = self.send_request(cmd)
            cfg = str(out['result'][0]['output']).strip()
            self._device_configs[cmd] = cfg
            return cfg
|
||||
|
||||
def configure(self, commands):
|
||||
"""Sends the ordered set of commands to the device
|
||||
"""
|
||||
cmds = ['configure terminal']
|
||||
cmds.extend(commands)
|
||||
|
||||
responses = self.send_request(commands)
|
||||
if 'error' in responses:
|
||||
err = responses['error']
|
||||
self._module.fail_json(msg=err['message'], code=err['code'])
|
||||
|
||||
return responses[1:]
|
||||
|
||||
def load_config(self, config, commit=False, replace=False):
|
||||
"""Loads the configuration onto the remote devices
|
||||
|
||||
If the device doesn't support configuration sessions, this will
|
||||
fallback to using configure() to load the commands. If that happens,
|
||||
there will be no returned diff or session values
|
||||
"""
|
||||
if not self.supports_sessions:
|
||||
return self.configure(self, config)
|
||||
|
||||
session = 'ansible_%s' % int(time.time())
|
||||
result = {'session': session}
|
||||
commands = ['configure session %s' % session]
|
||||
|
||||
if replace:
|
||||
commands.append('rollback clean-config')
|
||||
|
||||
commands.extend(config)
|
||||
|
||||
response = self.send_request(commands)
|
||||
if 'error' in response:
|
||||
commands = ['configure session %s' % session, 'abort']
|
||||
self.send_request(commands)
|
||||
err = response['error']
|
||||
self._module.fail_json(msg=err['message'], code=err['code'])
|
||||
|
||||
commands = ['configure session %s' % session, 'show session-config diffs']
|
||||
if commit:
|
||||
commands.append('commit')
|
||||
else:
|
||||
commands.append('abort')
|
||||
|
||||
response = self.send_request(commands, output='text')
|
||||
diff = response['result'][1]['output']
|
||||
if len(diff) > 0:
|
||||
result['diff'] = diff
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def is_json(cmd):
    """Return True when *cmd* ends with the '| json' output modifier."""
    text = to_native(cmd, errors='surrogate_then_replace')
    return text.endswith('| json')
|
||||
|
||||
|
||||
def is_eapi(module):
    """Return True when the module is configured to use the eAPI transport.

    Checks both the deprecated top-level 'transport' option and the
    'provider' dict.
    """
    provider = module.params['provider'] or {}
    candidates = (module.params['transport'], provider.get('transport'))
    return 'eapi' in candidates
|
||||
|
||||
|
||||
def to_command(module, commands):
    """Normalize *commands* into a list of structured command dicts.

    eAPI transports default each command's output format to 'json';
    CLI transports default to 'text'.
    """
    if is_eapi(module):
        default_output = 'json'
    else:
        default_output = 'text'

    transform = ComplexList(dict(
        command=dict(key=True),
        output=dict(default=default_output),
        prompt=dict(),
        answer=dict()
    ), module)

    return transform(to_list(commands))
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return the device running configuration via the shared connection.

    :param flags: optional list of flags appended to 'show running-config'.
    """
    # Bug fix: the original line 'flags = None if flags is None else flags'
    # was a no-op; normalize to an empty list like the sibling platform
    # modules (ios/iosxr) do.
    flags = [] if flags is None else flags

    conn = get_connection(module)
    return conn.get_config(flags)
|
||||
|
||||
|
||||
def run_commands(module, commands):
    """Run *commands* on the device through the shared connection,
    normalizing them with to_command() first."""
    conn = get_connection(module)
    return conn.run_commands(to_command(module, commands))
|
||||
|
||||
|
||||
def load_config(module, config, commit=False, replace=False):
    """Load *config* onto the device through the shared connection.

    :param commit: commit the change when True, abort otherwise
    :param replace: replace (rather than merge with) the existing config
    """
    conn = get_connection(module)
    return conn.load_config(config, commit, replace)
|
||||
200
lib/ansible/module_utils/network/fortios/fortios.py
Normal file
200
lib/ansible/module_utils/network/fortios/fortios.py
Normal file
@@ -0,0 +1,200 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c), Benjamin Jolivot <bjolivot@gmail.com>, 2014
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import os
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
|
||||
|
||||
# check for pyFG lib
|
||||
try:
|
||||
from pyFG import FortiOS, FortiConfig
|
||||
from pyFG.exceptions import FailedCommit
|
||||
HAS_PYFG = True
|
||||
except ImportError:
|
||||
HAS_PYFG = False
|
||||
|
||||
# Common argument spec shared by the fortios_* modules.  file_mode=True makes
# the module edit a local configuration file instead of a live device.
fortios_argument_spec = dict(
    file_mode=dict(type='bool', default=False),
    config_file=dict(type='path'),
    host=dict(),
    username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    password=dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    timeout=dict(type='int', default=60),
    vdom=dict(type='str'),
    backup=dict(type='bool', default=False),
    backup_path=dict(type='path'),
    backup_filename=dict(type='str'),
)

# Conditional requirements: live mode needs credentials, file mode needs a
# config file, and taking a backup needs somewhere to write it.
fortios_required_if = [
    ['file_mode', False, ['host', 'username', 'password']],
    ['file_mode', True, ['config_file']],
    ['backup', True, ['backup_path']],
]

# A local config_file and live-device credentials are mutually exclusive.
fortios_mutually_exclusive = [
    ['config_file', 'host'],
    ['config_file', 'username'],
    ['config_file', 'password']
]

# Known FortiOS CLI error codes mapped to human-readable descriptions.
fortios_error_codes = {
    '-3': "Object not found",
    '-61': "Command error"
}
|
||||
|
||||
|
||||
def backup(module, running_config):
    """Write *running_config* to a file under module.params['backup_path'].

    The file name is params['backup_filename'] when set, otherwise
    '<host>_config.<timestamp>'.  Fails the module on any I/O error.
    """
    backup_path = module.params['backup_path']
    backup_filename = module.params['backup_filename']
    if not os.path.exists(backup_path):
        try:
            os.mkdir(backup_path)
        # Bug fix: the original bare 'except:' swallowed every exception
        # (including KeyboardInterrupt); catch filesystem errors only.
        except OSError:
            module.fail_json(msg="Can't create directory {0} Permission denied ?".format(backup_path))
    tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
    # Bug fix: backup_filename has no default, so it may be None and the
    # original 'len(backup_filename)' raised TypeError whenever the option
    # was omitted.  Plain truthiness covers both None and ''.
    if backup_filename:
        filename = '%s/%s' % (backup_path, backup_filename)
    else:
        filename = '%s/%s_config.%s' % (backup_path, module.params['host'], tstamp)
    try:
        # Context manager closes the handle; the original leaked it.
        with open(filename, 'w') as backup_file:
            backup_file.write(running_config)
    except IOError:
        module.fail_json(msg="Can't create backup file {0} Permission denied ?".format(filename))
|
||||
|
||||
|
||||
class AnsibleFortios(object):
    """Wrapper around pyFG that drives a FortiOS device — or a local
    configuration file when file_mode is set — on behalf of the
    fortios_* modules.
    """

    def __init__(self, module):
        if not HAS_PYFG:
            module.fail_json(msg='Could not import the python library pyFG required by this module')

        self.result = {
            'changed': False,
        }
        self.module = module

    def _connect(self):
        """Create the pyFG FortiOS handle; open the connection unless
        operating on a local file."""
        if self.module.params['file_mode']:
            # Dummy device: config is loaded from / written to a file.
            self.forti_device = FortiOS('')
        else:
            host = self.module.params['host']
            username = self.module.params['username']
            password = self.module.params['password']
            timeout = self.module.params['timeout']
            vdom = self.module.params['vdom']

            self.forti_device = FortiOS(host, username=username, password=password, timeout=timeout, vdom=vdom)

            try:
                self.forti_device.open()
            except Exception as e:
                self.module.fail_json(msg='Error connecting device. %s' % to_native(e),
                                      exception=traceback.format_exc())

    def load_config(self, path):
        """Load the configuration rooted at *path* into pyFG's running and
        candidate configs, taking a backup when requested."""
        self.path = path
        self._connect()
        # load in file_mode
        if self.module.params['file_mode']:
            try:
                f = open(self.module.params['config_file'], 'r')
                running = f.read()
                f.close()
            except IOError as e:
                self.module.fail_json(msg='Error reading configuration file. %s' % to_native(e),
                                      exception=traceback.format_exc())
            self.forti_device.load_config(config_text=running, path=path)

        else:
            # get config from the live device
            try:
                self.forti_device.load_config(path=path)
            except Exception as e:
                self.forti_device.close()
                self.module.fail_json(msg='Error reading running config. %s' % to_native(e),
                                      exception=traceback.format_exc())

        # set configs in object
        self.result['running_config'] = self.forti_device.running_config.to_text()
        self.candidate_config = self.forti_device.candidate_config

        # backup if needed
        if self.module.params['backup']:
            backup(self.module, self.forti_device.running_config.to_text())

    def apply_changes(self):
        """Diff candidate vs running config, commit (or rewrite the file)
        unless in check mode, then exit the module with the result."""
        change_string = self.forti_device.compare_config()
        if change_string:
            self.result['change_string'] = change_string
            self.result['changed'] = True

        # Commit if not check mode
        if change_string and not self.module.check_mode:
            if self.module.params['file_mode']:
                try:
                    f = open(self.module.params['config_file'], 'w')
                    f.write(self.candidate_config.to_text())
                    f.close()
                except IOError as e:
                    self.module.fail_json(msg='Error writing configuration file. %s' %
                                          to_native(e), exception=traceback.format_exc())
            else:
                try:
                    self.forti_device.commit()
                except FailedCommit as e:
                    # Something's wrong (rollback is automatic)
                    self.forti_device.close()
                    error_list = self.get_error_infos(e)
                    # Bug fix: BaseException.message does not exist on
                    # Python 3; use to_native(e) to render the error.
                    self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % to_native(e))

        self.forti_device.close()
        self.module.exit_json(**self.result)

    def del_block(self, block_id):
        """Remove configuration block *block_id* from the candidate config."""
        self.forti_device.candidate_config[self.path].del_block(block_id)

    def add_block(self, block_id, block):
        """Insert or replace configuration block *block_id* in the
        candidate config."""
        self.forti_device.candidate_config[self.path][block_id] = block

    def get_error_infos(self, cli_errors):
        """Translate a pyFG FailedCommit into a list of dicts carrying the
        error code, a decoded error type, and the raw error string."""
        error_list = []
        for errors in cli_errors.args:
            for error in errors:
                error_code = error[0]
                error_string = error[1]
                error_type = fortios_error_codes.get(error_code, "unknown")
                error_list.append(dict(error_code=error_code, error_type=error_type, error_string=error_string))

        return error_list

    def get_empty_configuration_block(self, block_name, block_type):
        """Return a fresh, empty pyFG FortiConfig block."""
        return FortiConfig(block_name, block_type)
|
||||
0
lib/ansible/module_utils/network/ios/__init__.py
Normal file
0
lib/ansible/module_utils/network/ios/__init__.py
Normal file
139
lib/ansible/module_utils/network/ios/ios.py
Normal file
139
lib/ansible/module_utils/network/ios/ios.py
Normal file
@@ -0,0 +1,139 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
|
||||
# Per-process cache of 'show running-config' output keyed by exact command.
_DEVICE_CONFIGS = {}

# Connection options accepted under the 'provider' dict.
ios_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int')
}
ios_argument_spec = {
    'provider': dict(type='dict', options=ios_provider_spec),
}

# Legacy top-level connection options, kept for backward compatibility and
# scheduled for removal in Ansible 2.9.
ios_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(removed_in_version=2.9, no_log=True),
    'timeout': dict(removed_in_version=2.9, type='int')
}
ios_argument_spec.update(ios_top_spec)
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the provider argument spec for ios modules."""
    return ios_provider_spec
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """Hook for provider argument validation; nothing to check for ios."""
    pass
|
||||
|
||||
|
||||
def get_defaults_flag(module):
    """Return the flag used to display default values in the config.

    Probes 'show running-config ?' and prefers 'all' when the platform
    offers it, falling back to 'full' otherwise.
    """
    rc, out, err = exec_command(module, 'show running-config ?')
    out = to_text(out, errors='surrogate_then_replace')

    # Collect the first word of every non-blank line of the help output.
    commands = set()
    for line in out.splitlines():
        if line.strip():
            commands.add(line.strip().split()[0])

    if 'all' in commands:
        return ['all']
    else:
        return ['full']
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return 'show running-config' output, cached per flag combination.

    :param flags: optional list of flags appended to the command.
    """
    flags = [] if flags is None else flags

    cmd = 'show running-config '
    cmd += ' '.join(flags)
    cmd = cmd.strip()

    try:
        # EAFP: serve from the module-level cache when present.
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        rc, out, err = exec_command(module, cmd)
        if rc != 0:
            module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_then_replace'))
        cfg = to_text(out, errors='surrogate_then_replace').strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalize *commands* into dicts with command/prompt/answer keys."""
    spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict()
    }
    transform = ComplexList(spec, module)
    return transform(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute *commands* on the device and return the decoded outputs.

    :param check_rc: fail the module when a command returns rc != 0.
    """
    responses = list()
    commands = to_commands(module, to_list(commands))
    for cmd in commands:
        # Command dicts are serialized to JSON for exec_command transport.
        cmd = module.jsonify(cmd)
        rc, out, err = exec_command(module, cmd)
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), rc=rc)
        responses.append(to_text(out, errors='surrogate_then_replace'))
    return responses
|
||||
|
||||
|
||||
def load_config(module, commands):
    """Apply *commands* inside a configure-terminal session.

    Always exits configuration mode with 'end', even when a command fails
    (in which case the module fails with that command's stderr).
    """
    rc, out, err = exec_command(module, 'configure terminal')
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode', err=to_text(out, errors='surrogate_then_replace'))

    for command in to_list(commands):
        # 'end' is issued explicitly below; skip it if present in the input.
        if command == 'end':
            continue
        rc, out, err = exec_command(module, command)
        if rc != 0:
            exec_command(module, 'end')
            module.fail_json(msg=to_text(err, errors='surrogate_then_replace'), command=command, rc=rc)

    exec_command(module, 'end')
|
||||
0
lib/ansible/module_utils/network/iosxr/__init__.py
Normal file
0
lib/ansible/module_utils/network/iosxr/__init__.py
Normal file
149
lib/ansible/module_utils/network/iosxr/iosxr.py
Normal file
149
lib/ansible/module_utils/network/iosxr/iosxr.py
Normal file
@@ -0,0 +1,149 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c) 2015 Peter Sprygada, <psprygada@ansible.com>
|
||||
# Copyright (c) 2017 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
|
||||
# Per-process cache of 'show running-config' output keyed by exact command.
_DEVICE_CONFIGS = {}

# Connection options accepted under the 'provider' dict.
iosxr_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'timeout': dict(type='int'),
}
iosxr_argument_spec = {
    'provider': dict(type='dict', options=iosxr_provider_spec)
}
# Legacy top-level connection options, kept for backward compatibility and
# scheduled for removal in Ansible 2.9.
iosxr_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'timeout': dict(removed_in_version=2.9, type='int'),
}
iosxr_argument_spec.update(iosxr_top_spec)
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the provider argument spec for iosxr modules."""
    return iosxr_provider_spec
|
||||
|
||||
|
||||
def check_args(module, warnings):
    """Hook for provider argument validation; nothing to check for iosxr."""
    pass
|
||||
|
||||
|
||||
def get_config(module, flags=None):
    """Return 'show running-config' output, cached per flag combination.

    :param flags: optional list of flags appended to the command.
    """
    flags = [] if flags is None else flags

    cmd = 'show running-config '
    cmd += ' '.join(flags)
    cmd = cmd.strip()

    try:
        # EAFP: serve from the module-level cache when present.
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        rc, out, err = exec_command(module, cmd)
        if rc != 0:
            module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_or_strict'))
        cfg = to_text(out, errors='surrogate_or_strict').strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalize *commands* into dicts with command/prompt/answer keys."""
    spec = {
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict()
    }
    transform = ComplexList(spec, module)
    return transform(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute *commands* on the device and return the decoded outputs.

    :param check_rc: fail the module when a command returns rc != 0.
    """
    responses = list()
    commands = to_commands(module, to_list(commands))
    # Consistency fix: to_commands() already yields a list, so the original
    # extra to_list() wrapper in the loop header was redundant; iterate the
    # list directly, matching the ios implementation.
    for cmd in commands:
        # Command dicts are serialized to JSON for exec_command transport.
        cmd = module.jsonify(cmd)
        rc, out, err = exec_command(module, cmd)
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
        responses.append(to_text(out, errors='surrogate_or_strict'))
    return responses
|
||||
|
||||
|
||||
def load_config(module, commands, warnings, commit=False, replace=False, comment=None, admin=False):
    """Load *commands* into an IOS-XR configuration session and return the diff.

    :param warnings: list that collects warnings (e.g. diff unsupported)
    :param commit: commit the candidate config when True, abort otherwise
    :param replace: accepted for API symmetry; not used in this code path
    :param comment: optional commit comment
    :param admin: enter admin configuration mode
    :returns: the configuration diff as text
    """
    cmd = 'configure terminal'
    if admin:
        cmd = 'admin ' + cmd

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        module.fail_json(msg='unable to enter configuration mode', err=to_text(err, errors='surrogate_or_strict'))

    failed = False
    for command in to_list(commands):
        # The session is closed explicitly below; skip stray 'end's.
        if command == 'end':
            continue

        rc, out, err = exec_command(module, command)
        if rc != 0:
            failed = True
            break

    if failed:
        exec_command(module, 'abort')
        module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), commands=commands, rc=rc)

    rc, diff, err = exec_command(module, 'show commit changes diff')
    if rc != 0:
        # If we failed, maybe we are in an old version so
        # we run show configuration instead
        rc, diff, err = exec_command(module, 'show configuration')
        if module._diff:
            warnings.append('device platform does not support config diff')

    if commit:
        cmd = 'commit'
        if comment:
            cmd += ' comment {0}'.format(comment)
    else:
        cmd = 'abort'

    rc, out, err = exec_command(module, cmd)
    if rc != 0:
        exec_command(module, 'abort')
        # Bug fix: decode err like every other failure path in this module
        # instead of passing possibly-raw bytes straight to fail_json.
        module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), commands=commands, rc=rc)

    return to_text(diff, errors='surrogate_or_strict')
|
||||
113
lib/ansible/module_utils/network/ironware/ironware.py
Normal file
113
lib/ansible/module_utils/network/ironware/ironware.py
Normal file
@@ -0,0 +1,113 @@
|
||||
#
|
||||
# Copyright (c) 2017, Paul Baker <paul@paulbaker.id.au>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, EntityCollection
|
||||
from ansible.module_utils.connection import Connection, exec_command
|
||||
|
||||
# Cached running-config text and shared Connection, reused across calls
# within a single module run.
_DEVICE_CONFIG = None
_CONNECTION = None

# Connection options accepted under the 'provider' dict.
ironware_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback, ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int'),
}

ironware_argument_spec = {
    'provider': dict(type='dict', options=ironware_provider_spec)
}

# Spec used by EntityCollection to normalize command entries.
command_spec = {
    'command': dict(key=True),
    'prompt': dict(),
    'answer': dict()
}
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the provider argument spec for ironware modules."""
    return ironware_provider_spec
|
||||
|
||||
|
||||
def check_args(module):
    """Hook for provider argument validation; nothing to check for ironware."""
    pass
|
||||
|
||||
|
||||
def get_connection(module):
    """Return the module-level cached Connection, creating it on first use."""
    global _CONNECTION
    if _CONNECTION:
        return _CONNECTION
    _CONNECTION = Connection(module._socket_path)

    return _CONNECTION
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Validate and normalize *commands* into command dicts.

    Warns (rather than fails) when a non-'show' command is supplied in
    check mode.
    """
    if not isinstance(commands, list):
        raise AssertionError('argument must be of type <list>')

    transform = EntityCollection(module, command_spec)
    commands = transform(commands)

    for index, item in enumerate(commands):
        if module.check_mode and not item['command'].startswith('show'):
            module.warn('only show commands are supported when using check '
                        'mode, not executing `%s`' % item['command'])

    return commands
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute *commands* over the connection and return decoded outputs.

    NOTE(review): check_rc is accepted for API symmetry with the other
    platform modules but is not used in this implementation.
    """
    connection = get_connection(module)

    commands = to_commands(module, to_list(commands))

    responses = list()

    for cmd in commands:
        out = connection.get(**cmd)
        responses.append(to_text(out, errors='surrogate_then_replace'))

    return responses
|
||||
|
||||
|
||||
def get_config(module, source='running', flags=None):
    """Return the device configuration, caching the plain running-config.

    :param source: config datastore to read (only 'running' is cached)
    :param flags: optional flags passed through to the connection
    """
    global _DEVICE_CONFIG
    # Bug fix: the original compared strings with 'is', which tests object
    # identity and only works by accident of CPython string interning
    # (it raises SyntaxWarning on Python >= 3.8).  Use equality instead.
    if source == 'running' and flags is None and _DEVICE_CONFIG is not None:
        return _DEVICE_CONFIG
    else:
        conn = get_connection(module)
        out = conn.get_config(source=source, flags=flags)
        cfg = to_text(out, errors='surrogate_then_replace').strip()
        if source == 'running' and flags is None:
            _DEVICE_CONFIG = cfg
        return cfg
|
||||
|
||||
|
||||
def load_config(module, config):
    """Push *config* lines to the device via the connection's edit_config."""
    conn = get_connection(module)
    conn.edit_config(config)
|
||||
0
lib/ansible/module_utils/network/junos/__init__.py
Normal file
0
lib/ansible/module_utils/network/junos/__init__.py
Normal file
405
lib/ansible/module_utils/network/junos/junos.py
Normal file
405
lib/ansible/module_utils/network/junos/junos.py
Normal file
@@ -0,0 +1,405 @@
|
||||
#
|
||||
# (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
import collections
|
||||
import json
|
||||
from contextlib import contextmanager
|
||||
from copy import deepcopy
|
||||
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.connection import Connection
|
||||
from ansible.module_utils.network.common.netconf import NetconfConnection
|
||||
from ansible.module_utils._text import to_text
|
||||
|
||||
# Prefer lxml for XML handling; fall back to the stdlib ElementTree
# implementation when lxml is not installed.
try:
    from lxml.etree import Element, SubElement, fromstring, tostring
    HAS_LXML = True
except ImportError:
    from xml.etree.ElementTree import Element, SubElement, fromstring, tostring
    HAS_LXML = False

# Valid configuration-load actions, and the subset allowed for json input.
ACTIONS = frozenset(['merge', 'override', 'replace', 'update', 'set'])
JSON_ACTIONS = frozenset(['merge', 'override', 'update'])
# Formats accepted when loading configuration.
FORMATS = frozenset(['xml', 'text', 'json'])
# Formats accepted when retrieving configuration ('set' is display-only).
CONFIG_FORMATS = frozenset(['xml', 'text', 'json', 'set'])

# Connection options accepted under the 'provider' dict.
junos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'timeout': dict(type='int'),
    'transport': dict(default='netconf', choices=['cli', 'netconf'])
}
junos_argument_spec = {
    'provider': dict(type='dict', options=junos_provider_spec),
}
# Legacy top-level connection options, kept for backward compatibility and
# scheduled for removal in Ansible 2.9.
junos_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'timeout': dict(removed_in_version=2.9, type='int'),
    'transport': dict(removed_in_version=2.9)
}
junos_argument_spec.update(junos_top_spec)
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the 'provider' option spec shared by junos modules."""
    return junos_provider_spec
|
||||
|
||||
|
||||
def get_connection(module):
    """Return a connection to the device, cached on the module object.

    Chooses a CLI- or NETCONF-backed connection based on the 'network_api'
    reported by the platform capabilities; fails the module for anything else.
    """
    if not hasattr(module, '_junos_connection'):
        network_api = get_capabilities(module).get('network_api')
        if network_api == 'cliconf':
            module._junos_connection = Connection(module._socket_path)
        elif network_api == 'netconf':
            module._junos_connection = NetconfConnection(module._socket_path)
        else:
            module.fail_json(msg='Invalid connection type %s' % network_api)
    return module._junos_connection
|
||||
|
||||
|
||||
def get_capabilities(module):
    """Return the parsed device capability document, cached on the module."""
    if not hasattr(module, '_junos_capabilities'):
        raw = Connection(module._socket_path).get_capabilities()
        module._junos_capabilities = json.loads(raw)
    return module._junos_capabilities
|
||||
|
||||
|
||||
def _validate_rollback_id(module, value):
|
||||
try:
|
||||
if not 0 <= int(value) <= 49:
|
||||
raise ValueError
|
||||
except ValueError:
|
||||
module.fail_json(msg='rollback must be between 0 and 49')
|
||||
|
||||
|
||||
def load_configuration(module, candidate=None, action='merge', rollback=None, format='xml'):
    """Load *candidate* configuration onto the device, or restore a rollback.

    Exactly one of *candidate* / *rollback* must be given.  *action* must be
    valid for the chosen *format* (JSON supports only a subset of actions,
    and 'set' requires text format).  Returns the connection's reply when
    loading a candidate; rollback restores return nothing.
    """
    if all((candidate is None, rollback is None)):
        module.fail_json(msg='one of candidate or rollback must be specified')

    elif all((candidate is not None, rollback is not None)):
        module.fail_json(msg='candidate and rollback are mutually exclusive')

    if format not in FORMATS:
        module.fail_json(msg='invalid format specified')

    if format == 'json' and action not in JSON_ACTIONS:
        module.fail_json(msg='invalid action for format json')
    elif format in ('text', 'xml') and action not in ACTIONS:
        # Fixed message: previously read 'invalid action format %s', which
        # misreported the problem; it is the action that is invalid for the
        # given format (mirrors the json branch above).
        module.fail_json(msg='invalid action for format %s' % format)
    if action == 'set' and not format == 'text':
        module.fail_json(msg='format must be text when action is set')

    conn = get_connection(module)
    if rollback is not None:
        _validate_rollback_id(module, rollback)
        obj = Element('load-configuration', {'rollback': str(rollback)})
        conn.execute_rpc(tostring(obj))
    else:
        return conn.load_configuration(config=candidate, action=action, format=format)
|
||||
|
||||
|
||||
def get_configuration(module, compare=False, format='xml', rollback='0', filter=None):
    """Fetch device configuration, optionally as a diff against a rollback."""
    if format not in CONFIG_FORMATS:
        module.fail_json(msg='invalid config format specified')

    conn = get_connection(module)
    if not compare:
        return conn.get_configuration(format=format, filter=filter)

    # Compare mode: ask the device to diff the running config against the
    # given rollback id.
    _validate_rollback_id(module, rollback)
    xattrs = {
        'format': format,
        'compare': 'rollback',
        'rollback': str(rollback),
    }
    return conn.execute_rpc(tostring(Element('get-configuration', xattrs)))
|
||||
|
||||
|
||||
def commit_configuration(module, confirm=False, check=False, comment=None, confirm_timeout=None, synchronize=False,
                         at_time=None, exit=False):
    """Validate (check mode) or commit the candidate configuration."""
    conn = get_connection(module)
    if check:
        return conn.validate()
    return conn.commit(confirmed=confirm, timeout=confirm_timeout, comment=comment,
                       synchronize=synchronize, at_time=at_time)
|
||||
|
||||
|
||||
def command(module, cmd, format='text', rpc_only=False):
    """Run an operational command; rpc_only returns its XML RPC equivalent."""
    conn = get_connection(module)
    if rpc_only:
        cmd = '%s | display xml rpc' % cmd
    return conn.command(command=cmd, format=format)
|
||||
|
||||
|
||||
def lock_configuration(x):
    # Take the exclusive configuration lock on the device; *x* is the
    # module used to resolve the persistent connection.
    conn = get_connection(x)
    return conn.lock()
|
||||
|
||||
|
||||
def unlock_configuration(x):
    # Release the configuration lock taken by lock_configuration().
    conn = get_connection(x)
    return conn.unlock()
|
||||
|
||||
|
||||
@contextmanager
def locked_config(module):
    """Context manager that holds the configuration lock for its body.

    The lock is always released on exit, even if the body raises.
    """
    try:
        lock_configuration(module)
        yield
    finally:
        unlock_configuration(module)
|
||||
|
||||
|
||||
def discard_changes(module, exit=False):
    # Drop uncommitted candidate changes; exit=True also leaves
    # configuration mode.
    conn = get_connection(module)
    return conn.discard_changes(exit=exit)
|
||||
|
||||
|
||||
def get_diff(module, rollback='0'):
    """Return the textual config diff against *rollback*, or None if empty."""
    reply = get_configuration(module, compare=True, format='text', rollback=rollback)
    # A list reply means the device answered with warnings only, i.e. there
    # is no diff to report.
    if isinstance(reply, list):
        return None

    output = reply.find('.//configuration-output')
    if output is None:
        return None
    return to_text(output.text, encoding='latin-1').strip()
|
||||
|
||||
|
||||
def load_config(module, candidate, warnings, action='merge', format='xml'):
    """Load *candidate* onto the device, validate it and return the diff.

    Device warnings emitted while loading are appended to *warnings*.
    Returns None when there is nothing to load.
    """
    get_connection(module)
    if not candidate:
        return None

    payload = '\n'.join(candidate) if isinstance(candidate, list) else candidate

    reply = load_configuration(module, payload, action=action, format=format)
    if isinstance(reply, list):
        warnings.extend(reply)

    module._junos_connection.validate()
    return get_diff(module)
|
||||
|
||||
|
||||
def get_param(module, key):
    """Look up *key* in the top-level params, falling back to 'provider'."""
    if module.params.get(key):
        return module.params[key]
    provider = module.params.get('provider')
    if provider:
        return provider.get(key)
    return None
|
||||
|
||||
|
||||
def map_params_to_obj(module, param_to_xpath_map, param=None):
    """Build an ordered dict keyed by xpath from module parameters.

    Each value is a list of metadata dicts describing the xpath.
    Accepted metadata keys:
        'value': Value of param.
        'tag_only': Value is indicated by tag only in xml hierarchy.
        'leaf_only': If operation is to be added at leaf node only.
        'value_req': If value(text) is required for leaf node.
        'is_key': If the field is key or not.
    Example output:
        {
            'name': [{'value': 'ge-0/0/1'}]
            'disable': [{'value': True, tag_only': True}]
        }

    :param module: AnsibleModule whose params are used when *param* is None
    :param param_to_xpath_map: module params to xpath map
    :return: ordered dict of xpath -> list of metadata dicts
    """
    if not param:
        param = module.params

    obj = collections.OrderedDict()
    for key, attribute in param_to_xpath_map.items():
        if key not in param:
            continue

        values = param[key]
        if not isinstance(values, (list, tuple)):
            values = [values]

        # An attribute given as a dict carries extra metadata besides the
        # xpath; a bare string is just the xpath itself.
        if isinstance(attribute, dict):
            xpath = attribute.get('xpath')
            meta_template = attribute
        else:
            xpath = attribute
            meta_template = None

        entries = obj.setdefault(xpath, [])
        for val in values:
            if meta_template is None:
                entries.append({'value': val})
            else:
                meta = deepcopy(meta_template)
                del meta['xpath']
                meta['value'] = val
                entries.append(meta)
    return obj
|
||||
|
||||
|
||||
def map_obj_to_ele(module, want, top, value_map=None, param=None):
    """Build an lxml element tree for a NETCONF edit-config from *want*.

    *want* is the xpath -> metadata mapping produced by map_params_to_obj();
    *top* is a '/'-separated path to the subtree root.  Requires lxml for
    xpath() and getparent(); fails the module otherwise.

    :param module: AnsibleModule whose params are used when *param* is None
    :param want: dict of xpath -> list of attribute metadata dicts
    :param top: '/'-separated container path, e.g. 'system/syslog/file'
    :param value_map: optional per-xpath mapping of param value -> device value
    :param param: optional parameter dict overriding module.params
    :return: the subtree root element (first child of the synthetic root)
    """
    if not HAS_LXML:
        module.fail_json(msg='lxml is not installed.')

    if not param:
        param = module.params

    # Synthetic wrapper so relative xpath lookups below work from one root.
    root = Element('root')
    top_ele = top.split('/')
    ele = SubElement(root, top_ele[0])

    # Create the intermediate containers of the 'top' path.
    if len(top_ele) > 1:
        for item in top_ele[1:-1]:
            ele = SubElement(ele, item)
    container = ele
    state = param.get('state')
    active = param.get('active')
    if active:
        oper = 'active'
    else:
        oper = 'inactive'

    # build xml subtree
    if container.tag != top_ele[-1]:
        node = SubElement(container, top_ele[-1])
    else:
        node = container

    for fxpath, attributes in want.items():
        for attr in attributes:
            tag_only = attr.get('tag_only', False)
            leaf_only = attr.get('leaf_only', False)
            value_req = attr.get('value_req', False)
            is_key = attr.get('is_key', False)
            parent_attrib = attr.get('parent_attrib', True)
            value = attr.get('value')
            field_top = attr.get('top')

            # operation 'delete' is added as element attribute
            # only if it is key or leaf only node
            if state == 'absent' and not (is_key or leaf_only):
                continue

            # convert param value to device specific value
            if value_map and fxpath in value_map:
                value = value_map[fxpath].get(value)

            if (value is not None) or tag_only or leaf_only:
                ele = node
                if field_top:
                    # eg: top = 'system/syslog/file'
                    # field_top = 'system/syslog/file/contents'
                    # <file>
                    #   <name>test</name>
                    #   <contents>
                    #   </contents>
                    # </file>
                    ele_list = root.xpath(top + '/' + field_top)

                    # Create any missing elements on the field_top path,
                    # reusing ones that already exist.
                    if not len(ele_list):
                        fields = field_top.split('/')
                        ele = node
                        for item in fields:
                            inner_ele = root.xpath(top + '/' + item)
                            if len(inner_ele):
                                ele = inner_ele[0]
                            else:
                                ele = SubElement(ele, item)
                    else:
                        ele = ele_list[0]

                # Booleans stay as flags; everything else becomes text.
                if value is not None and not isinstance(value, bool):
                    value = to_text(value, errors='surrogate_then_replace')

                if fxpath:
                    tags = fxpath.split('/')
                    for item in tags:
                        ele = SubElement(ele, item)

                if tag_only:
                    if state == 'present':
                        if not value:
                            # if value of tag_only node is false, delete the node
                            ele.set('delete', 'delete')

                elif leaf_only:
                    if state == 'present':
                        ele.set(oper, oper)
                        ele.text = value
                    else:
                        ele.set('delete', 'delete')
                        # Add value of leaf node if required while deleting.
                        # in some cases if value is present while deleting, it
                        # can result in error, hence the check
                        if value_req:
                            ele.text = value
                        if is_key:
                            par = ele.getparent()
                            par.set('delete', 'delete')
                else:
                    ele.text = value
                    par = ele.getparent()

                    if parent_attrib:
                        if state == 'present':
                            # set replace attribute at parent node
                            if not par.attrib.get('replace'):
                                par.set('replace', 'replace')

                            # set active/inactive at parent node
                            if not par.attrib.get(oper):
                                par.set(oper, oper)
                        else:
                            par.set('delete', 'delete')

    return root.getchildren()[0]
|
||||
|
||||
|
||||
def to_param_list(module):
    """Normalise the 'aggregate' option into a list of parameter dicts."""
    aggregate = module.params.get('aggregate')
    if not aggregate:
        # No aggregate supplied: treat the module params as a single entry.
        return [module.params]
    if isinstance(aggregate, dict):
        return [aggregate]
    return aggregate
|
||||
0
lib/ansible/module_utils/network/mlnxos/__init__.py
Normal file
0
lib/ansible/module_utils/network/mlnxos/__init__.py
Normal file
102
lib/ansible/module_utils/network/mlnxos/mlnxos.py
Normal file
102
lib/ansible/module_utils/network/mlnxos/mlnxos.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2017, Ansible by Red Hat, inc
|
||||
#
|
||||
# This file is part of Ansible by Red Hat
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
from ansible.module_utils.connection import Connection, ConnectionError
|
||||
from ansible.module_utils.network.common.utils import to_list, EntityCollection
|
||||
|
||||
# Cache of fetched device configurations.
_DEVICE_CONFIGS = {}
# Process-wide persistent connection, created lazily by get_connection().
_CONNECTION = None

# Connection options nested under the 'provider' module argument.
mlnxos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback,
                               ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback,
                               ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback,
                                  ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'authorize': dict(fallback=(env_fallback,
                                ['ANSIBLE_NET_AUTHORIZE']), type='bool'),
    'auth_pass': dict(fallback=(env_fallback,
                                ['ANSIBLE_NET_AUTH_PASS']), no_log=True),
    'timeout': dict(type='int')
}
mlnxos_argument_spec = {
    'provider': dict(type='dict', options=mlnxos_provider_spec),
}

# Schema used by to_commands() to normalise command entries.
command_spec = {
    'command': dict(key=True),
    'prompt': dict(),
    'answer': dict()
}
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the 'provider' option spec shared by mlnxos modules."""
    return mlnxos_provider_spec
|
||||
|
||||
|
||||
def get_connection(module):
    """Return the process-wide persistent connection, creating it lazily."""
    global _CONNECTION
    if not _CONNECTION:
        _CONNECTION = Connection(module._socket_path)
    return _CONNECTION
|
||||
|
||||
|
||||
def to_commands(module, commands):
    """Normalise *commands* through the command_spec entity collection."""
    if not isinstance(commands, list):
        raise AssertionError('argument must be of type <list>')

    transform = EntityCollection(module, command_spec)
    return transform(commands)
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Execute *commands* on the device and return their text outputs."""
    connection = get_connection(module)

    responses = []
    for cmd in to_commands(module, to_list(commands)):
        out = connection.get(**cmd)
        responses.append(to_text(out, errors='surrogate_then_replace'))
    return responses
|
||||
|
||||
|
||||
def get_config(module, source='running'):
    """Return the requested configuration datastore as stripped text."""
    conn = get_connection(module)
    raw = conn.get_config(source)
    return to_text(raw, errors='surrogate_then_replace').strip()
|
||||
|
||||
|
||||
def load_config(module, config):
    # Push *config* to the device; a transport failure is surfaced as a
    # clean module failure instead of an unhandled exception.
    try:
        conn = get_connection(module)
        conn.edit_config(config)
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc))
|
||||
322
lib/ansible/module_utils/network/netscaler/netscaler.py
Normal file
322
lib/ansible/module_utils/network/netscaler/netscaler.py
Normal file
@@ -0,0 +1,322 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2017 Citrix Systems
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
from ansible.module_utils.six import binary_type, text_type
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
class ConfigProxy(object):
    """Bridge between Ansible module parameters and a nitro SDK config object.

    Applies declared value transforms to the module's attribute values,
    copies the read/write subset onto the wrapped nitro object, and exposes
    CRUD and diff helpers against the NetScaler.
    """

    def __init__(self, actual, client, attribute_values_dict, readwrite_attrs, transforms=None, readonly_attrs=None, immutable_attrs=None, json_encodes=None):
        """Wrap *actual* (a nitro SDK object) for use with *client*.

        attribute_values_dict holds the raw module parameter values;
        readwrite/readonly/immutable attrs classify the object's fields;
        transforms maps attribute name -> list of transform names/callables;
        json_encodes lists attributes whose values are JSON-encoded strings.
        """
        transforms = {} if transforms is None else transforms
        readonly_attrs = [] if readonly_attrs is None else readonly_attrs
        immutable_attrs = [] if immutable_attrs is None else immutable_attrs
        json_encodes = [] if json_encodes is None else json_encodes

        # Actual config object from nitro sdk
        self.actual = actual

        # nitro client
        self.client = client

        # ansible attribute_values_dict
        self.attribute_values_dict = attribute_values_dict

        self.readwrite_attrs = readwrite_attrs
        self.readonly_attrs = readonly_attrs
        self.immutable_attrs = immutable_attrs
        self.json_encodes = json_encodes
        self.transforms = transforms

        # Apply the declared transforms to every non-None module value.
        self.attribute_values_processed = {}
        for attribute, value in self.attribute_values_dict.items():
            if value is None:
                continue
            if attribute in transforms:
                for transform in self.transforms[attribute]:
                    if transform == 'bool_yes_no':
                        if value is True:
                            value = 'YES'
                        elif value is False:
                            value = 'NO'
                    elif transform == 'bool_on_off':
                        if value is True:
                            value = 'ON'
                        elif value is False:
                            value = 'OFF'
                    elif callable(transform):
                        value = transform(value)
                    else:
                        raise Exception('Invalid transform %s' % transform)
            self.attribute_values_processed[attribute] = value

        self._copy_attributes_to_actual()

    def _copy_attributes_to_actual(self):
        # Push processed read/write values onto the underlying nitro object,
        # JSON-encoding those attributes declared in json_encodes.
        for attribute in self.readwrite_attrs:
            if attribute in self.attribute_values_processed:
                attribute_value = self.attribute_values_processed[attribute]

                if attribute_value is None:
                    continue

                # Fallthrough
                if attribute in self.json_encodes:
                    attribute_value = json.JSONEncoder().encode(attribute_value).strip('"')
                setattr(self.actual, attribute, attribute_value)

    def __getattr__(self, name):
        # Unknown attribute access falls through to the raw module values.
        if name in self.attribute_values_dict:
            return self.attribute_values_dict[name]
        else:
            raise AttributeError('No attribute %s found' % name)

    def add(self):
        # Create the config object on the NetScaler.
        self.actual.__class__.add(self.client, self.actual)

    def update(self):
        # Update the config object on the NetScaler; returns the SDK result.
        return self.actual.__class__.update(self.client, self.actual)

    def delete(self):
        # Delete the config object from the NetScaler.
        self.actual.__class__.delete(self.client, self.actual)

    def get(self, *args, **kwargs):
        # Fetch the config object(s) from the NetScaler.
        result = self.actual.__class__.get(self.client, *args, **kwargs)

        return result

    def has_equal_attributes(self, other):
        # True when diff_object() reports no differences against *other*.
        if self.diff_object(other) == {}:
            return True
        else:
            return False

    def diff_object(self, other):
        """Return a dict describing attribute differences against *other*.

        Compares only the processed read/write attributes; values on *other*
        are cast to the module value's type before comparison.
        """
        diff_dict = {}
        for attribute in self.attribute_values_processed:
            # Skip readonly attributes
            if attribute not in self.readwrite_attrs:
                continue

            # Skip attributes not present in module arguments
            if self.attribute_values_processed[attribute] is None:
                continue

            # Check existence
            if hasattr(other, attribute):
                attribute_value = getattr(other, attribute)
            else:
                diff_dict[attribute] = 'missing from other'
                continue

            # Compare values
            param_type = self.attribute_values_processed[attribute].__class__
            if attribute_value is None or param_type(attribute_value) != self.attribute_values_processed[attribute]:
                str_tuple = (
                    type(self.attribute_values_processed[attribute]),
                    self.attribute_values_processed[attribute],
                    type(attribute_value),
                    attribute_value,
                )
                diff_dict[attribute] = 'difference. ours: (%s) %s other: (%s) %s' % str_tuple
        return diff_dict

    def get_actual_rw_attributes(self, filter='name'):
        # Read back the read/write attributes of the object on the device,
        # located by a nitro filter on *filter* (default: by name).
        if self.actual.__class__.count_filtered(self.client, '%s:%s' % (filter, self.attribute_values_dict[filter])) == 0:
            return {}
        server_list = self.actual.__class__.get_filtered(self.client, '%s:%s' % (filter, self.attribute_values_dict[filter]))
        actual_instance = server_list[0]
        ret_val = {}
        for attribute in self.readwrite_attrs:
            if not hasattr(actual_instance, attribute):
                continue
            ret_val[attribute] = getattr(actual_instance, attribute)
        return ret_val

    def get_actual_ro_attributes(self, filter='name'):
        # Same as get_actual_rw_attributes() but for read-only attributes.
        if self.actual.__class__.count_filtered(self.client, '%s:%s' % (filter, self.attribute_values_dict[filter])) == 0:
            return {}
        server_list = self.actual.__class__.get_filtered(self.client, '%s:%s' % (filter, self.attribute_values_dict[filter]))
        actual_instance = server_list[0]
        ret_val = {}
        for attribute in self.readonly_attrs:
            if not hasattr(actual_instance, attribute):
                continue
            ret_val[attribute] = getattr(actual_instance, attribute)
        return ret_val

    def get_missing_rw_attributes(self):
        # Read/write attributes declared locally but absent on the device.
        return list(set(self.readwrite_attrs) - set(self.get_actual_rw_attributes().keys()))

    def get_missing_ro_attributes(self):
        # Read-only attributes declared locally but absent on the device.
        return list(set(self.readonly_attrs) - set(self.get_actual_ro_attributes().keys()))
|
||||
|
||||
|
||||
def get_immutables_intersection(config_proxy, keys):
    """Return which of *keys* are immutable attributes of *config_proxy*."""
    # Intersect the two name sets and hand back a plain list.
    return list(set(config_proxy.immutable_attrs) & set(keys))
|
||||
|
||||
|
||||
def ensure_feature_is_enabled(client, feature_str):
    """Enable *feature_str* on the NetScaler if not already enabled.

    Saves the configuration after enabling so the change persists.
    """
    enabled_features = client.get_enabled_features() or []

    if feature_str not in enabled_features:
        client.enable_features(feature_str)
        client.save_config()
|
||||
|
||||
|
||||
def get_nitro_client(module):
    # Build a nitro_service client from the module's connection parameters.
    # The SDK import is local so modules fail cleanly when it is missing.
    from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service

    client = nitro_service(module.params['nsip'], module.params['nitro_protocol'])
    client.set_credential(module.params['nitro_user'], module.params['nitro_pass'])
    client.timeout = float(module.params['nitro_timeout'])
    client.certvalidation = module.params['validate_certs']
    return client
|
||||
|
||||
|
||||
# Connection and lifecycle arguments shared by all netscaler modules.
netscaler_common_arguments = dict(
    nsip=dict(
        required=True,
        fallback=(env_fallback, ['NETSCALER_NSIP']),
    ),
    nitro_user=dict(
        required=True,
        fallback=(env_fallback, ['NETSCALER_NITRO_USER']),
        no_log=True
    ),
    nitro_pass=dict(
        required=True,
        fallback=(env_fallback, ['NETSCALER_NITRO_PASS']),
        no_log=True
    ),
    nitro_protocol=dict(
        choices=['http', 'https'],
        fallback=(env_fallback, ['NETSCALER_NITRO_PROTOCOL']),
        default='http'
    ),
    validate_certs=dict(
        default=True,
        type='bool'
    ),
    nitro_timeout=dict(default=310, type='float'),
    state=dict(
        choices=[
            'present',
            'absent',
        ],
        default='present',
    ),
    save_config=dict(
        type='bool',
        default=True,
    ),
)


# In-memory log buffer populated by log(); modules return it in results.
loglines = []
|
||||
|
||||
|
||||
def complete_missing_attributes(actual, attrs_list, fill_value=None):
    """Ensure *actual* has every attribute in *attrs_list*.

    Attributes that are absent are created with *fill_value*; existing
    attributes are left untouched.
    """
    for attribute in attrs_list:
        if hasattr(actual, attribute):
            continue
        setattr(actual, attribute, fill_value)
|
||||
|
||||
|
||||
def log(msg):
    # Collect log lines in memory; modules return them in their results.
    loglines.append(msg)
|
||||
|
||||
|
||||
def get_ns_version(client):
    """Return the (major, minor) NetScaler version, or None if unparsable."""
    from nssrc.com.citrix.netscaler.nitro.resource.config.ns.nsversion import nsversion
    result = nsversion.get(client)
    match = re.match(r'^.*NS(\d+)\.(\d+).*$', result[0].version)
    if match is None:
        return None
    return int(match.group(1)), int(match.group(2))
|
||||
|
||||
|
||||
def get_ns_hardware(client):
    # Fetch the NetScaler hardware description via the nitro SDK.
    from nssrc.com.citrix.netscaler.nitro.resource.config.ns.nshardware import nshardware
    result = nshardware.get(client)
    return result
|
||||
|
||||
|
||||
def monkey_patch_nitro_api():
    # Replace the nitro SDK's serialisation helpers with versions that strip
    # the leading underscore from attribute names and convert bytes/text
    # values safely on both Python 2 and 3.

    from nssrc.com.citrix.netscaler.nitro.resource.base.Json import Json

    def new_resource_to_string_convert(self, resrc):
        # Line below is the actual patch
        dict_valid_values = dict((k.replace('_', '', 1), v) for k, v in resrc.__dict__.items() if v)
        return json.dumps(dict_valid_values)
    Json.resource_to_string_convert = new_resource_to_string_convert

    from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util

    @classmethod
    def object_to_string_new(cls, obj):
        # Serialise truthy attributes into a quoted key:value fragment.
        output = []
        flds = obj.__dict__
        for k, v in ((k.replace('_', '', 1), v) for k, v in flds.items() if v):
            if isinstance(v, bool):
                output.append('"%s":%s' % (k, v))
            elif isinstance(v, (binary_type, text_type)):
                v = to_native(v, errors='surrogate_or_strict')
                output.append('"%s":"%s"' % (k, v))
            elif isinstance(v, int):
                output.append('"%s":"%s"' % (k, v))
        return ','.join(output)

    @classmethod
    def object_to_string_withoutquotes_new(cls, obj):
        # Same as above but unquoted, for URL-style filter strings.
        output = []
        flds = obj.__dict__
        for k, v in ((k.replace('_', '', 1), v) for k, v in flds.items() if v):
            if isinstance(v, (int, bool)):
                output.append('%s:%s' % (k, v))
            elif isinstance(v, (binary_type, text_type)):
                v = to_native(v, errors='surrogate_or_strict')
                output.append('%s:%s' % (k, cls.encode(v)))
        return ','.join(output)

    nitro_util.object_to_string = object_to_string_new
    nitro_util.object_to_string_withoutquotes = object_to_string_withoutquotes_new
|
||||
0
lib/ansible/module_utils/network/nso/__init__.py
Normal file
0
lib/ansible/module_utils/network/nso/__init__.py
Normal file
477
lib/ansible/module_utils/network/nso/nso.py
Normal file
477
lib/ansible/module_utils/network/nso/nso.py
Normal file
@@ -0,0 +1,477 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2017 Cisco and/or its affiliates.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
||||
import json
|
||||
import re
|
||||
|
||||
|
||||
# Connection arguments shared by all NSO modules.
nso_argument_spec = dict(
    url=dict(required=True),
    username=dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME']), required=True),
    password=dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), required=True, no_log=True)
)
|
||||
|
||||
|
||||
class State(object):
    """Symbolic names for the values of the module 'state' argument."""
    SET = 'set'
    PRESENT = 'present'
    ABSENT = 'absent'
    CHECK_SYNC = 'check-sync'
    DEEP_CHECK_SYNC = 'deep-check-sync'
    IN_SYNC = 'in-sync'
    DEEP_IN_SYNC = 'deep-in-sync'

    # States that trigger a synchronisation check rather than config changes.
    SYNC_STATES = ('check-sync', 'deep-check-sync', 'in-sync', 'deep-in-sync')
|
||||
|
||||
|
||||
class ModuleFailException(Exception):
    """Error that aborts module execution with a user-facing message."""

    def __init__(self, message):
        self.message = message
        super(ModuleFailException, self).__init__(message)
|
||||
|
||||
|
||||
class NsoException(Exception):
    """Error raised for failures reported by the NSO JSON-RPC API.

    *error* carries the structured error object from the RPC response.
    """

    def __init__(self, message, error):
        self.message = message
        self.error = error
        super(NsoException, self).__init__(message)
|
||||
|
||||
|
||||
class JsonRpc(object):
|
||||
def __init__(self, url):
|
||||
self._url = url
|
||||
|
||||
self._id = 0
|
||||
self._trans = {}
|
||||
self._headers = {'Content-Type': 'application/json'}
|
||||
self._conn = None
|
||||
|
||||
def login(self, user, passwd):
|
||||
payload = {
|
||||
'method': 'login',
|
||||
'params': {'user': user, 'passwd': passwd}
|
||||
}
|
||||
resp, resp_json = self._call(payload)
|
||||
self._headers['Cookie'] = resp.headers['set-cookie']
|
||||
|
||||
def logout(self):
|
||||
payload = {'method': 'logout', 'params': {}}
|
||||
self._call(payload)
|
||||
|
||||
def get_system_setting(self, setting):
|
||||
payload = {'method': 'get_system_setting', 'params': {'operation': setting}}
|
||||
resp, resp_json = self._call(payload)
|
||||
return resp_json['result']
|
||||
|
||||
def new_trans(self, **kwargs):
|
||||
payload = {'method': 'new_trans', 'params': kwargs}
|
||||
resp, resp_json = self._call(payload)
|
||||
return resp_json['result']['th']
|
||||
|
||||
def delete_trans(self, th):
|
||||
payload = {'method': 'delete_trans', 'params': {'th': th}}
|
||||
resp, resp_json = self._call(payload)
|
||||
|
||||
def validate_trans(self, th):
|
||||
payload = {'method': 'validate_trans', 'params': {'th': th}}
|
||||
resp, resp_json = self._write_call(payload)
|
||||
return resp_json['result']
|
||||
|
||||
def get_trans_changes(self, th):
|
||||
payload = {'method': 'get_trans_changes', 'params': {'th': th}}
|
||||
resp, resp_json = self._write_call(payload)
|
||||
return resp_json['result']['changes']
|
||||
|
||||
def validate_commit(self, th):
|
||||
payload = {'method': 'validate_commit', 'params': {'th': th}}
|
||||
resp, resp_json = self._write_call(payload)
|
||||
return resp_json['result'].get('warnings', [])
|
||||
|
||||
def commit(self, th):
|
||||
payload = {'method': 'commit', 'params': {'th': th}}
|
||||
resp, resp_json = self._write_call(payload)
|
||||
return resp_json['result']
|
||||
|
||||
def get_schema(self, **kwargs):
|
||||
payload = {'method': 'get_schema', 'params': kwargs}
|
||||
resp, resp_json = self._read_call(payload)
|
||||
return resp_json['result']
|
||||
|
||||
def get_module_prefix_map(self):
|
||||
payload = {'method': 'get_module_prefix_map', 'params': {}}
|
||||
resp, resp_json = self._call(payload)
|
||||
return resp_json['result']
|
||||
|
||||
def get_value(self, path):
|
||||
payload = {
|
||||
'method': 'get_value',
|
||||
'params': {'path': path}
|
||||
}
|
||||
resp, resp_json = self._read_call(payload)
|
||||
return resp_json['result']
|
||||
|
||||
def exists(self, path):
|
||||
payload = {'method': 'exists', 'params': {'path': path}}
|
||||
resp, resp_json = self._read_call(payload)
|
||||
return resp_json['result']['exists']
|
||||
|
||||
def create(self, th, path):
|
||||
payload = {'method': 'create', 'params': {'th': th, 'path': path}}
|
||||
self._write_call(payload)
|
||||
|
||||
def delete(self, th, path):
    """Delete the node at *path* inside transaction *th*."""
    self._write_call({'method': 'delete', 'params': {'th': th, 'path': path}})
def set_value(self, th, path, value):
    """Assign *value* to the leaf at *path* within transaction *th*."""
    request = {
        'method': 'set_value',
        'params': {'th': th, 'path': path, 'value': value},
    }
    dummy, body = self._write_call(request)
    return body['result']
def run_action(self, th, path, params=None):
    """Invoke the NSO action at keypath *path*.

    When *th* is None the action runs in an implicit read transaction;
    otherwise it runs inside the supplied transaction handle. *params*
    are the action's input parameters (defaults to an empty dict).
    """
    action_params = {} if params is None else params

    request = {
        'method': 'run_action',
        'params': {
            'format': 'normal',
            'path': path,
            'params': action_params
        }
    }
    if th is None:
        dummy, body = self._read_call(request)
    else:
        request['params']['th'] = th
        dummy, body = self._call(request)

    return body['result']
def _call(self, payload):
    """POST a JSON-RPC *payload* to the NSO server and decode the reply.

    Fills in 'id' and 'jsonrpc' fields when absent, raises NsoException
    for non-200 HTTP responses or JSON-RPC level errors, and returns the
    (response, decoded-json) pair.
    """
    self._id += 1
    if 'id' not in payload:
        payload['id'] = self._id

    if 'jsonrpc' not in payload:
        payload['jsonrpc'] = '2.0'

    data = json.dumps(payload)
    resp = open_url(
        self._url, method='POST', data=data, headers=self._headers)
    if resp.code != 200:
        # Bug fix: the message previously formatted resp.status, which is
        # not available on all response objects (resp.code is what the
        # check above uses); formatting it could raise AttributeError.
        raise NsoException(
            'NSO returned HTTP code {0}, expected 200'.format(resp.code), {})

    resp_body = resp.read()
    resp_json = json.loads(resp_body)

    if 'error' in resp_json:
        self._handle_call_error(payload, resp_json)
    return resp, resp_json
def _handle_call_error(self, payload, resp_json):
    """Translate a JSON-RPC error response into a raised NsoException.

    Parameter-related error types get a message naming the offending
    parameter and keypath; anything else reports the raw error object.
    """
    method = payload['method']
    error = resp_json['error']
    error_type = error['type'][len('rpc.method.'):]

    param_error_types = (
        'unexpected_params',
        'unknown_params_value',
        'invalid_params',
        'invalid_params_type',
        'data_not_found',
    )
    if error_type in param_error_types:
        key = error['data']['param']
        error_type_s = error_type.replace('_', ' ')
        if key == 'path':
            msg = 'NSO {0} {1}. path = {2}'.format(
                method, error_type_s, payload['params']['path'])
        else:
            path = payload['params'].get('path', 'unknown')
            msg = 'NSO {0} {1}. path = {2}. {3} = {4}'.format(
                method, error_type_s, path, key, payload['params'][key])
    else:
        msg = 'NSO {0} returned JSON-RPC error: {1}'.format(method, error)

    raise NsoException(msg, error)
def _read_call(self, payload):
    """Dispatch *payload*, defaulting 'th' to the cached read transaction."""
    params = payload['params']
    if 'th' not in params:
        params['th'] = self._get_th(mode='read')
    return self._call(payload)
def _write_call(self, payload):
    """Dispatch *payload*, defaulting 'th' to the cached read-write transaction."""
    params = payload['params']
    if 'th' not in params:
        params['th'] = self._get_th(mode='read_write')
    return self._call(payload)
def _get_th(self, mode='read'):
    """Return a cached transaction handle for *mode*, creating one on demand."""
    if mode not in self._trans:
        self._trans[mode] = self.new_trans(mode=mode)
    return self._trans[mode]
class ValueBuilder(object):
    """Flatten Ansible module input into ordered (path, state, value)
    records that can later be applied to an NSO transaction.

    The builder consults the server (via *client*) for schema nodes and
    existence checks, and caches both schemas and the module prefix map.
    """

    class Value(object):
        # One pending change: NSO keypath, desired state, and leaf value.
        __slots__ = ['path', 'state', 'value']

        def __init__(self, path, state, value):
            self.path = path
            self.state = state
            self.value = value

        def __lt__(self, rhs):
            # Sort by keypath depth first so parents come before their
            # children; fall back to lexical order at equal depth.
            l_len = len(self.path.split('/'))
            r_len = len(rhs.path.split('/'))
            if l_len == r_len:
                return self.path.__lt__(rhs.path)
            return l_len < r_len

        def __str__(self):
            return 'Value<path={0}, state={1}, value={2}>'.format(
                self.path, self.state, self.value)

    def __init__(self, client):
        # client: JsonRpc-style object used for schema lookups and
        # existence checks.
        self._client = client
        self._schema_cache = {}
        self._module_prefix_map_cache = None
        self._values = []
        self._values_dirty = False
        # Strips '{key}' list-entry selectors from keypaths for caching.
        self._path_re = re.compile('{[^}]*}')

    def build(self, parent, maybe_qname, value, schema=None):
        """Recursively record the changes implied by *value*.

        maybe_qname is None (value applies to *parent* itself) or a
        possibly prefix-qualified child name; *schema* may be a
        pre-fetched schema node for the target path.
        """
        qname, name = self._get_prefix_name(maybe_qname)
        if name is None:
            path = parent
        else:
            path = '{0}/{1}'.format(parent, qname)

        if schema is None:
            schema = self._get_schema(path)

        if self._is_leaf(schema):
            if self._is_empty_leaf(schema):
                # 'empty' typed leafs are toggled by presence; the module
                # encodes "should exist" as the value [None].
                exists = self._client.exists(path)
                if exists and value != [None]:
                    self._add_value(path, State.ABSENT, None)
                elif not exists and value == [None]:
                    self._add_value(path, State.PRESENT, None)
            else:
                value_type = self._get_type(parent, maybe_qname)
                if value_type == 'identityref':
                    # Rewrite identity values to their prefixed form.
                    value, t_value = self._get_prefix_name(value)
                self._add_value(path, State.SET, value)
        elif isinstance(value, dict):
            self._build_dict(path, schema, value)
        elif isinstance(value, list):
            self._build_list(path, schema, value)
        else:
            raise ModuleFailException(
                'unsupported schema {0} at {1}'.format(
                    schema['kind'], path))

    @property
    def values(self):
        # Values sorted parent-first; sorting is deferred until read.
        if self._values_dirty:
            self._values.sort()
            self._values_dirty = False

        return self._values

    def _build_dict(self, path, schema, value):
        # Recurse into container members, skipping list keys (already
        # encoded in the entry path) and the '__state' pseudo key.
        keys = schema.get('key', [])
        for dict_key, dict_value in value.items():
            qname, name = self._get_prefix_name(dict_key)
            if dict_key in ('__state', ) or name in keys:
                continue

            child_schema = self._find_child(path, schema, qname)
            self.build(path, dict_key, dict_value, child_schema)

    def _build_list(self, path, schema, value):
        # Each entry is addressed as path{key ...}; the '__state' pseudo
        # key selects presence/absence or a sync state for the entry.
        for entry in value:
            entry_key = self._build_key(path, entry, schema['key'])
            entry_path = '{0}{{{1}}}'.format(path, entry_key)
            entry_state = entry.get('__state', 'present')
            entry_exists = self._client.exists(entry_path)

            if entry_state == 'absent':
                if entry_exists:
                    self._add_value(entry_path, State.ABSENT, None)
            else:
                if not entry_exists:
                    self._add_value(entry_path, State.PRESENT, None)
                if entry_state in State.SYNC_STATES:
                    self._add_value(entry_path, entry_state, None)

            self.build(entry_path, None, entry)

    def _build_key(self, path, entry, schema_keys):
        # Compose the space-separated '{k1 k2}' entry selector from the
        # schema's key leafs; every key must be present in the data.
        key_parts = []
        for key in schema_keys:
            value = entry.get(key, None)
            if value is None:
                raise ModuleFailException(
                    'required leaf {0} in {1} not set in data'.format(
                        key, path))

            value_type = self._get_type(path, key)
            if value_type == 'identityref':
                value, t_value = self._get_prefix_name(value)
            key_parts.append(self._quote_key(value))
        return ' '.join(key_parts)

    def _quote_key(self, key):
        # Render a key value as keypath text, backslash-escaping
        # characters significant inside '{...}' selectors.
        if isinstance(key, bool):
            return key and 'true' or 'false'

        q_key = []
        for c in str(key):
            if c in ('{', '}', "'", '\\'):
                q_key.append('\\')
            q_key.append(c)
        q_key = ''.join(q_key)
        if ' ' in q_key:
            # NOTE(review): this branch returns q_key unchanged; it looks
            # as if it was meant to wrap space-containing keys in quotes
            # -- confirm against NSO keypath syntax.
            return '{0}'.format(q_key)
        return q_key

    def _find_child(self, path, schema, qname):
        # Refresh the schema when the cached node lacks child data.
        if 'children' not in schema:
            schema = self._get_schema(path)

        # look for the qualified name if : is in the name
        child_schema = self._get_child(schema, qname)
        if child_schema is not None:
            return child_schema

        # no child was found, look for a choice with a child matching
        for child_schema in schema['children']:
            if child_schema['kind'] != 'choice':
                continue
            choice_child_schema = self._get_choice_child(child_schema, qname)
            if choice_child_schema is not None:
                return choice_child_schema

        raise ModuleFailException(
            'no child in {0} with name {1}. children {2}'.format(
                path, qname, ','.join((c.get('qname', c.get('name', None)) for c in schema['children']))))

    def _add_value(self, path, state, value):
        self._values.append(ValueBuilder.Value(path, state, value))
        # Mark the list for re-sorting before the next 'values' read.
        self._values_dirty = True

    def _get_prefix_name(self, qname):
        # Return (prefixed-name, bare-name); (None, None) for None input,
        # identical values for unqualified names.
        if qname is None:
            return None, None
        if ':' not in qname:
            return qname, qname

        module_prefix_map = self._get_module_prefix_map()
        module, name = qname.split(':', 1)
        if module not in module_prefix_map:
            raise ModuleFailException(
                'no module mapping for module {0}. loaded modules {1}'.format(
                    module, ','.join(sorted(module_prefix_map.keys()))))

        return '{0}:{1}'.format(module_prefix_map[module], name), name

    def _get_schema(self, path):
        # Schema 'data' node for *path* (cached).
        return self._ensure_schema_cached(path)['data']

    def _get_type(self, parent_path, key):
        # Resolve the leaf type name for *key* under *parent_path*,
        # following derived types through the schema meta information.
        all_schema = self._ensure_schema_cached(parent_path)
        parent_schema = all_schema['data']
        meta = all_schema['meta']

        schema = self._get_child(parent_schema, key)
        if self._is_leaf(schema):
            path_type = schema['type']
            if path_type.get('primitive', False):
                return path_type['name']
            else:
                path_type_key = '{0}:{1}'.format(
                    path_type['namespace'], path_type['name'])
                type_info = meta['types'][path_type_key]
                # The last entry in the derivation chain is the base type.
                return type_info[-1]['name']
        return None

    def _ensure_schema_cached(self, path):
        # Schemas are cached per keypath with list selectors removed.
        path = self._path_re.sub('', path)
        if path not in self._schema_cache:
            schema = self._client.get_schema(path=path, levels=1)
            self._schema_cache[path] = schema
        return self._schema_cache[path]

    def _get_module_prefix_map(self):
        # Lazily fetched and cached for the builder's lifetime.
        if self._module_prefix_map_cache is None:
            self._module_prefix_map_cache = self._client.get_module_prefix_map()
        return self._module_prefix_map_cache

    def _get_child(self, schema, qname):
        # no child specified, return parent
        if qname is None:
            return schema

        name_key = ':' in qname and 'qname' or 'name'
        return next((c for c in schema['children']
                     if c.get(name_key, None) == qname), None)

    def _get_choice_child(self, schema, qname):
        # Search every case of a 'choice' node for a child named *qname*.
        name_key = ':' in qname and 'qname' or 'name'
        for child_case in schema['cases']:
            choice_child_schema = next(
                (c for c in child_case['children']
                 if c.get(name_key, None) == qname), None)
            if choice_child_schema is not None:
                return choice_child_schema
        return None

    def _is_leaf(self, schema):
        # 'key' nodes are leafs for typing purposes.
        return schema.get('kind', None) in ('key', 'leaf', 'leaf-list')

    def _is_empty_leaf(self, schema):
        # Leafs of YANG type 'empty' carry no value, only presence.
        return (schema.get('kind', None) == 'leaf' and
                schema['type'].get('primitive', False) and
                schema['type'].get('name', '') == 'empty')
def connect(params):
    # Build a JSON-RPC client from the 'url' parameter and authenticate
    # with 'username'/'password' before returning it.
    client = JsonRpc(params['url'])
    client.login(params['username'], params['password'])
    return client
def verify_version(client):
    """Fail unless the connected NSO server is version 4.4.3 or later.

    Reads the server's 'version' system setting and raises
    ModuleFailException for malformed or too-old versions.
    """
    version_str = client.get_system_setting('version')
    version = [int(p) for p in version_str.split('.')]
    if len(version) < 2:
        raise ModuleFailException(
            'unsupported NSO version format {0}'.format(version_str))

    # Bug fix: compare (major, minor, patch) as a tuple. The previous
    # expression rejected any release with minor < 4 (e.g. 5.0) even
    # though its major version exceeded the minimum.
    patch = version[2] if len(version) > 2 else 0
    if (version[0], version[1], patch) < (4, 4, 3):
        raise ModuleFailException(
            'unsupported NSO version {0}, only 4.4.3 or later is supported'.format(version_str))
||||
0
lib/ansible/module_utils/network/nxos/__init__.py
Normal file
0
lib/ansible/module_utils/network/nxos/__init__.py
Normal file
438
lib/ansible/module_utils/network/nxos/nxos.py
Normal file
438
lib/ansible/module_utils/network/nxos/nxos.py
Normal file
@@ -0,0 +1,438 @@
|
||||
#
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
#
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
|
||||
import collections
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
from ansible.module_utils.six import iteritems, string_types
|
||||
from ansible.module_utils.urls import fetch_url
|
||||
|
||||
# Process-wide cached device connection; populated by get_connection().
_DEVICE_CONNECTION = None

# Options accepted under the 'provider' dictionary; credential options
# fall back to ANSIBLE_NET_* environment variables.
nxos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),

    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE'])),

    'use_ssl': dict(type='bool'),
    'validate_certs': dict(type='bool'),

    'timeout': dict(type='int'),

    'transport': dict(default='cli', choices=['cli', 'nxapi'])
}
nxos_argument_spec = {
    'provider': dict(type='dict', options=nxos_provider_spec),
}
# Deprecated top-level copies of the provider options, scheduled for
# removal in Ansible 2.9; kept for backwards compatibility.
nxos_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),

    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9),

    'use_ssl': dict(removed_in_version=2.9, type='bool'),
    'validate_certs': dict(removed_in_version=2.9, type='bool'),
    'timeout': dict(removed_in_version=2.9, type='int'),

    'transport': dict(removed_in_version=2.9, choices=['cli', 'nxapi'])
}
nxos_argument_spec.update(nxos_top_spec)
def get_provider_argspec():
    # Expose the provider option spec for reuse (e.g. by action plugins).
    return nxos_provider_spec
def check_args(module, warnings):
    # Hook for platform-specific argument validation; nothing to check
    # for nxos, kept for interface parity with other platforms.
    pass
def load_params(module):
    """Promote 'provider' sub-options to top-level module parameters.

    Only keys known to nxos_provider_spec are considered, and an
    explicit top-level value always wins over the provider value.
    """
    provider = module.params.get('provider') or dict()
    for name, provider_value in iteritems(provider):
        known = name in nxos_provider_spec
        unset = module.params.get(name) is None
        if known and unset and provider_value is not None:
            module.params[name] = provider_value
def get_connection(module):
    # Return (and lazily create) the process-wide device connection,
    # selecting the Nxapi or Cli transport from the module parameters.
    global _DEVICE_CONNECTION
    if not _DEVICE_CONNECTION:
        load_params(module)
        if is_nxapi(module):
            conn = Nxapi(module)
        else:
            conn = Cli(module)
        _DEVICE_CONNECTION = conn
    return _DEVICE_CONNECTION
class Cli:
    """CLI (ssh) transport for nxos modules, driven through the
    persistent connection's exec_command interface.
    """

    def __init__(self, module):
        self._module = module
        # Cache of 'show running-config ...' output keyed by command.
        self._device_configs = {}

    def exec_command(self, command):
        # Dict commands (carrying prompt/answer) are serialized to JSON
        # for the connection plugin.
        if isinstance(command, dict):
            command = self._module.jsonify(command)
        return exec_command(self._module, command)

    def get_config(self, flags=None):
        """Retrieves the current config from the device or cache
        """
        flags = [] if flags is None else flags

        cmd = 'show running-config '
        cmd += ' '.join(flags)
        cmd = cmd.strip()

        try:
            return self._device_configs[cmd]
        except KeyError:
            rc, out, err = self.exec_command(cmd)
            if rc != 0:
                self._module.fail_json(msg=to_text(err))
            try:
                cfg = to_text(out, errors='surrogate_or_strict').strip()
            except UnicodeError as e:
                self._module.fail_json(msg=u'Failed to decode config: %s' % to_text(out))

            self._device_configs[cmd] = cfg
            return cfg

    def run_commands(self, commands, check_rc=True):
        """Run list of commands on remote device and return results
        """
        responses = list()

        for item in to_list(commands):
            # Reconcile the requested output format with the presence of
            # an explicit '| json' suffix on the command text.
            if item['output'] == 'json' and not is_json(item['command']):
                cmd = '%s | json' % item['command']
            elif item['output'] == 'text' and is_json(item['command']):
                cmd = item['command'].rsplit('|', 1)[0]
            else:
                cmd = item['command']

            rc, out, err = self.exec_command(cmd)
            try:
                out = to_text(out, errors='surrogate_or_strict')
            except UnicodeError:
                self._module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))

            if check_rc and rc != 0:
                self._module.fail_json(msg=to_text(err))

            # On failure with check_rc disabled prefer structured stderr;
            # otherwise decode stdout, falling back to plain text.
            if not check_rc and rc != 0:
                try:
                    out = self._module.from_json(err)
                except ValueError:
                    out = to_text(err).strip()
            else:
                try:
                    out = self._module.from_json(out)
                except ValueError:
                    out = to_text(out).strip()

            if item['output'] == 'json' and out != '' and isinstance(out, string_types):
                self._module.fail_json(msg='failed to retrieve output of %s in json format' % item['command'])

            responses.append(out)
        return responses

    def load_config(self, config, return_error=False, opts=None):
        """Sends configuration commands to the remote device
        """
        if opts is None:
            opts = {}

        rc, out, err = self.exec_command('configure')
        if rc != 0:
            self._module.fail_json(msg='unable to enter configuration mode', output=to_text(err))

        msgs = []
        for cmd in config:
            rc, out, err = self.exec_command(cmd)
            # rc == 1 with ignore_timeout means the command timed out;
            # return the messages collected so far instead of failing.
            if opts.get('ignore_timeout') and rc == 1:
                msgs.append(err)
                return msgs
            elif rc != 0:
                self._module.fail_json(msg=to_text(err))
            elif out:
                msgs.append(out)

        self.exec_command('end')
        return msgs
class Nxapi:
    """HTTP(S) NX-API transport for nxos modules."""

    # Maps the requested output format to the NX-API command type.
    OUTPUT_TO_COMMAND_TYPE = {
        'text': 'cli_show_ascii',
        'json': 'cli_show',
        'bash': 'bash',
        'config': 'cli_conf'
    }

    def __init__(self, module):
        self._module = module
        # NX-API session cookie, refreshed from each response.
        self._nxapi_auth = None
        self._device_configs = {}

        # fetch_url reads credentials from url_username/url_password.
        self._module.params['url_username'] = self._module.params['username']
        self._module.params['url_password'] = self._module.params['password']

        host = self._module.params['host']
        port = self._module.params['port']

        if self._module.params['use_ssl']:
            proto = 'https'
            port = port or 443
        else:
            proto = 'http'
            port = port or 80

        self._url = '%s://%s:%s/ins' % (proto, host, port)

    def _error(self, msg, **kwargs):
        # Drop the session cookie before failing so the next request
        # re-authenticates from scratch.
        self._nxapi_auth = None
        if 'url' not in kwargs:
            kwargs['url'] = self._url
        self._module.fail_json(msg=msg, **kwargs)

    def _request_builder(self, commands, output, version='1.0', chunk='0', sid=None):
        """Encodes a NXAPI JSON request message
        """
        try:
            command_type = self.OUTPUT_TO_COMMAND_TYPE[output]
        except KeyError:
            msg = 'invalid format, received %s, expected one of %s' % \
                (output, ','.join(self.OUTPUT_TO_COMMAND_TYPE.keys()))
            self._error(msg=msg)

        # NX-API takes multiple commands as a single ' ;' separated string.
        if isinstance(commands, (list, set, tuple)):
            commands = ' ;'.join(commands)

        msg = {
            'version': version,
            'type': command_type,
            'chunk': chunk,
            'sid': sid,
            'input': commands,
            'output_format': 'json'
        }

        return dict(ins_api=msg)

    def send_request(self, commands, output='text', check_status=True,
                     return_error=False, opts=None):
        # only 10 show commands can be encoded in each request
        # messages sent to the remote device
        if opts is None:
            opts = {}
        if output != 'config':
            # Batch show commands into requests of at most 10 commands.
            commands = collections.deque(to_list(commands))
            stack = list()
            requests = list()

            while commands:
                stack.append(commands.popleft())
                if len(stack) == 10:
                    body = self._request_builder(stack, output)
                    data = self._module.jsonify(body)
                    requests.append(data)
                    stack = list()

            if stack:
                body = self._request_builder(stack, output)
                data = self._module.jsonify(body)
                requests.append(data)

        else:
            # Configuration commands go in a single cli_conf request.
            body = self._request_builder(commands, 'config')
            requests = [self._module.jsonify(body)]

        headers = {'Content-Type': 'application/json'}
        result = list()
        timeout = self._module.params['timeout']

        for req in requests:
            if self._nxapi_auth:
                headers['Cookie'] = self._nxapi_auth

            response, headers = fetch_url(
                self._module, self._url, data=req, headers=headers,
                timeout=timeout, method='POST'
            )
            self._nxapi_auth = headers.get('set-cookie')

            # status == -1 signals a connection-level timeout from fetch_url.
            if opts.get('ignore_timeout') and headers['status'] == -1:
                result.append(headers['msg'])
                return result
            elif headers['status'] != 200:
                self._error(**headers)

            try:
                response = self._module.from_json(response.read())
            except ValueError:
                self._module.fail_json(msg='unable to parse response')

            if response['ins_api'].get('outputs'):
                output = response['ins_api']['outputs']['output']
                for item in to_list(output):
                    if check_status and item['code'] != '200':
                        if return_error:
                            result.append(item)
                        else:
                            self._error(output=output, **item)
                    elif 'body' in item:
                        result.append(item['body'])
                    # else:
                        # error in command but since check_status is disabled
                        # silently drop it.
                        # result.append(item['msg'])

        return result

    def get_config(self, flags=None):
        """Retrieves the current config from the device or cache
        """
        flags = [] if flags is None else flags

        cmd = 'show running-config '
        cmd += ' '.join(flags)
        cmd = cmd.strip()

        try:
            return self._device_configs[cmd]
        except KeyError:
            out = self.send_request(cmd)
            cfg = str(out[0]).strip()
            self._device_configs[cmd] = cfg
            return cfg

    def run_commands(self, commands, check_rc=True):
        """Run list of commands on remote device and return results
        """
        output = None
        queue = list()
        responses = list()

        def _send(commands, output):
            return self.send_request(commands, output, check_status=check_rc)

        for item in to_list(commands):
            # An explicit '| json' suffix is stripped and expressed via
            # the NX-API output format instead.
            if is_json(item['command']):
                item['command'] = str(item['command']).rsplit('|', 1)[0]
                item['output'] = 'json'

            # Flush the queue whenever the requested output format flips,
            # since one NX-API request carries a single format.
            if all((output == 'json', item['output'] == 'text')) or all((output == 'text', item['output'] == 'json')):
                responses.extend(_send(queue, output))
                queue = list()

            output = item['output'] or 'json'
            queue.append(item['command'])

        if queue:
            responses.extend(_send(queue, output))

        return responses

    def load_config(self, commands, return_error=False, opts=None):
        """Sends the ordered set of commands to the device
        """
        commands = to_list(commands)
        msg = self.send_request(commands, output='config', check_status=True,
                                return_error=return_error, opts=opts)
        if return_error:
            return msg
        else:
            return []
def is_json(cmd):
    """Return True when *cmd* is piped to the device's JSON output filter."""
    text = str(cmd)
    return text.endswith('| json')
def is_text(cmd):
    """Return True unless *cmd* requests JSON output via '| json'."""
    if is_json(cmd):
        return False
    return True
def is_nxapi(module):
    """Return True when the play selects the nxapi transport, either at
    the top level or inside the 'provider' dictionary."""
    provider = module.params['provider'] or {}
    candidates = (module.params['transport'], provider.get('transport'))
    return 'nxapi' in candidates
def to_command(module, commands):
    """Normalize *commands* into dicts with command/output/prompt/answer
    keys, defaulting output to 'json' for nxapi and 'text' for the CLI.
    """
    if is_nxapi(module):
        default_output = 'json'
    else:
        default_output = 'text'

    transform = ComplexList(dict(
        command=dict(key=True),
        output=dict(default=default_output),
        prompt=dict(),
        answer=dict()
    ), module)

    commands = transform(to_list(commands))

    for item in commands:
        # An explicit '| json' suffix always forces json output.
        if is_json(item['command']):
            item['output'] = 'json'

    return commands
def get_config(module, flags=None):
    # Return the device running-config via the shared connection; flags
    # are extra tokens appended to 'show running-config'.
    flags = [] if flags is None else flags

    conn = get_connection(module)
    return conn.get_config(flags)
def run_commands(module, commands, check_rc=True):
    # Run *commands* on the device after normalizing with to_command().
    conn = get_connection(module)
    return conn.run_commands(to_command(module, commands), check_rc)
def load_config(module, config, return_error=False, opts=None):
    # Push configuration lines to the device via the shared connection.
    conn = get_connection(module)
    return conn.load_config(config, return_error, opts)
19
lib/ansible/module_utils/network/ordnance/ordnance.py
Normal file
19
lib/ansible/module_utils/network/ordnance/ordnance.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# Per-process cache of 'show running-config' output keyed by command.
_DEVICE_CONFIGS = {}
def get_config(module, flags=None):
    """Return (and cache) the device running configuration.

    flags: extra tokens appended to 'show running-config'.
    """
    flags = [] if flags is None else flags

    cmd = 'show running-config '
    cmd += ' '.join(flags)
    cmd = cmd.strip()

    try:
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        # NOTE(review): sibling platform modules call the module-level
        # exec_command(module, cmd) helper; confirm that 'module' here
        # really provides an exec_command method.
        rc, out, err = module.exec_command(cmd)
        if rc != 0:
            module.fail_json(msg='unable to retrieve current config', stderr=err)
        cfg = str(out).strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
0
lib/ansible/module_utils/network/sros/__init__.py
Normal file
0
lib/ansible/module_utils/network/sros/__init__.py
Normal file
111
lib/ansible/module_utils/network/sros/sros.py
Normal file
111
lib/ansible/module_utils/network/sros/sros.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# Copyright (c) 2016 Peter Sprygada, <psprygada@ansible.com>
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright
|
||||
# notice,
|
||||
# this list of conditions and the following disclaimer in the
|
||||
# documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import re
|
||||
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list, ComplexList
|
||||
from ansible.module_utils.connection import exec_command
|
||||
|
||||
# Per-process cache of 'admin display-config' output keyed by command.
_DEVICE_CONFIGS = {}

# Options accepted under the 'provider' dictionary; credential options
# fall back to ANSIBLE_NET_* environment variables.
sros_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),
    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),
    'timeout': dict(type='int'),
}
sros_argument_spec = {
    'provider': dict(type='dict', options=sros_provider_spec),
}
# Deprecated top-level copies of the provider options, scheduled for
# removal in Ansible 2.9.
sros_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),
    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),
    'timeout': dict(removed_in_version=2.9, type='int'),
}
sros_argument_spec.update(sros_top_spec)
def check_args(module, warnings):
    # Hook for platform-specific argument validation; nothing to check
    # for sros, kept for interface parity with other platforms.
    pass
def get_config(module, flags=None):
    """Return (and cache) the device configuration via 'admin display-config'.

    flags: extra tokens appended to the command.
    """
    flags = [] if flags is None else flags

    cmd = 'admin display-config '
    cmd += ' '.join(flags)
    cmd = cmd.strip()

    try:
        return _DEVICE_CONFIGS[cmd]
    except KeyError:
        rc, out, err = exec_command(module, cmd)
        if rc != 0:
            module.fail_json(msg='unable to retrieve current config', stderr=to_text(err, errors='surrogate_or_strict'))
        cfg = to_text(out, errors='surrogate_or_strict').strip()
        _DEVICE_CONFIGS[cmd] = cfg
        return cfg
def to_commands(module, commands):
    """Normalize *commands* into command/prompt/answer dictionaries."""
    transform = ComplexList({
        'command': dict(key=True),
        'prompt': dict(),
        'answer': dict()
    }, module)
    return transform(commands)
def run_commands(module, commands, check_rc=True):
    """Execute each command on the device and return decoded output.

    Fails the module when check_rc is True and a command exits non-zero.
    """
    responses = list()
    commands = to_commands(module, to_list(commands))
    for cmd in commands:
        # Command dicts are serialized to JSON for the connection plugin.
        cmd = module.jsonify(cmd)
        rc, out, err = exec_command(module, cmd)
        if check_rc and rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'), rc=rc)
        responses.append(to_text(out, errors='surrogate_or_strict'))
    return responses
def load_config(module, commands):
    """Apply configuration *commands* on the device, then leave config mode.

    Fails the module on the first command that returns a non-zero rc.
    """
    for command in to_list(commands):
        rc, out, err = exec_command(module, command)
        if rc != 0:
            module.fail_json(msg=to_text(err, errors='surrogate_or_strict'),
                             command=command, rc=rc)
    # Always return to operational mode when done.
    exec_command(module, 'exit all')
|
||||
0
lib/ansible/module_utils/network/vyos/__init__.py
Normal file
0
lib/ansible/module_utils/network/vyos/__init__.py
Normal file
156
lib/ansible/module_utils/network/vyos/vyos.py
Normal file
156
lib/ansible/module_utils/network/vyos/vyos.py
Normal file
@@ -0,0 +1,156 @@
|
||||
# This code is part of Ansible, but is an independent component.
|
||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
||||
# still belong to the author of the module, and may assign their own license
|
||||
# to the complete work.
|
||||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification,
|
||||
# are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
import json
|
||||
from ansible.module_utils._text import to_text
|
||||
from ansible.module_utils.basic import env_fallback, return_values
|
||||
from ansible.module_utils.network.common.utils import to_list
|
||||
from ansible.module_utils.connection import Connection
|
||||
|
||||
# Module-level cache of the device configuration, populated by get_config().
_DEVICE_CONFIGS = {}

# Connection options accepted under the 'provider' key; credentials fall
# back to ANSIBLE_NET_* environment variables.
vyos_provider_spec = {
    'host': dict(),
    'port': dict(type='int'),

    'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
    'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
    'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE']), type='path'),

    'timeout': dict(type='int'),
}
# Argument spec shared by the vyos modules.
vyos_argument_spec = {
    'provider': dict(type='dict', options=vyos_provider_spec),
}
# Legacy top-level connection options kept for backward compatibility and
# flagged for removal in Ansible 2.9.
vyos_top_spec = {
    'host': dict(removed_in_version=2.9),
    'port': dict(removed_in_version=2.9, type='int'),

    'username': dict(removed_in_version=2.9),
    'password': dict(removed_in_version=2.9, no_log=True),
    'ssh_keyfile': dict(removed_in_version=2.9, type='path'),

    'timeout': dict(removed_in_version=2.9, type='int'),
}
vyos_argument_spec.update(vyos_top_spec)
|
||||
|
||||
|
||||
def get_provider_argspec():
    """Return the vyos 'provider' argument spec."""
    return vyos_provider_spec
|
||||
|
||||
|
||||
def get_connection(module):
    """Return a Connection to the device, cached on the module object.

    Fails the module unless the negotiated network_api is 'cliconf'.
    """
    connection = getattr(module, '_vyos_connection', None)
    if connection is not None:
        return connection

    network_api = get_capabilities(module).get('network_api')
    if network_api != 'cliconf':
        module.fail_json(msg='Invalid connection type %s' % network_api)

    module._vyos_connection = Connection(module._socket_path)
    return module._vyos_connection
|
||||
|
||||
|
||||
def get_capabilities(module):
    """Return the device capabilities dict, cached on the module object."""
    if not hasattr(module, '_vyos_capabilities'):
        raw = Connection(module._socket_path).get_capabilities()
        module._vyos_capabilities = json.loads(raw)
    return module._vyos_capabilities
|
||||
|
||||
|
||||
def get_config(module):
    """Return the device configuration, fetched once and cached globally."""
    global _DEVICE_CONFIGS

    # The cache starts life as an empty dict and is replaced by the config
    # text after the first fetch; the explicit comparison preserves that.
    if _DEVICE_CONFIGS != {}:
        return _DEVICE_CONFIGS

    out = get_connection(module).get_config()
    _DEVICE_CONFIGS = to_text(out, errors='surrogate_then_replace').strip()
    return _DEVICE_CONFIGS
|
||||
|
||||
|
||||
def run_commands(module, commands, check_rc=True):
    """Run each command on the device and return the decoded responses.

    Each entry in *commands* may be a plain command string or a JSON object
    with 'command', 'prompt' and 'answer' keys; anything that does not parse
    as such an object is sent verbatim with no prompt handling.

    :param module: AnsibleModule instance (provides the cliconf socket).
    :param commands: a single command or a list of commands.
    :param check_rc: accepted for interface compatibility with the other
        platform helpers; the cliconf transport reports no rc, so unused here.
    :returns: list of unicode response strings, one per command.
    """
    responses = list()
    connection = get_connection(module)

    for cmd in to_list(commands):
        try:
            cmd = json.loads(cmd)
            command = cmd['command']
            prompt = cmd['prompt']
            answer = cmd['answer']
        # Narrowed from a bare ``except:``: json.loads raises ValueError for
        # non-JSON text and TypeError for non-string input, and the lookups
        # raise KeyError; any of those means "treat cmd as a plain command".
        except (ValueError, TypeError, KeyError):
            command = cmd
            prompt = None
            answer = None

        out = connection.get(command, prompt, answer)

        try:
            out = to_text(out, errors='surrogate_or_strict')
        except UnicodeError:
            module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))

        responses.append(out)

    return responses
|
||||
|
||||
|
||||
def load_config(module, commands, commit=False, comment=None):
    """Load configuration *commands* onto the device.

    Applies the commands through the cliconf connection, computes a diff when
    the module runs with --diff, then commits or discards the candidate.

    :param module: AnsibleModule instance.
    :param commands: list of configuration commands to apply.
    :param commit: when True, commit the candidate configuration; otherwise
        the changes are discarded (check-mode style dry run).
    :param comment: optional commit comment passed to the device.
    :returns: the configuration diff text when diff mode is active and the
        device reports changes, otherwise None.
    """
    connection = get_connection(module)

    out = connection.edit_config(commands)

    diff = None
    if module._diff:
        out = connection.get('compare')
        out = to_text(out, errors='surrogate_or_strict')

        if not out.startswith('No changes'):
            out = connection.get('show')
            diff = to_text(out, errors='surrogate_or_strict').strip()

    if commit:
        try:
            out = connection.commit(comment)
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # propagate; any commit failure rolls the candidate back.
        except Exception:
            connection.discard_changes()
            module.fail_json(msg='commit failed: %s' % out)

    if not commit:
        connection.discard_changes()
    else:
        connection.get('exit')

    if diff:
        return diff
|
||||
Reference in New Issue
Block a user