mirror of
https://github.com/openshift/community.okd.git
synced 2026-03-26 19:03:14 +00:00
improve turbo mode support (#117)
- delay the loading of external modules when possible; - delay the loading of the OKDRawModule and OpenShiftProcess classes; - have k8s reuse the design of the kubernetes.core modules. We have a chicken-and-egg problem that prevents us from properly reporting whether kubernetes.core is missing: we need args_common to create the module object, and we need the module object to report the missing dependency. The dependency is declared in the galaxy.yml file anyway, so the problem should not happen.
This commit is contained in:
186
plugins/module_utils/k8s.py
Normal file
186
plugins/module_utils/k8s.py
Normal file
@@ -0,0 +1,186 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import re
|
||||
import operator
|
||||
from functools import reduce
|
||||
import traceback
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.common import (
|
||||
K8sAnsibleMixin,
|
||||
get_api_client,
|
||||
)
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
try:
|
||||
from kubernetes.dynamic.exceptions import DynamicApiError, NotFoundError, ForbiddenError
|
||||
HAS_KUBERNETES_COLLECTION = True
|
||||
except ImportError as e:
|
||||
HAS_KUBERNETES_COLLECTION = False
|
||||
k8s_collection_import_exception = e
|
||||
K8S_COLLECTION_ERROR = traceback.format_exc()
|
||||
|
||||
|
||||
TRIGGER_ANNOTATION = 'image.openshift.io/triggers'
|
||||
TRIGGER_CONTAINER = re.compile(r"(?P<path>.*)\[((?P<index>[0-9]+)|\?\(@\.name==[\"'\\]*(?P<name>[a-z0-9]([-a-z0-9]*[a-z0-9])?))")
|
||||
|
||||
|
||||
class OKDRawModule(K8sAnsibleMixin):
    """OpenShift-aware variant of the generic kubernetes.core raw module.

    Extends K8sAnsibleMixin with OKD-specific behaviour:

    - falling back to the ProjectRequest API when a Project cannot be
      read (regular users cannot create Projects directly);
    - preserving container images that the OpenShift image-trigger
      controller has already resolved on DeploymentConfigs — both via
      ``spec.triggers`` and via the ``image.openshift.io/triggers``
      annotation — so repeated runs stay idempotent.
    """

    def __init__(self, module, k8s_kind=None, *args, **kwargs):
        """Bind the AnsibleModule, build the API client and load resource definitions.

        :param module: an already-constructed AnsibleModule instance.
        :param k8s_kind: optional kind overriding ``module.params['kind']``.
        """
        self.module = module
        self.client = get_api_client(module=module)
        self.check_mode = self.module.check_mode
        self.params = self.module.params
        self.fail_json = self.module.fail_json
        # Some mixin helpers use self.fail; keep it aliased to fail_json.
        self.fail = self.module.fail_json
        self.exit_json = self.module.exit_json

        super(OKDRawModule, self).__init__(module, *args, **kwargs)

        self.warnings = []

        self.kind = k8s_kind or self.params.get('kind')
        self.api_version = self.params.get('api_version')
        self.name = self.params.get('name')
        self.namespace = self.params.get('namespace')

        self.check_library_version()
        self.set_resource_definitions(module)

    def perform_action(self, resource, definition):
        """Apply OKD-specific pre-processing, then delegate to the mixin.

        :param resource: the dynamic-client resource for ``definition``'s kind.
        :param definition: the desired object definition (dict).
        :return: the result dict from the parent ``perform_action``.
        """
        state = self.params.get('state', None)
        name = definition['metadata'].get('name')
        namespace = definition['metadata'].get('namespace')

        if state != 'absent':

            if resource.kind in ['Project', 'ProjectRequest']:
                try:
                    resource.get(name, namespace)
                except (NotFoundError, ForbiddenError):
                    # Regular users may not read/create Projects directly;
                    # fall back to the ProjectRequest API.
                    return self.create_project_request(definition)
                except DynamicApiError as exc:
                    self.fail_json(msg='Failed to retrieve requested object: {0}'.format(exc.body),
                                   error=exc.status, status=exc.status, reason=exc.reason)

            try:
                existing = resource.get(name=name, namespace=namespace).to_dict()
            except Exception:
                # Best-effort lookup: a missing/unreadable object simply means
                # there is nothing to resolve triggers against.
                existing = None

            if existing:
                if resource.kind == 'DeploymentConfig':
                    if definition.get('spec', {}).get('triggers'):
                        definition = self.resolve_imagestream_triggers(existing, definition)
                elif existing['metadata'].get('annotations', {}).get(TRIGGER_ANNOTATION):
                    definition = self.resolve_imagestream_trigger_annotation(existing, definition)

        return super(OKDRawModule, self).perform_action(resource, definition)

    @staticmethod
    def get_index(desired, objects, keys):
        """ Iterates over keys, returns the index of the first object from objects
            where the value of every key matches the value in desired.
            Returns None (implicitly) when nothing matches.
        """
        # pylint: disable=use-a-generator
        for i, item in enumerate(objects):
            if item and all([desired.get(key, True) == item.get(key, False) for key in keys]):
                return i

    def resolve_imagestream_trigger_annotation(self, existing, definition):
        """Copy trigger-managed container images from the live object into the
        new definition, driven by the 'image.openshift.io/triggers' annotation.

        :param existing: the live object as a dict.
        :param definition: the desired definition (mutated and returned).
        """
        # yaml is only needed here; importing lazily keeps module start-up light.
        import yaml

        def get_from_fields(d, fields):
            # Walk nested dicts/lists by a list of keys; None when any step fails.
            try:
                return reduce(operator.getitem, fields, d)
            except Exception:
                return None

        def set_from_fields(d, fields, value):
            get_from_fields(d, fields[:-1])[fields[-1]] = value

        if TRIGGER_ANNOTATION in definition['metadata'].get('annotations', {}).keys():
            triggers = yaml.safe_load(definition['metadata']['annotations'][TRIGGER_ANNOTATION] or '[]')
        else:
            # FIX: the default for a missing 'annotations' key must be a dict
            # ({}), not the string '{}' -- str has no .get() and the old code
            # raised AttributeError whenever the live object had no annotations.
            triggers = yaml.safe_load(existing['metadata'].get('annotations', {}).get(TRIGGER_ANNOTATION, '[]'))

        if not isinstance(triggers, list):
            return definition

        for trigger in triggers:
            if trigger.get('fieldPath'):
                parsed = self.parse_trigger_fieldpath(trigger['fieldPath'])
                path = parsed.get('path', '').split('.')
                if path:
                    existing_containers = get_from_fields(existing, path)
                    new_containers = get_from_fields(definition, path)
                    if parsed.get('name'):
                        existing_index = self.get_index({'name': parsed['name']}, existing_containers, ['name'])
                        new_index = self.get_index({'name': parsed['name']}, new_containers, ['name'])
                    elif parsed.get('index') is not None:
                        existing_index = new_index = int(parsed['index'])
                    else:
                        existing_index = new_index = None
                    if existing_index is not None and new_index is not None:
                        if existing_index < len(existing_containers) and new_index < len(new_containers):
                            # Keep the image the trigger controller already resolved.
                            set_from_fields(definition, path + [new_index, 'image'], get_from_fields(existing, path + [existing_index, 'image']))
        return definition

    def resolve_imagestream_triggers(self, existing, definition):
        """Reconcile spec.triggers on a DeploymentConfig with the live object.

        Copies already-resolved container images, lastTriggeredImage values,
        and the implied 'from' namespace so the comparison does not flag
        changes the trigger controller made.

        :param existing: the live object as a dict.
        :param definition: the desired definition (mutated and returned).
        """
        existing_triggers = existing.get('spec', {}).get('triggers')
        new_triggers = definition['spec']['triggers']
        existing_containers = existing.get('spec', {}).get('template', {}).get('spec', {}).get('containers', [])
        new_containers = definition.get('spec', {}).get('template', {}).get('spec', {}).get('containers', [])
        for i, trigger in enumerate(new_triggers):
            if trigger.get('type') == 'ImageChange' and trigger.get('imageChangeParams'):
                names = trigger['imageChangeParams'].get('containerNames', [])
                for name in names:
                    old_container_index = self.get_index({'name': name}, existing_containers, ['name'])
                    new_container_index = self.get_index({'name': name}, new_containers, ['name'])
                    if old_container_index is not None and new_container_index is not None:
                        image = existing['spec']['template']['spec']['containers'][old_container_index]['image']
                        definition['spec']['template']['spec']['containers'][new_container_index]['image'] = image

                existing_index = self.get_index(trigger['imageChangeParams'],
                                                [x.get('imageChangeParams') for x in existing_triggers],
                                                ['containerNames'])
                if existing_index is not None:
                    existing_image = existing_triggers[existing_index].get('imageChangeParams', {}).get('lastTriggeredImage')
                    if existing_image:
                        definition['spec']['triggers'][i]['imageChangeParams']['lastTriggeredImage'] = existing_image
                    existing_from = existing_triggers[existing_index].get('imageChangeParams', {}).get('from', {})
                    new_from = trigger['imageChangeParams'].get('from', {})
                    existing_namespace = existing_from.get('namespace')
                    # Defaults chosen so that two missing names never compare equal.
                    existing_name = existing_from.get('name', False)
                    new_name = new_from.get('name', True)
                    add_namespace = existing_namespace and 'namespace' not in new_from.keys() and existing_name == new_name
                    if add_namespace:
                        definition['spec']['triggers'][i]['imageChangeParams']['from']['namespace'] = existing_from['namespace']

        return definition

    def parse_trigger_fieldpath(self, expression):
        """Parse a trigger fieldPath into {'path', 'index', 'name'} groups.

        NOTE(review): assumes the expression matches TRIGGER_CONTAINER; an
        unparseable fieldPath would raise AttributeError here -- confirm
        upstream always validates the annotation format.
        """
        parsed = TRIGGER_CONTAINER.search(expression).groupdict()
        if parsed.get('index'):
            parsed['index'] = int(parsed['index'])
        return parsed

    def create_project_request(self, definition):
        """Create the Project via the ProjectRequest API.

        :param definition: the Project definition; its kind is rewritten to
            ProjectRequest before submission.
        :return: a result dict with 'changed', 'result' and 'method' keys.
        """
        definition['kind'] = 'ProjectRequest'
        result = {'changed': False, 'result': {}}
        resource = self.find_resource('ProjectRequest', definition['apiVersion'], fail=True)
        if not self.check_mode:
            try:
                k8s_obj = resource.create(definition)
                result['result'] = k8s_obj.to_dict()
            except DynamicApiError as exc:
                self.fail_json(msg="Failed to create object: {0}".format(exc.body),
                               error=exc.status, status=exc.status, reason=exc.reason)
        result['changed'] = True
        result['method'] = 'create'
        return result
|
||||
199
plugins/module_utils/openshift_process.py
Normal file
199
plugins/module_utils/openshift_process.py
Normal file
@@ -0,0 +1,199 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.common import (
|
||||
K8sAnsibleMixin,
|
||||
get_api_client,
|
||||
)
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
try:
|
||||
from kubernetes.dynamic.exceptions import DynamicApiError, NotFoundError
|
||||
HAS_KUBERNETES_COLLECTION = True
|
||||
except ImportError as e:
|
||||
HAS_KUBERNETES_COLLECTION = False
|
||||
k8s_collection_import_exception = e
|
||||
K8S_COLLECTION_ERROR = traceback.format_exc()
|
||||
|
||||
try:
|
||||
from kubernetes.dynamic.exceptions import DynamicApiError, NotFoundError
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class OpenShiftProcess(K8sAnsibleMixin):
    """Implementation of the openshift_process module.

    Renders an OpenShift Template server-side (processedtemplates API) and,
    depending on ``state``, either returns the rendered objects or delegates
    to the generic kubernetes.core machinery to create/delete them.
    """

    def __init__(self, module):
        """Bind the AnsibleModule, verify dependencies and build the client.

        :param module: an already-constructed AnsibleModule instance.
        """
        self.module = module
        self.fail_json = self.module.fail_json
        # FIX(consistency): parse_dotenv_and_merge calls self.fail(), but this
        # class never defined it (OKDRawModule does). Alias it like fail_json
        # so those error paths cannot die with AttributeError.
        self.fail = self.module.fail_json
        self.exit_json = self.module.exit_json

        if not HAS_KUBERNETES_COLLECTION:
            self.module.fail_json(
                msg="The kubernetes.core collection must be installed",
                exception=K8S_COLLECTION_ERROR,
                error=to_native(k8s_collection_import_exception),
            )

        super(OpenShiftProcess, self).__init__(self.module)

        self.params = self.module.params
        self.check_mode = self.module.check_mode
        self.client = get_api_client(self.module)

    def execute_module(self):
        """Run the module: locate the Template, process it, apply if requested.

        Exits via ``module.exit_json``/``fail_json``; never returns normally.
        """
        v1_templates = self.find_resource(
            "templates", "template.openshift.io/v1", fail=True
        )
        v1_processed_templates = self.find_resource(
            "processedtemplates", "template.openshift.io/v1", fail=True
        )

        name = self.params.get("name")
        namespace = self.params.get("namespace")
        namespace_target = self.params.get("namespace_target")
        definition = self.params.get("resource_definition")
        src = self.params.get("src")

        state = self.params.get("state")

        parameters = self.params.get("parameters") or {}
        parameter_file = self.params.get("parameter_file")

        # The template source options are mutually exclusive.
        if (name and definition) or (name and src) or (src and definition):
            self.fail_json("Only one of src, name, or definition may be provided")

        if name and not namespace:
            self.fail_json("namespace is required when name is set")

        template = None

        if src or definition:
            self.set_resource_definitions(self.module)
            if len(self.resource_definitions) < 1:
                self.fail_json(
                    "Unable to load a Template resource from src or resource_definition"
                )
            elif len(self.resource_definitions) > 1:
                self.fail_json(
                    "Multiple Template resources found in src or resource_definition, only one Template may be processed at a time"
                )
            template = self.resource_definitions[0]
            template_namespace = template.get("metadata", {}).get("namespace")
            namespace = template_namespace or namespace or namespace_target or "default"
        elif name and namespace:
            try:
                template = v1_templates.get(name=name, namespace=namespace).to_dict()
            except DynamicApiError as exc:
                self.fail_json(
                    msg="Failed to retrieve Template with name '{0}' in namespace '{1}': {2}".format(
                        name, namespace, exc.body
                    ),
                    error=exc.status,
                    status=exc.status,
                    reason=exc.reason,
                )
            except Exception as exc:
                self.module.fail_json(
                    msg="Failed to retrieve Template with name '{0}' in namespace '{1}': {2}".format(
                        name, namespace, to_native(exc)
                    ),
                    error="",
                    status="",
                    reason="",
                )
        else:
            self.fail_json(
                "One of resource_definition, src, or name and namespace must be provided"
            )

        if parameter_file:
            parameters = self.parse_dotenv_and_merge(parameters, parameter_file)

        for k, v in parameters.items():
            template = self.update_template_param(template, k, v)

        result = {"changed": False}

        try:
            response = v1_processed_templates.create(
                body=template, namespace=namespace
            ).to_dict()
        except DynamicApiError as exc:
            self.fail_json(
                msg="Server failed to render the Template: {0}".format(exc.body),
                error=exc.status,
                status=exc.status,
                reason=exc.reason,
            )
        except Exception as exc:
            self.module.fail_json(
                msg="Server failed to render the Template: {0}".format(to_native(exc)),
                error="",
                status="",
                reason="",
            )
        result["message"] = ""
        if "message" in response:
            result["message"] = response["message"]
        result["resources"] = response["objects"]

        if state != "rendered":
            # Hand the rendered objects to the generic k8s machinery.
            self.resource_definitions = response["objects"]
            self.kind = self.api_version = self.name = None
            self.namespace = self.params.get("namespace_target")
            self.append_hash = False
            self.apply = False
            self.params["validate"] = None
            self.params["merge_type"] = None
            super(OpenShiftProcess, self).execute_module()

        self.module.exit_json(**result)

    def update_template_param(self, template, k, v):
        """Set parameter ``k`` to ``v`` in the Template, if it declares ``k``.

        :return: the (possibly mutated) template dict.
        """
        # FIX: a Template without a 'parameters' section previously raised
        # KeyError here; treat it as declaring no parameters instead.
        for i, param in enumerate(template.get("parameters") or []):
            if param["name"] == k:
                template["parameters"][i]["value"] = v
                return template
        return template

    def parse_dotenv_and_merge(self, parameters, parameter_file):
        """Merge KEY=VALUE pairs from a dotenv-style file into ``parameters``.

        Supports comments, `export` prefixes and backslash line continuations.
        Fails the module on a missing file, unreadable file, or duplicate key.

        :return: the merged parameters dict.
        """
        # re is only needed here; importing lazily keeps module start-up light.
        import re

        # FIX: the key pattern used [A-z], which also matches the punctuation
        # characters between 'Z' and 'a' ('[', '\\', ']', '^', '`'); spell the
        # intended ASCII-letter ranges out explicitly.
        DOTENV_PARSER = re.compile(
            r"(?x)^(\s*(\#.*|\s*|(export\s+)?(?P<key>[A-Za-z_][A-Za-z0-9_.]*)=(?P<value>.+?)?)\s*)[\r\n]*$"
        )
        path = os.path.normpath(parameter_file)
        if not os.path.exists(path):
            self.fail(msg="Error accessing {0}. Does the file exist?".format(path))
        try:
            with open(path, "r") as f:
                multiline = ""
                for line in f.readlines():
                    line = line.strip()
                    if line.endswith("\\"):
                        # Continuation: strip the trailing backslash and keep
                        # accumulating until a line without one.
                        multiline += " ".join(line.rsplit("\\", 1))
                        continue
                    if multiline:
                        line = multiline + line
                        multiline = ""
                    match = DOTENV_PARSER.search(line)
                    if not match:
                        continue
                    match = match.groupdict()
                    if match.get("key"):
                        if match["key"] in parameters:
                            self.fail_json(
                                msg="Duplicate value for '{0}' detected in parameter file".format(
                                    match["key"]
                                )
                            )
                        parameters[match["key"]] = match["value"]
        except IOError as exc:
            self.fail(msg="Error loading parameter file: {0}".format(exc))
        return parameters
|
||||
@@ -260,243 +260,58 @@ result:
|
||||
'''
|
||||
# ENDREMOVE (downstream)
|
||||
|
||||
import copy
|
||||
import re
|
||||
import operator
|
||||
import traceback
|
||||
from functools import reduce
|
||||
|
||||
try:
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.ansiblemodule import AnsibleModule
|
||||
except ImportError:
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
try:
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.common import get_api_client, K8sAnsibleMixin
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.args_common import (
|
||||
NAME_ARG_SPEC, RESOURCE_ARG_SPEC, AUTH_ARG_SPEC, WAIT_ARG_SPEC, DELETE_OPTS_ARG_SPEC)
|
||||
HAS_KUBERNETES_COLLECTION = True
|
||||
except ImportError as e:
|
||||
HAS_KUBERNETES_COLLECTION = False
|
||||
k8s_collection_import_exception = e
|
||||
K8S_COLLECTION_ERROR = traceback.format_exc()
|
||||
|
||||
try:
|
||||
import yaml
|
||||
from kubernetes.dynamic.exceptions import DynamicApiError, NotFoundError, ForbiddenError
|
||||
except ImportError:
|
||||
# Exceptions handled in common
|
||||
pass
|
||||
|
||||
TRIGGER_ANNOTATION = 'image.openshift.io/triggers'
|
||||
TRIGGER_CONTAINER = re.compile(r"(?P<path>.*)\[((?P<index>[0-9]+)|\?\(@\.name==[\"'\\]*(?P<name>[a-z0-9]([-a-z0-9]*[a-z0-9])?))")
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.args_common import (
|
||||
NAME_ARG_SPEC, RESOURCE_ARG_SPEC, AUTH_ARG_SPEC, WAIT_ARG_SPEC, DELETE_OPTS_ARG_SPEC)
|
||||
|
||||
|
||||
class OKDRawModule(K8sAnsibleMixin):
|
||||
def validate_spec():
|
||||
return dict(
|
||||
fail_on_error=dict(type='bool'),
|
||||
version=dict(),
|
||||
strict=dict(type='bool', default=True)
|
||||
)
|
||||
|
||||
def __init__(self, k8s_kind=None, *args, **kwargs):
|
||||
mutually_exclusive = [
|
||||
('resource_definition', 'src'),
|
||||
('merge_type', 'apply'),
|
||||
('template', 'resource_definition'),
|
||||
('template', 'src'),
|
||||
]
|
||||
|
||||
module = AnsibleModule(
|
||||
argument_spec=self.argspec,
|
||||
mutually_exclusive=mutually_exclusive,
|
||||
supports_check_mode=True,
|
||||
)
|
||||
|
||||
self.module = module
|
||||
self.check_mode = self.module.check_mode
|
||||
self.params = self.module.params
|
||||
self.fail_json = self.module.fail_json
|
||||
self.fail = self.module.fail_json
|
||||
self.exit_json = self.module.exit_json
|
||||
|
||||
if not HAS_KUBERNETES_COLLECTION:
|
||||
self.fail_json(
|
||||
msg="The kubernetes.core collection must be installed",
|
||||
exception=K8S_COLLECTION_ERROR,
|
||||
error=to_native(k8s_collection_import_exception)
|
||||
)
|
||||
|
||||
super(OKDRawModule, self).__init__(module, *args, **kwargs)
|
||||
|
||||
self.client = get_api_client(module)
|
||||
self.warnings = []
|
||||
|
||||
self.kind = k8s_kind or self.params.get('kind')
|
||||
self.api_version = self.params.get('api_version')
|
||||
self.name = self.params.get('name')
|
||||
self.namespace = self.params.get('namespace')
|
||||
|
||||
self.check_library_version()
|
||||
self.set_resource_definitions(module)
|
||||
|
||||
@property
|
||||
def validate_spec(self):
|
||||
return dict(
|
||||
fail_on_error=dict(type='bool'),
|
||||
version=dict(),
|
||||
strict=dict(type='bool', default=True)
|
||||
)
|
||||
|
||||
@property
|
||||
def argspec(self):
|
||||
argument_spec = copy.deepcopy(NAME_ARG_SPEC)
|
||||
argument_spec.update(copy.deepcopy(RESOURCE_ARG_SPEC))
|
||||
argument_spec.update(copy.deepcopy(AUTH_ARG_SPEC))
|
||||
argument_spec.update(copy.deepcopy(WAIT_ARG_SPEC))
|
||||
argument_spec['merge_type'] = dict(type='list', elements='str', choices=['json', 'merge', 'strategic-merge'])
|
||||
argument_spec['validate'] = dict(type='dict', default=None, options=self.validate_spec)
|
||||
argument_spec['append_hash'] = dict(type='bool', default=False)
|
||||
argument_spec['apply'] = dict(type='bool', default=False)
|
||||
argument_spec['template'] = dict(type='raw', default=None)
|
||||
argument_spec['delete_options'] = dict(type='dict', default=None, options=copy.deepcopy(DELETE_OPTS_ARG_SPEC))
|
||||
argument_spec['continue_on_error'] = dict(type='bool', default=False)
|
||||
argument_spec['state'] = dict(default='present', choices=['present', 'absent', 'patched'])
|
||||
argument_spec['force'] = dict(type='bool', default=False)
|
||||
return argument_spec
|
||||
|
||||
def perform_action(self, resource, definition):
|
||||
state = self.params.get('state', None)
|
||||
name = definition['metadata'].get('name')
|
||||
namespace = definition['metadata'].get('namespace')
|
||||
|
||||
if state != 'absent':
|
||||
|
||||
if resource.kind in ['Project', 'ProjectRequest']:
|
||||
try:
|
||||
resource.get(name, namespace)
|
||||
except (NotFoundError, ForbiddenError):
|
||||
return self.create_project_request(definition)
|
||||
except DynamicApiError as exc:
|
||||
self.fail_json(msg='Failed to retrieve requested object: {0}'.format(exc.body),
|
||||
error=exc.status, status=exc.status, reason=exc.reason)
|
||||
|
||||
try:
|
||||
existing = resource.get(name=name, namespace=namespace).to_dict()
|
||||
except Exception:
|
||||
existing = None
|
||||
|
||||
if existing:
|
||||
if resource.kind == 'DeploymentConfig':
|
||||
if definition.get('spec', {}).get('triggers'):
|
||||
definition = self.resolve_imagestream_triggers(existing, definition)
|
||||
elif existing['metadata'].get('annotations', {}).get(TRIGGER_ANNOTATION):
|
||||
definition = self.resolve_imagestream_trigger_annotation(existing, definition)
|
||||
|
||||
return super(OKDRawModule, self).perform_action(resource, definition)
|
||||
|
||||
@staticmethod
|
||||
def get_index(desired, objects, keys):
|
||||
""" Iterates over keys, returns the first object from objects where the value of the key
|
||||
matches the value in desired
|
||||
"""
|
||||
# pylint: disable=use-a-generator
|
||||
for i, item in enumerate(objects):
|
||||
if item and all([desired.get(key, True) == item.get(key, False) for key in keys]):
|
||||
return i
|
||||
|
||||
def resolve_imagestream_trigger_annotation(self, existing, definition):
|
||||
|
||||
def get_from_fields(d, fields):
|
||||
try:
|
||||
return reduce(operator.getitem, fields, d)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def set_from_fields(d, fields, value):
|
||||
get_from_fields(d, fields[:-1])[fields[-1]] = value
|
||||
|
||||
if TRIGGER_ANNOTATION in definition['metadata'].get('annotations', {}).keys():
|
||||
triggers = yaml.safe_load(definition['metadata']['annotations'][TRIGGER_ANNOTATION] or '[]')
|
||||
else:
|
||||
triggers = yaml.safe_load(existing['metadata'].get('annotations', '{}').get(TRIGGER_ANNOTATION, '[]'))
|
||||
|
||||
if not isinstance(triggers, list):
|
||||
return definition
|
||||
|
||||
for trigger in triggers:
|
||||
if trigger.get('fieldPath'):
|
||||
parsed = self.parse_trigger_fieldpath(trigger['fieldPath'])
|
||||
path = parsed.get('path', '').split('.')
|
||||
if path:
|
||||
existing_containers = get_from_fields(existing, path)
|
||||
new_containers = get_from_fields(definition, path)
|
||||
if parsed.get('name'):
|
||||
existing_index = self.get_index({'name': parsed['name']}, existing_containers, ['name'])
|
||||
new_index = self.get_index({'name': parsed['name']}, new_containers, ['name'])
|
||||
elif parsed.get('index') is not None:
|
||||
existing_index = new_index = int(parsed['index'])
|
||||
else:
|
||||
existing_index = new_index = None
|
||||
if existing_index is not None and new_index is not None:
|
||||
if existing_index < len(existing_containers) and new_index < len(new_containers):
|
||||
set_from_fields(definition, path + [new_index, 'image'], get_from_fields(existing, path + [existing_index, 'image']))
|
||||
return definition
|
||||
|
||||
def resolve_imagestream_triggers(self, existing, definition):
|
||||
|
||||
existing_triggers = existing.get('spec', {}).get('triggers')
|
||||
new_triggers = definition['spec']['triggers']
|
||||
existing_containers = existing.get('spec', {}).get('template', {}).get('spec', {}).get('containers', [])
|
||||
new_containers = definition.get('spec', {}).get('template', {}).get('spec', {}).get('containers', [])
|
||||
for i, trigger in enumerate(new_triggers):
|
||||
if trigger.get('type') == 'ImageChange' and trigger.get('imageChangeParams'):
|
||||
names = trigger['imageChangeParams'].get('containerNames', [])
|
||||
for name in names:
|
||||
old_container_index = self.get_index({'name': name}, existing_containers, ['name'])
|
||||
new_container_index = self.get_index({'name': name}, new_containers, ['name'])
|
||||
if old_container_index is not None and new_container_index is not None:
|
||||
image = existing['spec']['template']['spec']['containers'][old_container_index]['image']
|
||||
definition['spec']['template']['spec']['containers'][new_container_index]['image'] = image
|
||||
|
||||
existing_index = self.get_index(trigger['imageChangeParams'],
|
||||
[x.get('imageChangeParams') for x in existing_triggers],
|
||||
['containerNames'])
|
||||
if existing_index is not None:
|
||||
existing_image = existing_triggers[existing_index].get('imageChangeParams', {}).get('lastTriggeredImage')
|
||||
if existing_image:
|
||||
definition['spec']['triggers'][i]['imageChangeParams']['lastTriggeredImage'] = existing_image
|
||||
existing_from = existing_triggers[existing_index].get('imageChangeParams', {}).get('from', {})
|
||||
new_from = trigger['imageChangeParams'].get('from', {})
|
||||
existing_namespace = existing_from.get('namespace')
|
||||
existing_name = existing_from.get('name', False)
|
||||
new_name = new_from.get('name', True)
|
||||
add_namespace = existing_namespace and 'namespace' not in new_from.keys() and existing_name == new_name
|
||||
if add_namespace:
|
||||
definition['spec']['triggers'][i]['imageChangeParams']['from']['namespace'] = existing_from['namespace']
|
||||
|
||||
return definition
|
||||
|
||||
def parse_trigger_fieldpath(self, expression):
|
||||
parsed = TRIGGER_CONTAINER.search(expression).groupdict()
|
||||
if parsed.get('index'):
|
||||
parsed['index'] = int(parsed['index'])
|
||||
return parsed
|
||||
|
||||
def create_project_request(self, definition):
|
||||
definition['kind'] = 'ProjectRequest'
|
||||
result = {'changed': False, 'result': {}}
|
||||
resource = self.find_resource('ProjectRequest', definition['apiVersion'], fail=True)
|
||||
if not self.check_mode:
|
||||
try:
|
||||
k8s_obj = resource.create(definition)
|
||||
result['result'] = k8s_obj.to_dict()
|
||||
except DynamicApiError as exc:
|
||||
self.fail_json(msg="Failed to create object: {0}".format(exc.body),
|
||||
error=exc.status, status=exc.status, reason=exc.reason)
|
||||
result['changed'] = True
|
||||
result['method'] = 'create'
|
||||
return result
|
||||
def argspec():
    """Return the full argument spec for the k8s module.

    Combines the shared kubernetes.core specs (name, resource, auth, wait)
    with the k8s-module-specific options.
    """
    spec = {}
    # Fold in the shared argument-spec fragments from kubernetes.core.
    for fragment in (NAME_ARG_SPEC, RESOURCE_ARG_SPEC, AUTH_ARG_SPEC, WAIT_ARG_SPEC):
        spec.update(fragment)

    # Module-specific options.
    spec.update({
        'merge_type': dict(type='list', elements='str', choices=['json', 'merge', 'strategic-merge']),
        'validate': dict(type='dict', default=None, options=validate_spec()),
        'append_hash': dict(type='bool', default=False),
        'apply': dict(type='bool', default=False),
        'template': dict(type='raw', default=None),
        'delete_options': dict(type='dict', default=None, options=DELETE_OPTS_ARG_SPEC),
        'continue_on_error': dict(type='bool', default=False),
        'state': dict(default='present', choices=['present', 'absent', 'patched']),
        'force': dict(type='bool', default=False),
    })
    return spec
|
||||
|
||||
|
||||
def main():
|
||||
OKDRawModule().execute_module()
|
||||
mutually_exclusive = [
|
||||
('resource_definition', 'src'),
|
||||
('merge_type', 'apply'),
|
||||
('template', 'resource_definition'),
|
||||
('template', 'src'),
|
||||
]
|
||||
|
||||
module = AnsibleModule(argument_spec=argspec(), supports_check_mode=True, mutually_exclusive=mutually_exclusive)
|
||||
|
||||
from ansible_collections.community.okd.plugins.module_utils.k8s import OKDRawModule
|
||||
okdraw_module = OKDRawModule(module)
|
||||
|
||||
# remove_aliases from kubernetes.core's common requires the argspec attribute. Ideally, it should
|
||||
# read that through the module class, but we cannot change that.
|
||||
okdraw_module.argspec = argspec()
|
||||
okdraw_module.execute_module()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2020, Red Hat
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
# Copyright (c) 2020-2021, Red Hat
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
# STARTREMOVE (downstream)
|
||||
DOCUMENTATION = r'''
|
||||
module: openshift_process
|
||||
@@ -203,190 +203,43 @@ resources:
|
||||
'''
|
||||
# ENDREMOVE (downstream)
|
||||
|
||||
import re
|
||||
import os
|
||||
import copy
|
||||
import traceback
|
||||
|
||||
try:
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.ansiblemodule import AnsibleModule
|
||||
except ImportError:
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
try:
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.common import K8sAnsibleMixin, get_api_client
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.args_common import (
|
||||
AUTH_ARG_SPEC, RESOURCE_ARG_SPEC, WAIT_ARG_SPEC
|
||||
)
|
||||
HAS_KUBERNETES_COLLECTION = True
|
||||
except ImportError as e:
|
||||
HAS_KUBERNETES_COLLECTION = False
|
||||
k8s_collection_import_exception = e
|
||||
K8S_COLLECTION_ERROR = traceback.format_exc()
|
||||
K8sAnsibleMixin = object
|
||||
AUTH_ARG_SPEC = RESOURCE_ARG_SPEC = WAIT_ARG_SPEC = {}
|
||||
|
||||
try:
|
||||
from kubernetes.dynamic.exceptions import DynamicApiError, NotFoundError
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
DOTENV_PARSER = re.compile(r"(?x)^(\s*(\#.*|\s*|(export\s+)?(?P<key>[A-z_][A-z0-9_.]*)=(?P<value>.+?)?)\s*)[\r\n]*$")
|
||||
from ansible_collections.kubernetes.core.plugins.module_utils.args_common import (
|
||||
AUTH_ARG_SPEC, RESOURCE_ARG_SPEC, WAIT_ARG_SPEC
|
||||
)
|
||||
|
||||
|
||||
class OpenShiftProcess(K8sAnsibleMixin):
|
||||
def argspec():
|
||||
argument_spec = {}
|
||||
argument_spec.update(AUTH_ARG_SPEC)
|
||||
argument_spec.update(WAIT_ARG_SPEC)
|
||||
argument_spec.update(RESOURCE_ARG_SPEC)
|
||||
argument_spec['state'] = dict(type='str', default='rendered', choices=['present', 'absent', 'rendered'])
|
||||
argument_spec['namespace'] = dict(type='str')
|
||||
argument_spec['namespace_target'] = dict(type='str')
|
||||
argument_spec['parameters'] = dict(type='dict')
|
||||
argument_spec['name'] = dict(type='str')
|
||||
argument_spec['parameter_file'] = dict(type='str')
|
||||
|
||||
def __init__(self):
|
||||
self.module = AnsibleModule(
|
||||
argument_spec=self.argspec,
|
||||
supports_check_mode=True,
|
||||
)
|
||||
self.fail_json = self.module.fail_json
|
||||
self.exit_json = self.module.exit_json
|
||||
|
||||
if not HAS_KUBERNETES_COLLECTION:
|
||||
self.module.fail_json(
|
||||
msg="The kubernetes.core collection must be installed",
|
||||
exception=K8S_COLLECTION_ERROR,
|
||||
error=to_native(k8s_collection_import_exception)
|
||||
)
|
||||
|
||||
super(OpenShiftProcess, self).__init__(self.module)
|
||||
|
||||
self.params = self.module.params
|
||||
self.check_mode = self.module.check_mode
|
||||
self.client = get_api_client(self.module)
|
||||
|
||||
@property
|
||||
def argspec(self):
|
||||
spec = copy.deepcopy(AUTH_ARG_SPEC)
|
||||
spec.update(copy.deepcopy(WAIT_ARG_SPEC))
|
||||
spec.update(copy.deepcopy(RESOURCE_ARG_SPEC))
|
||||
|
||||
spec['state'] = dict(type='str', default='rendered', choices=['present', 'absent', 'rendered'])
|
||||
spec['namespace'] = dict(type='str')
|
||||
spec['namespace_target'] = dict(type='str')
|
||||
spec['parameters'] = dict(type='dict')
|
||||
spec['name'] = dict(type='str')
|
||||
spec['parameter_file'] = dict(type='str')
|
||||
|
||||
return spec
|
||||
|
||||
def execute_module(self):
|
||||
v1_templates = self.find_resource('templates', 'template.openshift.io/v1', fail=True)
|
||||
v1_processed_templates = self.find_resource('processedtemplates', 'template.openshift.io/v1', fail=True)
|
||||
|
||||
name = self.params.get('name')
|
||||
namespace = self.params.get('namespace')
|
||||
namespace_target = self.params.get('namespace_target')
|
||||
definition = self.params.get('resource_definition')
|
||||
src = self.params.get('src')
|
||||
|
||||
state = self.params.get('state')
|
||||
|
||||
parameters = self.params.get('parameters') or {}
|
||||
parameter_file = self.params.get('parameter_file')
|
||||
|
||||
if (name and definition) or (name and src) or (src and definition):
|
||||
self.fail_json("Only one of src, name, or definition may be provided")
|
||||
|
||||
if name and not namespace:
|
||||
self.fail_json("namespace is required when name is set")
|
||||
|
||||
template = None
|
||||
|
||||
if src or definition:
|
||||
self.set_resource_definitions(self.module)
|
||||
if len(self.resource_definitions) < 1:
|
||||
self.fail_json('Unable to load a Template resource from src or resource_definition')
|
||||
elif len(self.resource_definitions) > 1:
|
||||
self.fail_json('Multiple Template resources found in src or resource_definition, only one Template may be processed at a time')
|
||||
template = self.resource_definitions[0]
|
||||
template_namespace = template.get('metadata', {}).get('namespace')
|
||||
namespace = template_namespace or namespace or namespace_target or 'default'
|
||||
elif name and namespace:
|
||||
try:
|
||||
template = v1_templates.get(name=name, namespace=namespace).to_dict()
|
||||
except DynamicApiError as exc:
|
||||
self.fail_json(msg="Failed to retrieve Template with name '{0}' in namespace '{1}': {2}".format(name, namespace, exc.body),
|
||||
error=exc.status, status=exc.status, reason=exc.reason)
|
||||
except Exception as exc:
|
||||
self.module.fail_json(msg="Failed to retrieve Template with name '{0}' in namespace '{1}': {2}".format(name, namespace, to_native(exc)),
|
||||
error='', status='', reason='')
|
||||
else:
|
||||
self.fail_json("One of resource_definition, src, or name and namespace must be provided")
|
||||
|
||||
if parameter_file:
|
||||
parameters = self.parse_dotenv_and_merge(parameters, parameter_file)
|
||||
|
||||
for k, v in parameters.items():
|
||||
template = self.update_template_param(template, k, v)
|
||||
|
||||
result = {'changed': False}
|
||||
|
||||
try:
|
||||
response = v1_processed_templates.create(body=template, namespace=namespace).to_dict()
|
||||
except DynamicApiError as exc:
|
||||
self.fail_json(msg="Server failed to render the Template: {0}".format(exc.body),
|
||||
error=exc.status, status=exc.status, reason=exc.reason)
|
||||
except Exception as exc:
|
||||
self.module.fail_json(msg="Server failed to render the Template: {0}".format(to_native(exc)),
|
||||
error='', status='', reason='')
|
||||
result['message'] = ""
|
||||
if "message" in response:
|
||||
result['message'] = response['message']
|
||||
result['resources'] = response['objects']
|
||||
|
||||
if state != 'rendered':
|
||||
self.resource_definitions = response['objects']
|
||||
self.kind = self.api_version = self.name = None
|
||||
self.namespace = self.params.get('namespace_target')
|
||||
self.append_hash = False
|
||||
self.apply = False
|
||||
self.params['validate'] = None
|
||||
self.params['merge_type'] = None
|
||||
super(OpenShiftProcess, self).execute_module()
|
||||
|
||||
self.module.exit_json(**result)
|
||||
|
||||
def update_template_param(self, template, k, v):
|
||||
for i, param in enumerate(template['parameters']):
|
||||
if param['name'] == k:
|
||||
template['parameters'][i]['value'] = v
|
||||
return template
|
||||
return template
|
||||
|
||||
def parse_dotenv_and_merge(self, parameters, parameter_file):
|
||||
path = os.path.normpath(parameter_file)
|
||||
if not os.path.exists(path):
|
||||
self.fail(msg="Error accessing {0}. Does the file exist?".format(path))
|
||||
try:
|
||||
with open(path, 'r') as f:
|
||||
multiline = ''
|
||||
for line in f.readlines():
|
||||
line = line.strip()
|
||||
if line.endswith('\\'):
|
||||
multiline += ' '.join(line.rsplit('\\', 1))
|
||||
continue
|
||||
if multiline:
|
||||
line = multiline + line
|
||||
multiline = ''
|
||||
match = DOTENV_PARSER.search(line)
|
||||
if not match:
|
||||
continue
|
||||
match = match.groupdict()
|
||||
if match.get('key'):
|
||||
if match['key'] in parameters:
|
||||
self.fail_json(msg="Duplicate value for '{0}' detected in parameter file".format(match['key']))
|
||||
parameters[match['key']] = match['value']
|
||||
except IOError as exc:
|
||||
self.fail(msg="Error loading parameter file: {0}".format(exc))
|
||||
return parameters
|
||||
return argument_spec
|
||||
|
||||
|
||||
def main():
|
||||
OpenShiftProcess().execute_module()
|
||||
argument_spec = argspec()
|
||||
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
|
||||
|
||||
from ansible_collections.community.okd.plugins.module_utils.openshift_process import (
|
||||
OpenShiftProcess)
|
||||
|
||||
openshift_process = OpenShiftProcess(module)
|
||||
# remove_aliases from kubernetes.core's common requires the argspec attribute. Ideally, it should
|
||||
# read that throught the module class, but we cannot change that.
|
||||
openshift_process.argspec = argument_spec
|
||||
openshift_process.execute_module()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
plugins/modules/k8s.py validate-modules:parameter-type-not-in-doc
|
||||
plugins/modules/k8s.py validate-modules:return-syntax-error
|
||||
plugins/modules/k8s.py pylint:bad-option-value
|
||||
plugins/module_utils/k8s.py pylint:bad-option-value
|
||||
plugins/modules/openshift_process.py validate-modules:parameter-type-not-in-doc
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
plugins/modules/k8s.py validate-modules:parameter-type-not-in-doc
|
||||
plugins/modules/k8s.py validate-modules:return-syntax-error
|
||||
plugins/modules/k8s.py pylint:bad-option-value
|
||||
plugins/module_utils/k8s.py pylint:bad-option-value
|
||||
plugins/modules/openshift_process.py validate-modules:parameter-type-not-in-doc
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
plugins/modules/k8s.py validate-modules:parameter-type-not-in-doc
|
||||
plugins/modules/k8s.py pylint:bad-option-value
|
||||
plugins/module_utils/k8s.py pylint:bad-option-value
|
||||
plugins/modules/openshift_process.py validate-modules:parameter-type-not-in-doc
|
||||
|
||||
Reference in New Issue
Block a user