Reformat everything.

This commit is contained in:
Felix Fontein
2025-11-01 12:08:41 +01:00
parent 3f2213791a
commit 340ff8586d
1008 changed files with 61301 additions and 58309 deletions

View File

@@ -15,9 +15,8 @@ display = Display()
class ActionModule(ActionBase): class ActionModule(ActionBase):
# Keep internal params away from user interactions # Keep internal params away from user interactions
_VALID_ARGS = frozenset(('path', 'state', 'table', 'noflush', 'counters', 'modprobe', 'ip_version', 'wait')) _VALID_ARGS = frozenset(("path", "state", "table", "noflush", "counters", "modprobe", "ip_version", "wait"))
DEFAULT_SUDOABLE = True DEFAULT_SUDOABLE = True
@staticmethod @staticmethod
@@ -27,7 +26,8 @@ class ActionModule(ActionBase):
"is set to 'restored'. To enable its rollback feature (that needs the " "is set to 'restored'. To enable its rollback feature (that needs the "
"module to run asynchronously on the remote), please set task attribute " "module to run asynchronously on the remote), please set task attribute "
f"'poll' (={task_poll}) to 0, and 'async' (={task_async}) to a value >2 and not greater than " f"'poll' (={task_poll}) to 0, and 'async' (={task_async}) to a value >2 and not greater than "
f"'ansible_timeout' (={max_timeout}) (recommended).") f"'ansible_timeout' (={max_timeout}) (recommended)."
)
@staticmethod @staticmethod
def msg_warning__no_async_is_no_rollback(task_poll, task_async, max_timeout): def msg_warning__no_async_is_no_rollback(task_poll, task_async, max_timeout):
@@ -37,7 +37,8 @@ class ActionModule(ActionBase):
"regain it before fixing firewall rules through a serial console, or any " "regain it before fixing firewall rules through a serial console, or any "
f"other way except SSH. Please set task attribute 'poll' (={task_poll}) to 0, and " f"other way except SSH. Please set task attribute 'poll' (={task_poll}) to 0, and "
f"'async' (={task_async}) to a value >2 and not greater than 'ansible_timeout' (={max_timeout}) " f"'async' (={task_async}) to a value >2 and not greater than 'ansible_timeout' (={max_timeout}) "
"(recommended).") "(recommended)."
)
@staticmethod @staticmethod
def msg_warning__async_greater_than_timeout(task_poll, task_async, max_timeout): def msg_warning__async_greater_than_timeout(task_poll, task_async, max_timeout):
@@ -46,44 +47,48 @@ class ActionModule(ActionBase):
"but with settings that will lead this rollback to happen AFTER that the " "but with settings that will lead this rollback to happen AFTER that the "
"controller will reach its own timeout. Please set task attribute 'poll' " "controller will reach its own timeout. Please set task attribute 'poll' "
f"(={task_poll}) to 0, and 'async' (={task_async}) to a value >2 and not greater than " f"(={task_poll}) to 0, and 'async' (={task_async}) to a value >2 and not greater than "
f"'ansible_timeout' (={max_timeout}) (recommended).") f"'ansible_timeout' (={max_timeout}) (recommended)."
)
def _async_result(self, async_status_args, task_vars, timeout): def _async_result(self, async_status_args, task_vars, timeout):
''' """
Retrieve results of the asynchronous task, and display them in place of Retrieve results of the asynchronous task, and display them in place of
the async wrapper results (those with the ansible_job_id key). the async wrapper results (those with the ansible_job_id key).
''' """
async_status = self._task.copy() async_status = self._task.copy()
async_status.args = async_status_args async_status.args = async_status_args
async_status.action = 'ansible.builtin.async_status' async_status.action = "ansible.builtin.async_status"
async_status.async_val = 0 async_status.async_val = 0
async_action = self._shared_loader_obj.action_loader.get( async_action = self._shared_loader_obj.action_loader.get(
async_status.action, task=async_status, connection=self._connection, async_status.action,
play_context=self._play_context, loader=self._loader, templar=self._templar, task=async_status,
shared_loader_obj=self._shared_loader_obj) connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=self._templar,
shared_loader_obj=self._shared_loader_obj,
)
if async_status.args['mode'] == 'cleanup': if async_status.args["mode"] == "cleanup":
return async_action.run(task_vars=task_vars) return async_action.run(task_vars=task_vars)
# At least one iteration is required, even if timeout is 0. # At least one iteration is required, even if timeout is 0.
for dummy in range(max(1, timeout)): for dummy in range(max(1, timeout)):
async_result = async_action.run(task_vars=task_vars) async_result = async_action.run(task_vars=task_vars)
if async_result.get('finished', 0) == 1: if async_result.get("finished", 0) == 1:
break break
time.sleep(min(1, timeout)) time.sleep(min(1, timeout))
return async_result return async_result
def run(self, tmp=None, task_vars=None): def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True self._supports_check_mode = True
self._supports_async = True self._supports_async = True
result = super().run(tmp, task_vars) result = super().run(tmp, task_vars)
del tmp # tmp no longer has any effect del tmp # tmp no longer has any effect
if not result.get('skipped'): if not result.get("skipped"):
# FUTURE: better to let _execute_module calculate this internally? # FUTURE: better to let _execute_module calculate this internally?
wrap_async = self._task.async_val and not self._connection.has_native_async wrap_async = self._task.async_val and not self._connection.has_native_async
@@ -98,41 +103,38 @@ class ActionModule(ActionBase):
starter_cmd = None starter_cmd = None
confirm_cmd = None confirm_cmd = None
if module_args.get('state', None) == 'restored': if module_args.get("state", None) == "restored":
if not wrap_async: if not wrap_async:
if not check_mode: if not check_mode:
display.warning(self.msg_error__async_and_poll_not_zero( display.warning(self.msg_error__async_and_poll_not_zero(task_poll, task_async, max_timeout))
task_poll,
task_async,
max_timeout))
elif task_poll: elif task_poll:
raise AnsibleActionFail(self.msg_warning__no_async_is_no_rollback( raise AnsibleActionFail(
task_poll, self.msg_warning__no_async_is_no_rollback(task_poll, task_async, max_timeout)
task_async, )
max_timeout))
else: else:
if task_async > max_timeout and not check_mode: if task_async > max_timeout and not check_mode:
display.warning(self.msg_warning__async_greater_than_timeout( display.warning(
task_poll, self.msg_warning__async_greater_than_timeout(task_poll, task_async, max_timeout)
task_async, )
max_timeout))
# inject the async directory based on the shell option into the # inject the async directory based on the shell option into the
# module args # module args
async_dir = self.get_shell_option('async_dir', default="~/.ansible_async") async_dir = self.get_shell_option("async_dir", default="~/.ansible_async")
# Bind the loop max duration to consistent values on both # Bind the loop max duration to consistent values on both
# remote and local sides (if not the same, make the loop # remote and local sides (if not the same, make the loop
# longer on the controller); and set a backup file path. # longer on the controller); and set a backup file path.
module_args['_timeout'] = task_async module_args["_timeout"] = task_async
module_args['_back'] = f'{async_dir}/iptables.state' module_args["_back"] = f"{async_dir}/iptables.state"
async_status_args = dict(mode='status') async_status_args = dict(mode="status")
confirm_cmd = f"rm -f {module_args['_back']}" confirm_cmd = f"rm -f {module_args['_back']}"
starter_cmd = f"touch {module_args['_back']}.starter" starter_cmd = f"touch {module_args['_back']}.starter"
remaining_time = max(task_async, max_timeout) remaining_time = max(task_async, max_timeout)
# do work! # do work!
result = merge_hash(result, self._execute_module(module_args=module_args, task_vars=task_vars, wrap_async=wrap_async)) result = merge_hash(
result, self._execute_module(module_args=module_args, task_vars=task_vars, wrap_async=wrap_async)
)
# Then the 3-steps "go ahead or rollback": # Then the 3-steps "go ahead or rollback":
# 1. Catch early errors of the module (in asynchronous task) if any. # 1. Catch early errors of the module (in asynchronous task) if any.
@@ -140,9 +142,9 @@ class ActionModule(ActionBase):
# 2. Reset connection to ensure a persistent one will not be reused. # 2. Reset connection to ensure a persistent one will not be reused.
# 3. Confirm the restored state by removing the backup on the remote. # 3. Confirm the restored state by removing the backup on the remote.
# Retrieve the results of the asynchronous task to return them. # Retrieve the results of the asynchronous task to return them.
if '_back' in module_args: if "_back" in module_args:
async_status_args['jid'] = result.get('ansible_job_id', None) async_status_args["jid"] = result.get("ansible_job_id", None)
if async_status_args['jid'] is None: if async_status_args["jid"] is None:
raise AnsibleActionFail("Unable to get 'ansible_job_id'.") raise AnsibleActionFail("Unable to get 'ansible_job_id'.")
# Catch early errors due to missing mandatory option, bad # Catch early errors due to missing mandatory option, bad
@@ -156,7 +158,7 @@ class ActionModule(ActionBase):
# As the main command is not yet executed on the target, here # As the main command is not yet executed on the target, here
# 'finished' means 'failed before main command be executed'. # 'finished' means 'failed before main command be executed'.
if not result['finished']: if not result["finished"]:
try: try:
self._connection.reset() self._connection.reset()
except AttributeError: except AttributeError:
@@ -178,16 +180,16 @@ class ActionModule(ActionBase):
result = merge_hash(result, self._async_result(async_status_args, task_vars, remaining_time)) result = merge_hash(result, self._async_result(async_status_args, task_vars, remaining_time))
# Cleanup async related stuff and internal params # Cleanup async related stuff and internal params
for key in ('ansible_job_id', 'results_file', 'started', 'finished'): for key in ("ansible_job_id", "results_file", "started", "finished"):
if result.get(key): if result.get(key):
del result[key] del result[key]
if result.get('invocation', {}).get('module_args'): if result.get("invocation", {}).get("module_args"):
for key in ('_back', '_timeout', '_async_dir', 'jid'): for key in ("_back", "_timeout", "_async_dir", "jid"):
if result['invocation']['module_args'].get(key): if result["invocation"]["module_args"].get(key):
del result['invocation']['module_args'][key] del result["invocation"]["module_args"][key]
async_status_args['mode'] = 'cleanup' async_status_args["mode"] = "cleanup"
dummy = self._async_result(async_status_args, task_vars, 0) dummy = self._async_result(async_status_args, task_vars, 0)
if not wrap_async: if not wrap_async:

View File

@@ -26,35 +26,31 @@ class TimedOutException(Exception):
class ActionModule(ActionBase): class ActionModule(ActionBase):
TRANSFERS_FILES = False TRANSFERS_FILES = False
_VALID_ARGS = frozenset(( _VALID_ARGS = frozenset(("msg", "delay", "search_paths"))
'msg',
'delay',
'search_paths'
))
DEFAULT_CONNECT_TIMEOUT = None DEFAULT_CONNECT_TIMEOUT = None
DEFAULT_PRE_SHUTDOWN_DELAY = 0 DEFAULT_PRE_SHUTDOWN_DELAY = 0
DEFAULT_SHUTDOWN_MESSAGE = 'Shut down initiated by Ansible' DEFAULT_SHUTDOWN_MESSAGE = "Shut down initiated by Ansible"
DEFAULT_SHUTDOWN_COMMAND = 'shutdown' DEFAULT_SHUTDOWN_COMMAND = "shutdown"
DEFAULT_SHUTDOWN_COMMAND_ARGS = '-h {delay_min} "{message}"' DEFAULT_SHUTDOWN_COMMAND_ARGS = '-h {delay_min} "{message}"'
DEFAULT_SUDOABLE = True DEFAULT_SUDOABLE = True
SHUTDOWN_COMMANDS = { SHUTDOWN_COMMANDS = {
'alpine': 'poweroff', "alpine": "poweroff",
'vmkernel': 'halt', "vmkernel": "halt",
} }
SHUTDOWN_COMMAND_ARGS = { SHUTDOWN_COMMAND_ARGS = {
'alpine': '', "alpine": "",
'void': '-h +{delay_min} "{message}"', "void": '-h +{delay_min} "{message}"',
'freebsd': '-p +{delay_sec}s "{message}"', "freebsd": '-p +{delay_sec}s "{message}"',
'linux': DEFAULT_SHUTDOWN_COMMAND_ARGS, "linux": DEFAULT_SHUTDOWN_COMMAND_ARGS,
'macosx': '-h +{delay_min} "{message}"', "macosx": '-h +{delay_min} "{message}"',
'openbsd': '-h +{delay_min} "{message}"', "openbsd": '-h +{delay_min} "{message}"',
'solaris': '-y -g {delay_sec} -i 5 "{message}"', "solaris": '-y -g {delay_sec} -i 5 "{message}"',
'sunos': '-y -g {delay_sec} -i 5 "{message}"', "sunos": '-y -g {delay_sec} -i 5 "{message}"',
'vmkernel': '-d {delay_sec}', "vmkernel": "-d {delay_sec}",
'aix': '-Fh', "aix": "-Fh",
} }
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@@ -62,7 +58,7 @@ class ActionModule(ActionBase):
@property @property
def delay(self): def delay(self):
return self._check_delay('delay', self.DEFAULT_PRE_SHUTDOWN_DELAY) return self._check_delay("delay", self.DEFAULT_PRE_SHUTDOWN_DELAY)
def _check_delay(self, key, default): def _check_delay(self, key, default):
"""Ensure that the value is positive or zero""" """Ensure that the value is positive or zero"""
@@ -75,29 +71,28 @@ class ActionModule(ActionBase):
"""Get dist+version specific args first, then distribution, then family, lastly use default""" """Get dist+version specific args first, then distribution, then family, lastly use default"""
attr = getattr(self, variable_name) attr = getattr(self, variable_name)
value = attr.get( value = attr.get(
distribution['name'] + distribution['version'], distribution["name"] + distribution["version"],
attr.get( attr.get(distribution["name"], attr.get(distribution["family"], getattr(self, default_value))),
distribution['name'], )
attr.get(
distribution['family'],
getattr(self, default_value))))
return value return value
def get_distribution(self, task_vars): def get_distribution(self, task_vars):
# FIXME: only execute the module if we don't already have the facts we need # FIXME: only execute the module if we don't already have the facts we need
distribution = {} distribution = {}
display.debug(f'{self._task.action}: running setup module to get distribution') display.debug(f"{self._task.action}: running setup module to get distribution")
module_output = self._execute_module( module_output = self._execute_module(
task_vars=task_vars, task_vars=task_vars, module_name="ansible.legacy.setup", module_args={"gather_subset": "min"}
module_name='ansible.legacy.setup', )
module_args={'gather_subset': 'min'})
try: try:
if module_output.get('failed', False): if module_output.get("failed", False):
raise AnsibleError(f"Failed to determine system distribution. {fmt(module_output, 'module_stdout')}, {fmt(module_output, 'module_stderr')}") raise AnsibleError(
distribution['name'] = module_output['ansible_facts']['ansible_distribution'].lower() f"Failed to determine system distribution. {fmt(module_output, 'module_stdout')}, {fmt(module_output, 'module_stderr')}"
distribution['version'] = to_text( )
module_output['ansible_facts']['ansible_distribution_version'].split('.')[0]) distribution["name"] = module_output["ansible_facts"]["ansible_distribution"].lower()
distribution['family'] = to_text(module_output['ansible_facts']['ansible_os_family'].lower()) distribution["version"] = to_text(
module_output["ansible_facts"]["ansible_distribution_version"].split(".")[0]
)
distribution["family"] = to_text(module_output["ansible_facts"]["ansible_os_family"].lower())
display.debug(f"{self._task.action}: distribution: {distribution}") display.debug(f"{self._task.action}: distribution: {distribution}")
return distribution return distribution
except KeyError as ke: except KeyError as ke:
@@ -105,22 +100,20 @@ class ActionModule(ActionBase):
def get_shutdown_command(self, task_vars, distribution): def get_shutdown_command(self, task_vars, distribution):
def find_command(command, find_search_paths): def find_command(command, find_search_paths):
display.debug(f'{self._task.action}: running find module looking in {find_search_paths} to get path for "{command}"') display.debug(
f'{self._task.action}: running find module looking in {find_search_paths} to get path for "{command}"'
)
find_result = self._execute_module( find_result = self._execute_module(
task_vars=task_vars, task_vars=task_vars,
# prevent collection search by calling with ansible.legacy (still allows library/ override of find) # prevent collection search by calling with ansible.legacy (still allows library/ override of find)
module_name='ansible.legacy.find', module_name="ansible.legacy.find",
module_args={ module_args={"paths": find_search_paths, "patterns": [command], "file_type": "any"},
'paths': find_search_paths,
'patterns': [command],
'file_type': 'any'
}
) )
return [x['path'] for x in find_result['files']] return [x["path"] for x in find_result["files"]]
shutdown_bin = self._get_value_from_facts('SHUTDOWN_COMMANDS', distribution, 'DEFAULT_SHUTDOWN_COMMAND') shutdown_bin = self._get_value_from_facts("SHUTDOWN_COMMANDS", distribution, "DEFAULT_SHUTDOWN_COMMAND")
default_search_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin'] default_search_paths = ["/sbin", "/usr/sbin", "/usr/local/sbin"]
search_paths = self._task.args.get('search_paths', default_search_paths) search_paths = self._task.args.get("search_paths", default_search_paths)
# FIXME: switch all this to user arg spec validation methods when they are available # FIXME: switch all this to user arg spec validation methods when they are available
# Convert bare strings to a list # Convert bare strings to a list
@@ -138,26 +131,28 @@ class ActionModule(ActionBase):
full_path = find_command(shutdown_bin, search_paths) # find the path to the shutdown command full_path = find_command(shutdown_bin, search_paths) # find the path to the shutdown command
if not full_path: # if we could not find the shutdown command if not full_path: # if we could not find the shutdown command
# tell the user we will try with systemd # tell the user we will try with systemd
display.vvv(f'Unable to find command "{shutdown_bin}" in search paths: {search_paths}, will attempt a shutdown using systemd directly.') display.vvv(
systemctl_search_paths = ['/bin', '/usr/bin'] f'Unable to find command "{shutdown_bin}" in search paths: {search_paths}, will attempt a shutdown using systemd directly.'
full_path = find_command('systemctl', systemctl_search_paths) # find the path to the systemctl command )
systemctl_search_paths = ["/bin", "/usr/bin"]
full_path = find_command("systemctl", systemctl_search_paths) # find the path to the systemctl command
if not full_path: # if we couldn't find systemctl if not full_path: # if we couldn't find systemctl
raise AnsibleError( raise AnsibleError(
f'Could not find command "{shutdown_bin}" in search paths: {search_paths} or systemctl' f'Could not find command "{shutdown_bin}" in search paths: {search_paths} or systemctl'
f' command in search paths: {systemctl_search_paths}, unable to shutdown.') # we give up here f" command in search paths: {systemctl_search_paths}, unable to shutdown."
) # we give up here
else: else:
return f"{full_path[0]} poweroff" # done, since we cannot use args with systemd shutdown return f"{full_path[0]} poweroff" # done, since we cannot use args with systemd shutdown
# systemd case taken care of, here we add args to the command # systemd case taken care of, here we add args to the command
args = self._get_value_from_facts('SHUTDOWN_COMMAND_ARGS', distribution, 'DEFAULT_SHUTDOWN_COMMAND_ARGS') args = self._get_value_from_facts("SHUTDOWN_COMMAND_ARGS", distribution, "DEFAULT_SHUTDOWN_COMMAND_ARGS")
# Convert seconds to minutes. If less that 60, set it to 0. # Convert seconds to minutes. If less that 60, set it to 0.
delay_sec = self.delay delay_sec = self.delay
shutdown_message = self._task.args.get('msg', self.DEFAULT_SHUTDOWN_MESSAGE) shutdown_message = self._task.args.get("msg", self.DEFAULT_SHUTDOWN_MESSAGE)
af = args.format(delay_sec=delay_sec, delay_min=delay_sec // 60, message=shutdown_message) af = args.format(delay_sec=delay_sec, delay_min=delay_sec // 60, message=shutdown_message)
return f'{full_path[0]} {af}' return f"{full_path[0]} {af}"
def perform_shutdown(self, task_vars, distribution): def perform_shutdown(self, task_vars, distribution):
result = {} result = {}
@@ -169,23 +164,24 @@ class ActionModule(ActionBase):
display.vvv(f"{self._task.action}: shutting down server...") display.vvv(f"{self._task.action}: shutting down server...")
display.debug(f"{self._task.action}: shutting down server with command '{shutdown_command_exec}'") display.debug(f"{self._task.action}: shutting down server with command '{shutdown_command_exec}'")
if self._play_context.check_mode: if self._play_context.check_mode:
shutdown_result['rc'] = 0 shutdown_result["rc"] = 0
else: else:
shutdown_result = self._low_level_execute_command(shutdown_command_exec, sudoable=self.DEFAULT_SUDOABLE) shutdown_result = self._low_level_execute_command(shutdown_command_exec, sudoable=self.DEFAULT_SUDOABLE)
except AnsibleConnectionFailure as e: except AnsibleConnectionFailure as e:
# If the connection is closed too quickly due to the system being shutdown, carry on # If the connection is closed too quickly due to the system being shutdown, carry on
display.debug( display.debug(f"{self._task.action}: AnsibleConnectionFailure caught and handled: {e}")
f'{self._task.action}: AnsibleConnectionFailure caught and handled: {e}') shutdown_result["rc"] = 0
shutdown_result['rc'] = 0
if shutdown_result['rc'] != 0: if shutdown_result["rc"] != 0:
result['failed'] = True result["failed"] = True
result['shutdown'] = False result["shutdown"] = False
result['msg'] = f"Shutdown command failed. Error was {fmt(shutdown_result, 'stdout')}, {fmt(shutdown_result, 'stderr')}" result["msg"] = (
f"Shutdown command failed. Error was {fmt(shutdown_result, 'stdout')}, {fmt(shutdown_result, 'stderr')}"
)
return result return result
result['failed'] = False result["failed"] = False
result['shutdown_command'] = shutdown_command_exec result["shutdown_command"] = shutdown_command_exec
return result return result
def run(self, tmp=None, task_vars=None): def run(self, tmp=None, task_vars=None):
@@ -193,16 +189,16 @@ class ActionModule(ActionBase):
self._supports_async = True self._supports_async = True
# If running with local connection, fail so we don't shutdown ourself # If running with local connection, fail so we don't shutdown ourself
if self._connection.transport == 'local' and (not self._play_context.check_mode): if self._connection.transport == "local" and (not self._play_context.check_mode):
msg = f'Running {self._task.action} with local connection would shutdown the control node.' msg = f"Running {self._task.action} with local connection would shutdown the control node."
return {'changed': False, 'elapsed': 0, 'shutdown': False, 'failed': True, 'msg': msg} return {"changed": False, "elapsed": 0, "shutdown": False, "failed": True, "msg": msg}
if task_vars is None: if task_vars is None:
task_vars = {} task_vars = {}
result = super().run(tmp, task_vars) result = super().run(tmp, task_vars)
if result.get('skipped', False) or result.get('failed', False): if result.get("skipped", False) or result.get("failed", False):
return result return result
distribution = self.get_distribution(task_vars) distribution = self.get_distribution(task_vars)
@@ -210,12 +206,12 @@ class ActionModule(ActionBase):
# Initiate shutdown # Initiate shutdown
shutdown_result = self.perform_shutdown(task_vars, distribution) shutdown_result = self.perform_shutdown(task_vars, distribution)
if shutdown_result['failed']: if shutdown_result["failed"]:
result = shutdown_result result = shutdown_result
return result return result
result['shutdown'] = True result["shutdown"] = True
result['changed'] = True result["changed"] = True
result['shutdown_command'] = shutdown_result['shutdown_command'] result["shutdown_command"] = shutdown_result["shutdown_command"]
return result return result

View File

@@ -94,23 +94,22 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.doas"
name = 'community.general.doas'
# messages for detecting prompted password issues # messages for detecting prompted password issues
fail = ('Permission denied',) fail = ("Permission denied",)
missing = ('Authorization required',) missing = ("Authorization required",)
# See https://github.com/ansible-collections/community.general/issues/9977, # See https://github.com/ansible-collections/community.general/issues/9977,
# https://github.com/ansible/ansible/pull/78111 # https://github.com/ansible/ansible/pull/78111
pipelining = False pipelining = False
def check_password_prompt(self, b_output): def check_password_prompt(self, b_output):
''' checks if the expected password prompt exists in b_output ''' """checks if the expected password prompt exists in b_output"""
# FIXME: more accurate would be: 'doas (%s@' % remote_user # FIXME: more accurate would be: 'doas (%s@' % remote_user
# however become plugins don't have that information currently # however become plugins don't have that information currently
b_prompts = [to_bytes(p) for p in self.get_option('prompt_l10n')] or [br'doas \(', br'Password:'] b_prompts = [to_bytes(p) for p in self.get_option("prompt_l10n")] or [rb"doas \(", rb"Password:"]
b_prompt = b"|".join(b_prompts) b_prompt = b"|".join(b_prompts)
return bool(re.match(b_prompt, b_output)) return bool(re.match(b_prompt, b_output))
@@ -123,16 +122,16 @@ class BecomeModule(BecomeBase):
self.prompt = True self.prompt = True
become_exe = self.get_option('become_exe') become_exe = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
if not self.get_option('become_pass') and '-n' not in flags: if not self.get_option("become_pass") and "-n" not in flags:
flags += ' -n' flags += " -n"
become_user = self.get_option('become_user') become_user = self.get_option("become_user")
user = f'-u {become_user}' if become_user else '' user = f"-u {become_user}" if become_user else ""
success_cmd = self._build_success_command(cmd, shell, noexe=True) success_cmd = self._build_success_command(cmd, shell, noexe=True)
executable = getattr(shell, 'executable', shell.SHELL_FAMILY) executable = getattr(shell, "executable", shell.SHELL_FAMILY)
return f'{become_exe} {flags} {user} {executable} -c {success_cmd}' return f"{become_exe} {flags} {user} {executable} -c {success_cmd}"

View File

@@ -74,11 +74,10 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.dzdo"
name = 'community.general.dzdo'
# messages for detecting prompted password issues # messages for detecting prompted password issues
fail = ('Sorry, try again.',) fail = ("Sorry, try again.",)
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
@@ -86,14 +85,14 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
becomecmd = self.get_option('become_exe') becomecmd = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
if self.get_option('become_pass'): if self.get_option("become_pass"):
self.prompt = f'[dzdo via ansible, key={self._id}] password:' self.prompt = f"[dzdo via ansible, key={self._id}] password:"
flags = f"{flags.replace('-n', '')} -p \"{self.prompt}\"" flags = f'{flags.replace("-n", "")} -p "{self.prompt}"'
become_user = self.get_option('become_user') become_user = self.get_option("become_user")
user = f'-u {become_user}' if become_user else '' user = f"-u {become_user}" if become_user else ""
return f"{becomecmd} {flags} {user} {self._build_success_command(cmd, shell)}" return f"{becomecmd} {flags} {user} {self._build_success_command(cmd, shell)}"

View File

@@ -92,23 +92,21 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.ksu"
name = 'community.general.ksu'
# messages for detecting prompted password issues # messages for detecting prompted password issues
fail = ('Password incorrect',) fail = ("Password incorrect",)
missing = ('No password given',) missing = ("No password given",)
def check_password_prompt(self, b_output): def check_password_prompt(self, b_output):
''' checks if the expected password prompt exists in b_output ''' """checks if the expected password prompt exists in b_output"""
prompts = self.get_option('prompt_l10n') or ["Kerberos password for .*@.*:"] prompts = self.get_option("prompt_l10n") or ["Kerberos password for .*@.*:"]
b_prompt = b"|".join(to_bytes(p) for p in prompts) b_prompt = b"|".join(to_bytes(p) for p in prompts)
return bool(re.match(b_prompt, b_output)) return bool(re.match(b_prompt, b_output))
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
# Prompt handling for ``ksu`` is more complicated, this # Prompt handling for ``ksu`` is more complicated, this
@@ -118,8 +116,8 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
exe = self.get_option('become_exe') exe = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
user = self.get_option('become_user') user = self.get_option("become_user")
return f'{exe} {user} {flags} -e {self._build_success_command(cmd, shell)} ' return f"{exe} {user} {flags} -e {self._build_success_command(cmd, shell)} "

View File

@@ -96,16 +96,15 @@ from ansible.plugins.become import BecomeBase
from ansible.module_utils.common.text.converters import to_bytes from ansible.module_utils.common.text.converters import to_bytes
ansi_color_codes = re_compile(to_bytes(r'\x1B\[[0-9;]+m')) ansi_color_codes = re_compile(to_bytes(r"\x1B\[[0-9;]+m"))
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.machinectl"
name = 'community.general.machinectl' prompt = "Password: "
fail = ("==== AUTHENTICATION FAILED ====",)
prompt = 'Password: ' success = ("==== AUTHENTICATION COMPLETE ====",)
fail = ('==== AUTHENTICATION FAILED ====',)
success = ('==== AUTHENTICATION COMPLETE ====',)
require_tty = True # see https://github.com/ansible-collections/community.general/issues/6932 require_tty = True # see https://github.com/ansible-collections/community.general/issues/6932
# See https://github.com/ansible/ansible/issues/81254, # See https://github.com/ansible/ansible/issues/81254,
@@ -122,11 +121,11 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
become = self.get_option('become_exe') become = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
user = self.get_option('become_user') user = self.get_option("become_user")
return f'{become} -q shell {flags} {user}@ {self._build_success_command(cmd, shell)}' return f"{become} -q shell {flags} {user}@ {self._build_success_command(cmd, shell)}"
def check_success(self, b_output): def check_success(self, b_output):
b_output = self.remove_ansi_codes(b_output) b_output = self.remove_ansi_codes(b_output)

View File

@@ -86,10 +86,9 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.pbrun"
name = 'community.general.pbrun' prompt = "Password:"
prompt = 'Password:'
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
@@ -97,11 +96,11 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
become_exe = self.get_option('become_exe') become_exe = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
become_user = self.get_option('become_user') become_user = self.get_option("become_user")
user = f'-u {become_user}' if become_user else '' user = f"-u {become_user}" if become_user else ""
noexe = not self.get_option('wrap_exe') noexe = not self.get_option("wrap_exe")
return f"{become_exe} {flags} {user} {self._build_success_command(cmd, shell, noexe=noexe)}" return f"{become_exe} {flags} {user} {self._build_success_command(cmd, shell, noexe=noexe)}"

View File

@@ -91,8 +91,7 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.pfexec"
name = 'community.general.pfexec'
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
@@ -100,8 +99,8 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
exe = self.get_option('become_exe') exe = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
noexe = not self.get_option('wrap_exe') noexe = not self.get_option("wrap_exe")
return f'{exe} {flags} {self._build_success_command(cmd, shell, noexe=noexe)}' return f"{exe} {flags} {self._build_success_command(cmd, shell, noexe=noexe)}"

View File

@@ -63,9 +63,8 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.pmrun"
name = 'community.general.pmrun' prompt = "Enter UPM user password:"
prompt = 'Enter UPM user password:'
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
@@ -73,7 +72,7 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
become = self.get_option('become_exe') become = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
return f'{become} {flags} {shlex_quote(self._build_success_command(cmd, shell))}' return f"{become} {flags} {shlex_quote(self._build_success_command(cmd, shell))}"

View File

@@ -85,15 +85,12 @@ ansi_color_codes = re_compile(to_bytes(r"\x1B\[[0-9;]+m"))
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.run0" name = "community.general.run0"
prompt = "Password: " prompt = "Password: "
fail = ("==== AUTHENTICATION FAILED ====",) fail = ("==== AUTHENTICATION FAILED ====",)
success = ("==== AUTHENTICATION COMPLETE ====",) success = ("==== AUTHENTICATION COMPLETE ====",)
require_tty = ( require_tty = True # see https://github.com/ansible-collections/community.general/issues/6932
True # see https://github.com/ansible-collections/community.general/issues/6932
)
@staticmethod @staticmethod
def remove_ansi_codes(line): def remove_ansi_codes(line):
@@ -109,9 +106,7 @@ class BecomeModule(BecomeBase):
flags = self.get_option("become_flags") flags = self.get_option("become_flags")
user = self.get_option("become_user") user = self.get_option("become_user")
return ( return f"{become} --user={user} {flags} {self._build_success_command(cmd, shell)}"
f"{become} --user={user} {flags} {self._build_success_command(cmd, shell)}"
)
def check_success(self, b_output): def check_success(self, b_output):
b_output = self.remove_ansi_codes(b_output) b_output = self.remove_ansi_codes(b_output)

View File

@@ -75,11 +75,10 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.sesu"
name = 'community.general.sesu' prompt = "Please enter your password:"
fail = missing = ("Sorry, try again with sesu.",)
prompt = 'Please enter your password:'
fail = missing = ('Sorry, try again with sesu.',)
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
@@ -87,8 +86,8 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
become = self.get_option('become_exe') become = self.get_option("become_exe")
flags = self.get_option('become_flags') flags = self.get_option("become_flags")
user = self.get_option('become_user') user = self.get_option("become_user")
return f'{become} {flags} {user} -c {self._build_success_command(cmd, shell)}' return f"{become} {flags} {user} -c {self._build_success_command(cmd, shell)}"

View File

@@ -79,12 +79,11 @@ from ansible.plugins.become import BecomeBase
class BecomeModule(BecomeBase): class BecomeModule(BecomeBase):
name = "community.general.sudosu"
name = 'community.general.sudosu'
# messages for detecting prompted password issues # messages for detecting prompted password issues
fail = ('Sorry, try again.',) fail = ("Sorry, try again.",)
missing = ('Sorry, a password is required to run sudo', 'sudo: a password is required') missing = ("Sorry, a password is required to run sudo", "sudo: a password is required")
def build_become_command(self, cmd, shell): def build_become_command(self, cmd, shell):
super().build_become_command(cmd, shell) super().build_become_command(cmd, shell)
@@ -92,21 +91,21 @@ class BecomeModule(BecomeBase):
if not cmd: if not cmd:
return cmd return cmd
becomecmd = 'sudo' becomecmd = "sudo"
flags = self.get_option('become_flags') or '' flags = self.get_option("become_flags") or ""
prompt = '' prompt = ""
if self.get_option('become_pass'): if self.get_option("become_pass"):
self.prompt = f'[sudo via ansible, key={self._id}] password:' self.prompt = f"[sudo via ansible, key={self._id}] password:"
if flags: # this could be simplified, but kept as is for now for backwards string matching if flags: # this could be simplified, but kept as is for now for backwards string matching
flags = flags.replace('-n', '') flags = flags.replace("-n", "")
prompt = f'-p "{self.prompt}"' prompt = f'-p "{self.prompt}"'
user = self.get_option('become_user') or '' user = self.get_option("become_user") or ""
if user: if user:
user = f'{user}' user = f"{user}"
if self.get_option('alt_method'): if self.get_option("alt_method"):
return f"{becomecmd} {flags} {prompt} su -l {user} -c {self._build_success_command(cmd, shell, True)}" return f"{becomecmd} {flags} {prompt} su -l {user} -c {self._build_success_command(cmd, shell, True)}"
else: else:
return f"{becomecmd} {flags} {prompt} su -l {user} {self._build_success_command(cmd, shell)}" return f"{becomecmd} {flags} {prompt} su -l {user} {self._build_success_command(cmd, shell)}"

View File

@@ -59,6 +59,7 @@ from ansible.utils.display import Display
try: try:
import memcache import memcache
HAS_MEMCACHE = True HAS_MEMCACHE = True
except ImportError: except ImportError:
HAS_MEMCACHE = False HAS_MEMCACHE = False
@@ -75,7 +76,7 @@ class ProxyClientPool:
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self.max_connections = kwargs.pop('max_connections', 1024) self.max_connections = kwargs.pop("max_connections", 1024)
self.connection_args = args self.connection_args = args
self.connection_kwargs = kwargs self.connection_kwargs = kwargs
self.reset() self.reset()
@@ -123,6 +124,7 @@ class ProxyClientPool:
def __getattr__(self, name): def __getattr__(self, name):
def wrapped(*args, **kwargs): def wrapped(*args, **kwargs):
return self._proxy_client(name, *args, **kwargs) return self._proxy_client(name, *args, **kwargs)
return wrapped return wrapped
def _proxy_client(self, name, *args, **kwargs): def _proxy_client(self, name, *args, **kwargs):
@@ -139,7 +141,8 @@ class CacheModuleKeys(MutableSet):
A set subclass that keeps track of insertion time and persists A set subclass that keeps track of insertion time and persists
the set in memcached. the set in memcached.
""" """
PREFIX = 'ansible_cache_keys'
PREFIX = "ansible_cache_keys"
def __init__(self, cache, *args, **kwargs): def __init__(self, cache, *args, **kwargs):
self._cache = cache self._cache = cache
@@ -171,15 +174,14 @@ class CacheModuleKeys(MutableSet):
class CacheModule(BaseCacheModule): class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
connection = ['127.0.0.1:11211'] connection = ["127.0.0.1:11211"]
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if self.get_option('_uri'): if self.get_option("_uri"):
connection = self.get_option('_uri') connection = self.get_option("_uri")
self._timeout = self.get_option('_timeout') self._timeout = self.get_option("_timeout")
self._prefix = self.get_option('_prefix') self._prefix = self.get_option("_prefix")
if not HAS_MEMCACHE: if not HAS_MEMCACHE:
raise AnsibleError("python-memcached is required for the memcached fact cache") raise AnsibleError("python-memcached is required for the memcached fact cache")

View File

@@ -51,14 +51,15 @@ class CacheModule(BaseFileCacheModule):
""" """
A caching module backed by pickle files. A caching module backed by pickle files.
""" """
_persistent = False # prevent unnecessary JSON serialization and key munging _persistent = False # prevent unnecessary JSON serialization and key munging
def _load(self, filepath): def _load(self, filepath):
# Pickle is a binary format # Pickle is a binary format
with open(filepath, 'rb') as f: with open(filepath, "rb") as f:
return pickle.load(f, encoding='bytes') return pickle.load(f, encoding="bytes")
def _dump(self, value, filepath): def _dump(self, value, filepath):
with open(filepath, 'wb') as f: with open(filepath, "wb") as f:
# Use pickle protocol 2 which is compatible with Python 2.3+. # Use pickle protocol 2 which is compatible with Python 2.3+.
pickle.dump(value, f, protocol=2) pickle.dump(value, f, protocol=2)

View File

@@ -77,6 +77,7 @@ from ansible.utils.display import Display
try: try:
from redis import StrictRedis, VERSION from redis import StrictRedis, VERSION
HAS_REDIS = True HAS_REDIS = True
except ImportError: except ImportError:
HAS_REDIS = False HAS_REDIS = False
@@ -93,32 +94,35 @@ class CacheModule(BaseCacheModule):
to expire keys. This mechanism is used or a pattern matched 'scan' for to expire keys. This mechanism is used or a pattern matched 'scan' for
performance. performance.
""" """
_sentinel_service_name = None _sentinel_service_name = None
re_url_conn = re.compile(r'^([^:]+|\[[^]]+\]):(\d+):(\d+)(?::(.*))?$') re_url_conn = re.compile(r"^([^:]+|\[[^]]+\]):(\d+):(\d+)(?::(.*))?$")
re_sent_conn = re.compile(r'^(.*):(\d+)$') re_sent_conn = re.compile(r"^(.*):(\d+)$")
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
uri = '' uri = ""
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if self.get_option('_uri'): if self.get_option("_uri"):
uri = self.get_option('_uri') uri = self.get_option("_uri")
self._timeout = float(self.get_option('_timeout')) self._timeout = float(self.get_option("_timeout"))
self._prefix = self.get_option('_prefix') self._prefix = self.get_option("_prefix")
self._keys_set = self.get_option('_keyset_name') self._keys_set = self.get_option("_keyset_name")
self._sentinel_service_name = self.get_option('_sentinel_service_name') self._sentinel_service_name = self.get_option("_sentinel_service_name")
if not HAS_REDIS: if not HAS_REDIS:
raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'") raise AnsibleError(
"The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'"
)
self._cache = {} self._cache = {}
kw = {} kw = {}
# tls connection # tls connection
tlsprefix = 'tls://' tlsprefix = "tls://"
if uri.startswith(tlsprefix): if uri.startswith(tlsprefix):
kw['ssl'] = True kw["ssl"] = True
uri = uri[len(tlsprefix):] uri = uri[len(tlsprefix) :]
# redis sentinel connection # redis sentinel connection
if self._sentinel_service_name: if self._sentinel_service_name:
@@ -128,7 +132,7 @@ class CacheModule(BaseCacheModule):
connection = self._parse_connection(self.re_url_conn, uri) connection = self._parse_connection(self.re_url_conn, uri)
self._db = StrictRedis(*connection, **kw) self._db = StrictRedis(*connection, **kw)
display.vv(f'Redis connection: {self._db}') display.vv(f"Redis connection: {self._db}")
@staticmethod @staticmethod
def _parse_connection(re_patt, uri): def _parse_connection(re_patt, uri):
@@ -146,33 +150,32 @@ class CacheModule(BaseCacheModule):
except ImportError: except ImportError:
raise AnsibleError("The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel.") raise AnsibleError("The 'redis' python module (version 2.9.0 or newer) is required to use redis sentinel.")
if ';' not in uri: if ";" not in uri:
raise AnsibleError('_uri does not have sentinel syntax.') raise AnsibleError("_uri does not have sentinel syntax.")
# format: "localhost:26379;localhost2:26379;0:changeme" # format: "localhost:26379;localhost2:26379;0:changeme"
connections = uri.split(';') connections = uri.split(";")
connection_args = connections.pop(-1) connection_args = connections.pop(-1)
if len(connection_args) > 0: # handle if no db nr is given if len(connection_args) > 0: # handle if no db nr is given
connection_args = connection_args.split(':') connection_args = connection_args.split(":")
kw['db'] = connection_args.pop(0) kw["db"] = connection_args.pop(0)
try: try:
kw['password'] = connection_args.pop(0) kw["password"] = connection_args.pop(0)
except IndexError: except IndexError:
pass # password is optional pass # password is optional
sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections] sentinels = [self._parse_connection(self.re_sent_conn, shost) for shost in connections]
display.vv(f'\nUsing redis sentinels: {sentinels}') display.vv(f"\nUsing redis sentinels: {sentinels}")
scon = Sentinel(sentinels, **kw) scon = Sentinel(sentinels, **kw)
try: try:
return scon.master_for(self._sentinel_service_name, socket_timeout=0.2) return scon.master_for(self._sentinel_service_name, socket_timeout=0.2)
except Exception as exc: except Exception as exc:
raise AnsibleError(f'Could not connect to redis sentinel: {exc}') raise AnsibleError(f"Could not connect to redis sentinel: {exc}")
def _make_key(self, key): def _make_key(self, key):
return self._prefix + key return self._prefix + key
def get(self, key): def get(self, key):
if key not in self._cache: if key not in self._cache:
value = self._db.get(self._make_key(key)) value = self._db.get(self._make_key(key))
# guard against the key not being removed from the zset; # guard against the key not being removed from the zset;
@@ -186,7 +189,6 @@ class CacheModule(BaseCacheModule):
return self._cache.get(key) return self._cache.get(key)
def set(self, key, value): def set(self, key, value):
value2 = json.dumps(value, cls=AnsibleJSONEncoder, sort_keys=True, indent=4) value2 = json.dumps(value, cls=AnsibleJSONEncoder, sort_keys=True, indent=4)
if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire' if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
self._db.setex(self._make_key(key), int(self._timeout), value2) self._db.setex(self._make_key(key), int(self._timeout), value2)
@@ -210,7 +212,7 @@ class CacheModule(BaseCacheModule):
def contains(self, key): def contains(self, key):
self._expire_keys() self._expire_keys()
return (self._db.zrank(self._keys_set, key) is not None) return self._db.zrank(self._keys_set, key) is not None
def delete(self, key): def delete(self, key):
if key in self._cache: if key in self._cache:

View File

@@ -58,9 +58,9 @@ class CacheModule(BaseFileCacheModule):
""" """
def _load(self, filepath): def _load(self, filepath):
with open(os.path.abspath(filepath), 'r', encoding='utf-8') as f: with open(os.path.abspath(filepath), "r", encoding="utf-8") as f:
return AnsibleLoader(f).get_single_data() return AnsibleLoader(f).get_single_data()
def _dump(self, value, filepath): def _dump(self, value, filepath):
with open(os.path.abspath(filepath), 'w', encoding='utf-8') as f: with open(os.path.abspath(filepath), "w", encoding="utf-8") as f:
yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False) yaml.dump(value, f, Dumper=AnsibleDumper, default_flow_style=False)

View File

@@ -49,6 +49,7 @@ from ansible.plugins.callback import CallbackBase
class MemProf(threading.Thread): class MemProf(threading.Thread):
"""Python thread for recording memory usage""" """Python thread for recording memory usage"""
def __init__(self, path, obj=None): def __init__(self, path, obj=None):
threading.Thread.__init__(self) threading.Thread.__init__(self)
self.obj = obj self.obj = obj
@@ -66,8 +67,8 @@ class MemProf(threading.Thread):
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate' CALLBACK_TYPE = "aggregate"
CALLBACK_NAME = 'community.general.cgroup_memory_recap' CALLBACK_NAME = "community.general.cgroup_memory_recap"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -80,11 +81,11 @@ class CallbackModule(CallbackBase):
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.cgroup_max_file = self.get_option('max_mem_file') self.cgroup_max_file = self.get_option("max_mem_file")
self.cgroup_current_file = self.get_option('cur_mem_file') self.cgroup_current_file = self.get_option("cur_mem_file")
with open(self.cgroup_max_file, 'w+') as f: with open(self.cgroup_max_file, "w+") as f:
f.write('0') f.write("0")
def _profile_memory(self, obj=None): def _profile_memory(self, obj=None):
prev_task = None prev_task = None
@@ -112,8 +113,8 @@ class CallbackModule(CallbackBase):
with open(self.cgroup_max_file) as f: with open(self.cgroup_max_file) as f:
max_results = int(f.read().strip()) / 1024 / 1024 max_results = int(f.read().strip()) / 1024 / 1024
self._display.banner('CGROUP MEMORY RECAP') self._display.banner("CGROUP MEMORY RECAP")
self._display.display(f'Execution Maximum: {max_results:0.2f}MB\n\n') self._display.display(f"Execution Maximum: {max_results:0.2f}MB\n\n")
for task, memory in self.task_results: for task, memory in self.task_results:
self._display.display(f'{task.get_name()} ({task._uuid}): {memory:0.2f}MB') self._display.display(f"{task.get_name()} ({task._uuid}): {memory:0.2f}MB")

View File

@@ -25,9 +25,10 @@ class CallbackModule(CallbackBase):
This is a very trivial example of how any callback function can get at play and task objects. This is a very trivial example of how any callback function can get at play and task objects.
play will be 'None' for runner invocations, and task will be None for 'setup' invocations. play will be 'None' for runner invocations, and task will be None for 'setup' invocations.
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate' CALLBACK_TYPE = "aggregate"
CALLBACK_NAME = 'community.general.context_demo' CALLBACK_NAME = "community.general.context_demo"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@@ -40,11 +41,11 @@ class CallbackModule(CallbackBase):
self._display.display(" --- ARGS ") self._display.display(" --- ARGS ")
for i, a in enumerate(args): for i, a in enumerate(args):
self._display.display(f' {i}: {a}') self._display.display(f" {i}: {a}")
self._display.display(" --- KWARGS ") self._display.display(" --- KWARGS ")
for k in kwargs: for k in kwargs:
self._display.display(f' {k}: {kwargs[k]}') self._display.display(f" {k}: {kwargs[k]}")
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
self.play = play self.play = play

View File

@@ -1,9 +1,9 @@
# Copyright (c) 2018, Ivan Aragones Muniesa <ivan.aragones.muniesa@gmail.com> # Copyright (c) 2018, Ivan Aragones Muniesa <ivan.aragones.muniesa@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
''' """
Counter enabled Ansible callback plugin (See DOCUMENTATION for more information) Counter enabled Ansible callback plugin (See DOCUMENTATION for more information)
''' """
from __future__ import annotations from __future__ import annotations
@@ -29,15 +29,14 @@ from ansible.playbook.task_include import TaskInclude
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
"""
'''
This is the default callback interface, which simply prints messages This is the default callback interface, which simply prints messages
to stdout when new callback events are received. to stdout when new callback events are received.
''' """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'community.general.counter_enabled' CALLBACK_NAME = "community.general.counter_enabled"
_task_counter = 1 _task_counter = 1
_task_total = 0 _task_total = 0
@@ -55,11 +54,7 @@ class CallbackModule(CallbackBase):
def _all_vars(self, host=None, task=None): def _all_vars(self, host=None, task=None):
# host and task need to be specified in case 'magic variables' (host vars, group vars, etc) # host and task need to be specified in case 'magic variables' (host vars, group vars, etc)
# need to be loaded as well # need to be loaded as well
return self._play.get_variable_manager().get_vars( return self._play.get_variable_manager().get_vars(play=self._play, host=host, task=task)
play=self._play,
host=host,
task=task
)
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self._playbook = playbook self._playbook = playbook
@@ -77,8 +72,8 @@ class CallbackModule(CallbackBase):
self._play = play self._play = play
self._previous_batch_total = self._current_batch_total self._previous_batch_total = self._current_batch_total
self._current_batch_total = self._previous_batch_total + len(self._all_vars()['vars']['ansible_play_batch']) self._current_batch_total = self._previous_batch_total + len(self._all_vars()["vars"]["ansible_play_batch"])
self._host_total = len(self._all_vars()['vars']['ansible_play_hosts_all']) self._host_total = len(self._all_vars()["vars"]["ansible_play_hosts_all"])
self._task_total = len(self._play.get_tasks()[0]) self._task_total = len(self._play.get_tasks()[0])
self._task_counter = 1 self._task_counter = 1
@@ -93,39 +88,39 @@ class CallbackModule(CallbackBase):
f"{hostcolor(host, stat)} : {colorize('ok', stat['ok'], C.COLOR_OK)} {colorize('changed', stat['changed'], C.COLOR_CHANGED)} " f"{hostcolor(host, stat)} : {colorize('ok', stat['ok'], C.COLOR_OK)} {colorize('changed', stat['changed'], C.COLOR_CHANGED)} "
f"{colorize('unreachable', stat['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', stat['failures'], C.COLOR_ERROR)} " f"{colorize('unreachable', stat['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', stat['failures'], C.COLOR_ERROR)} "
f"{colorize('rescued', stat['rescued'], C.COLOR_OK)} {colorize('ignored', stat['ignored'], C.COLOR_WARN)}", f"{colorize('rescued', stat['rescued'], C.COLOR_OK)} {colorize('ignored', stat['ignored'], C.COLOR_WARN)}",
screen_only=True screen_only=True,
) )
self._display.display( self._display.display(
f"{hostcolor(host, stat, False)} : {colorize('ok', stat['ok'], None)} {colorize('changed', stat['changed'], None)} " f"{hostcolor(host, stat, False)} : {colorize('ok', stat['ok'], None)} {colorize('changed', stat['changed'], None)} "
f"{colorize('unreachable', stat['unreachable'], None)} {colorize('failed', stat['failures'], None)} " f"{colorize('unreachable', stat['unreachable'], None)} {colorize('failed', stat['failures'], None)} "
f"{colorize('rescued', stat['rescued'], None)} {colorize('ignored', stat['ignored'], None)}", f"{colorize('rescued', stat['rescued'], None)} {colorize('ignored', stat['ignored'], None)}",
log_only=True log_only=True,
) )
self._display.display("", screen_only=True) self._display.display("", screen_only=True)
# print custom stats # print custom stats
if self._plugin_options.get('show_custom_stats', C.SHOW_CUSTOM_STATS) and stats.custom: if self._plugin_options.get("show_custom_stats", C.SHOW_CUSTOM_STATS) and stats.custom:
# fallback on constants for inherited plugins missing docs # fallback on constants for inherited plugins missing docs
self._display.banner("CUSTOM STATS: ") self._display.banner("CUSTOM STATS: ")
# per host # per host
# TODO: come up with 'pretty format' # TODO: come up with 'pretty format'
for k in sorted(stats.custom.keys()): for k in sorted(stats.custom.keys()):
if k == '_run': if k == "_run":
continue continue
_custom_stats = self._dump_results(stats.custom[k], indent=1).replace('\n', '') _custom_stats = self._dump_results(stats.custom[k], indent=1).replace("\n", "")
self._display.display(f'\t{k}: {_custom_stats}') self._display.display(f"\t{k}: {_custom_stats}")
# print per run custom stats # print per run custom stats
if '_run' in stats.custom: if "_run" in stats.custom:
self._display.display("", screen_only=True) self._display.display("", screen_only=True)
_custom_stats_run = self._dump_results(stats.custom['_run'], indent=1).replace('\n', '') _custom_stats_run = self._dump_results(stats.custom["_run"], indent=1).replace("\n", "")
self._display.display(f'\tRUN: {_custom_stats_run}') self._display.display(f"\tRUN: {_custom_stats_run}")
self._display.display("", screen_only=True) self._display.display("", screen_only=True)
def v2_playbook_on_task_start(self, task, is_conditional): def v2_playbook_on_task_start(self, task, is_conditional):
args = '' args = ""
# args can be specified as no_log in several places: in the task or in # args can be specified as no_log in several places: in the task or in
# the argument spec. We can check whether the task is no_log but the # the argument spec. We can check whether the task is no_log but the
# argument spec can't be because that is only run on the target # argument spec can't be because that is only run on the target
@@ -135,8 +130,8 @@ class CallbackModule(CallbackBase):
# that they can secure this if they feel that their stdout is insecure # that they can secure this if they feel that their stdout is insecure
# (shoulder surfing, logging stdout straight to a file, etc). # (shoulder surfing, logging stdout straight to a file, etc).
if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT: if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
args = ', '.join(('{k}={v}' for k, v in task.args.items())) args = ", ".join(("{k}={v}" for k, v in task.args.items()))
args = f' {args}' args = f" {args}"
self._display.banner(f"TASK {self._task_counter}/{self._task_total} [{task.get_name().strip()}{args}]") self._display.banner(f"TASK {self._task_counter}/{self._task_total} [{task.get_name().strip()}{args}]")
if self._display.verbosity >= 2: if self._display.verbosity >= 2:
path = task.get_path() path = task.get_path()
@@ -146,17 +141,16 @@ class CallbackModule(CallbackBase):
self._task_counter += 1 self._task_counter += 1
def v2_runner_on_ok(self, result): def v2_runner_on_ok(self, result):
self._host_counter += 1 self._host_counter += 1
delegated_vars = result._result.get('_ansible_delegated_vars', None) delegated_vars = result._result.get("_ansible_delegated_vars", None)
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid: if self._play.strategy == "free" and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task) self._print_task_banner(result._task)
if isinstance(result._task, TaskInclude): if isinstance(result._task, TaskInclude):
return return
elif result._result.get('changed', False): elif result._result.get("changed", False):
if delegated_vars: if delegated_vars:
msg = f"changed: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> {delegated_vars['ansible_host']}]" msg = f"changed: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> {delegated_vars['ansible_host']}]"
else: else:
@@ -171,7 +165,7 @@ class CallbackModule(CallbackBase):
self._handle_warnings(result._result) self._handle_warnings(result._result)
if result._task.loop and 'results' in result._result: if result._task.loop and "results" in result._result:
self._process_items(result) self._process_items(result)
else: else:
self._clean_results(result._result, result._task.action) self._clean_results(result._result, result._task.action)
@@ -181,19 +175,18 @@ class CallbackModule(CallbackBase):
self._display.display(msg, color=color) self._display.display(msg, color=color)
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
self._host_counter += 1 self._host_counter += 1
delegated_vars = result._result.get('_ansible_delegated_vars', None) delegated_vars = result._result.get("_ansible_delegated_vars", None)
self._clean_results(result._result, result._task.action) self._clean_results(result._result, result._task.action)
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid: if self._play.strategy == "free" and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task) self._print_task_banner(result._task)
self._handle_exception(result._result) self._handle_exception(result._result)
self._handle_warnings(result._result) self._handle_warnings(result._result)
if result._task.loop and 'results' in result._result: if result._task.loop and "results" in result._result:
self._process_items(result) self._process_items(result)
else: else:
@@ -201,12 +194,12 @@ class CallbackModule(CallbackBase):
self._display.display( self._display.display(
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> " f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> "
f"{delegated_vars['ansible_host']}]: FAILED! => {self._dump_results(result._result)}", f"{delegated_vars['ansible_host']}]: FAILED! => {self._dump_results(result._result)}",
color=C.COLOR_ERROR color=C.COLOR_ERROR,
) )
else: else:
self._display.display( self._display.display(
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()}]: FAILED! => {self._dump_results(result._result)}", f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()}]: FAILED! => {self._dump_results(result._result)}",
color=C.COLOR_ERROR color=C.COLOR_ERROR,
) )
if ignore_errors: if ignore_errors:
@@ -215,14 +208,15 @@ class CallbackModule(CallbackBase):
def v2_runner_on_skipped(self, result): def v2_runner_on_skipped(self, result):
self._host_counter += 1 self._host_counter += 1
if self._plugin_options.get('show_skipped_hosts', C.DISPLAY_SKIPPED_HOSTS): # fallback on constants for inherited plugins missing docs if self._plugin_options.get(
"show_skipped_hosts", C.DISPLAY_SKIPPED_HOSTS
): # fallback on constants for inherited plugins missing docs
self._clean_results(result._result, result._task.action) self._clean_results(result._result, result._task.action)
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid: if self._play.strategy == "free" and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task) self._print_task_banner(result._task)
if result._task.loop and 'results' in result._result: if result._task.loop and "results" in result._result:
self._process_items(result) self._process_items(result)
else: else:
msg = f"skipping: {self._host_counter}/{self._host_total} [{result._host.get_name()}]" msg = f"skipping: {self._host_counter}/{self._host_total} [{result._host.get_name()}]"
@@ -233,18 +227,18 @@ class CallbackModule(CallbackBase):
def v2_runner_on_unreachable(self, result): def v2_runner_on_unreachable(self, result):
self._host_counter += 1 self._host_counter += 1
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid: if self._play.strategy == "free" and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task) self._print_task_banner(result._task)
delegated_vars = result._result.get('_ansible_delegated_vars', None) delegated_vars = result._result.get("_ansible_delegated_vars", None)
if delegated_vars: if delegated_vars:
self._display.display( self._display.display(
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> " f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()} -> "
f"{delegated_vars['ansible_host']}]: UNREACHABLE! => {self._dump_results(result._result)}", f"{delegated_vars['ansible_host']}]: UNREACHABLE! => {self._dump_results(result._result)}",
color=C.COLOR_UNREACHABLE color=C.COLOR_UNREACHABLE,
) )
else: else:
self._display.display( self._display.display(
f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()}]: UNREACHABLE! => {self._dump_results(result._result)}", f"fatal: {self._host_counter}/{self._host_total} [{result._host.get_name()}]: UNREACHABLE! => {self._dump_results(result._result)}",
color=C.COLOR_UNREACHABLE color=C.COLOR_UNREACHABLE,
) )

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2024, Felix Fontein <felix@fontein.de> # Copyright (c) 2024, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -36,8 +35,8 @@ from ansible.plugins.callback.default import CallbackModule as Default
class CallbackModule(Default): class CallbackModule(Default):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'community.general.default_without_diff' CALLBACK_NAME = "community.general.default_without_diff"
def v2_on_file_diff(self, result): def v2_on_file_diff(self, result):
pass pass

View File

@@ -22,6 +22,7 @@ requirements:
HAS_OD = False HAS_OD = False
try: try:
from collections import OrderedDict from collections import OrderedDict
HAS_OD = True HAS_OD = True
except ImportError: except ImportError:
pass pass
@@ -69,66 +70,66 @@ display = Display()
# FIXME: Importing constants as C simply does not work, beats me :-/ # FIXME: Importing constants as C simply does not work, beats me :-/
# from ansible import constants as C # from ansible import constants as C
class C: class C:
COLOR_HIGHLIGHT = 'white' COLOR_HIGHLIGHT = "white"
COLOR_VERBOSE = 'blue' COLOR_VERBOSE = "blue"
COLOR_WARN = 'bright purple' COLOR_WARN = "bright purple"
COLOR_ERROR = 'red' COLOR_ERROR = "red"
COLOR_DEBUG = 'dark gray' COLOR_DEBUG = "dark gray"
COLOR_DEPRECATE = 'purple' COLOR_DEPRECATE = "purple"
COLOR_SKIP = 'cyan' COLOR_SKIP = "cyan"
COLOR_UNREACHABLE = 'bright red' COLOR_UNREACHABLE = "bright red"
COLOR_OK = 'green' COLOR_OK = "green"
COLOR_CHANGED = 'yellow' COLOR_CHANGED = "yellow"
# Taken from Dstat # Taken from Dstat
class vt100: class vt100:
black = '\033[0;30m' black = "\033[0;30m"
darkred = '\033[0;31m' darkred = "\033[0;31m"
darkgreen = '\033[0;32m' darkgreen = "\033[0;32m"
darkyellow = '\033[0;33m' darkyellow = "\033[0;33m"
darkblue = '\033[0;34m' darkblue = "\033[0;34m"
darkmagenta = '\033[0;35m' darkmagenta = "\033[0;35m"
darkcyan = '\033[0;36m' darkcyan = "\033[0;36m"
gray = '\033[0;37m' gray = "\033[0;37m"
darkgray = '\033[1;30m' darkgray = "\033[1;30m"
red = '\033[1;31m' red = "\033[1;31m"
green = '\033[1;32m' green = "\033[1;32m"
yellow = '\033[1;33m' yellow = "\033[1;33m"
blue = '\033[1;34m' blue = "\033[1;34m"
magenta = '\033[1;35m' magenta = "\033[1;35m"
cyan = '\033[1;36m' cyan = "\033[1;36m"
white = '\033[1;37m' white = "\033[1;37m"
blackbg = '\033[40m' blackbg = "\033[40m"
redbg = '\033[41m' redbg = "\033[41m"
greenbg = '\033[42m' greenbg = "\033[42m"
yellowbg = '\033[43m' yellowbg = "\033[43m"
bluebg = '\033[44m' bluebg = "\033[44m"
magentabg = '\033[45m' magentabg = "\033[45m"
cyanbg = '\033[46m' cyanbg = "\033[46m"
whitebg = '\033[47m' whitebg = "\033[47m"
reset = '\033[0;0m' reset = "\033[0;0m"
bold = '\033[1m' bold = "\033[1m"
reverse = '\033[2m' reverse = "\033[2m"
underline = '\033[4m' underline = "\033[4m"
clear = '\033[2J' clear = "\033[2J"
# clearline = '\033[K' # clearline = '\033[K'
clearline = '\033[2K' clearline = "\033[2K"
save = '\033[s' save = "\033[s"
restore = '\033[u' restore = "\033[u"
save_all = '\0337' save_all = "\0337"
restore_all = '\0338' restore_all = "\0338"
linewrap = '\033[7h' linewrap = "\033[7h"
nolinewrap = '\033[7l' nolinewrap = "\033[7l"
up = '\033[1A' up = "\033[1A"
down = '\033[1B' down = "\033[1B"
right = '\033[1C' right = "\033[1C"
left = '\033[1D' left = "\033[1D"
colors = dict( colors = dict(
@@ -140,26 +141,23 @@ colors = dict(
unreachable=vt100.red, unreachable=vt100.red,
) )
states = ('skipped', 'ok', 'changed', 'failed', 'unreachable') states = ("skipped", "ok", "changed", "failed", "unreachable")
class CallbackModule(CallbackModule_default): class CallbackModule(CallbackModule_default):
"""
'''
This is the dense callback interface, where screen estate is still valued. This is the dense callback interface, where screen estate is still valued.
''' """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'dense' CALLBACK_NAME = "dense"
def __init__(self): def __init__(self):
# From CallbackModule # From CallbackModule
self._display = display self._display = display
if HAS_OD: if HAS_OD:
self.disabled = False self.disabled = False
self.super_ref = super() self.super_ref = super()
self.super_ref.__init__() self.super_ref.__init__()
@@ -167,14 +165,14 @@ class CallbackModule(CallbackModule_default):
# Attributes to remove from results for more density # Attributes to remove from results for more density
self.removed_attributes = ( self.removed_attributes = (
# 'changed', # 'changed',
'delta', "delta",
# 'diff', # 'diff',
'end', "end",
'failed', "failed",
'failed_when_result', "failed_when_result",
'invocation', "invocation",
'start', "start",
'stdout_lines', "stdout_lines",
) )
# Initiate data structures # Initiate data structures
@@ -182,13 +180,15 @@ class CallbackModule(CallbackModule_default):
self.keep = False self.keep = False
self.shown_title = False self.shown_title = False
self.count = dict(play=0, handler=0, task=0) self.count = dict(play=0, handler=0, task=0)
self.type = 'foo' self.type = "foo"
# Start immediately on the first line # Start immediately on the first line
sys.stdout.write(vt100.reset + vt100.save + vt100.clearline) sys.stdout.write(vt100.reset + vt100.save + vt100.clearline)
sys.stdout.flush() sys.stdout.flush()
else: else:
display.warning("The 'dense' callback plugin requires OrderedDict which is not available in this version of python, disabling.") display.warning(
"The 'dense' callback plugin requires OrderedDict which is not available in this version of python, disabling."
)
self.disabled = True self.disabled = True
def __del__(self): def __del__(self):
@@ -198,27 +198,27 @@ class CallbackModule(CallbackModule_default):
name = result._host.get_name() name = result._host.get_name()
# Add a new status in case a failed task is ignored # Add a new status in case a failed task is ignored
if status == 'failed' and result._task.ignore_errors: if status == "failed" and result._task.ignore_errors:
status = 'ignored' status = "ignored"
# Check if we have to update an existing state (when looping over items) # Check if we have to update an existing state (when looping over items)
if name not in self.hosts: if name not in self.hosts:
self.hosts[name] = dict(state=status) self.hosts[name] = dict(state=status)
elif states.index(self.hosts[name]['state']) < states.index(status): elif states.index(self.hosts[name]["state"]) < states.index(status):
self.hosts[name]['state'] = status self.hosts[name]["state"] = status
# Store delegated hostname, if needed # Store delegated hostname, if needed
delegated_vars = result._result.get('_ansible_delegated_vars', None) delegated_vars = result._result.get("_ansible_delegated_vars", None)
if delegated_vars: if delegated_vars:
self.hosts[name]['delegate'] = delegated_vars['ansible_host'] self.hosts[name]["delegate"] = delegated_vars["ansible_host"]
# Print progress bar # Print progress bar
self._display_progress(result) self._display_progress(result)
# # Ensure that tasks with changes/failures stay on-screen, and during diff-mode # # Ensure that tasks with changes/failures stay on-screen, and during diff-mode
# if status in ['changed', 'failed', 'unreachable'] or (result.get('_diff_mode', False) and result._resultget('diff', False)): # if status in ['changed', 'failed', 'unreachable'] or (result.get('_diff_mode', False) and result._resultget('diff', False)):
# Ensure that tasks with changes/failures stay on-screen # Ensure that tasks with changes/failures stay on-screen
if status in ['changed', 'failed', 'unreachable']: if status in ["changed", "failed", "unreachable"]:
self.keep = True self.keep = True
if self._display.verbosity == 1: if self._display.verbosity == 1:
@@ -239,9 +239,9 @@ class CallbackModule(CallbackModule_default):
del result[attr] del result[attr]
def _handle_exceptions(self, result): def _handle_exceptions(self, result):
if 'exception' in result: if "exception" in result:
# Remove the exception from the result so it is not shown every time # Remove the exception from the result so it is not shown every time
del result['exception'] del result["exception"]
if self._display.verbosity == 1: if self._display.verbosity == 1:
return "An exception occurred during task execution. To see the full traceback, use -vvv." return "An exception occurred during task execution. To see the full traceback, use -vvv."
@@ -249,16 +249,16 @@ class CallbackModule(CallbackModule_default):
def _display_progress(self, result=None): def _display_progress(self, result=None):
# Always rewrite the complete line # Always rewrite the complete line
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.nolinewrap + vt100.underline) sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.nolinewrap + vt100.underline)
sys.stdout.write(f'{self.type} {self.count[self.type]}:') sys.stdout.write(f"{self.type} {self.count[self.type]}:")
sys.stdout.write(vt100.reset) sys.stdout.write(vt100.reset)
sys.stdout.flush() sys.stdout.flush()
# Print out each host in its own status-color # Print out each host in its own status-color
for name in self.hosts: for name in self.hosts:
sys.stdout.write(' ') sys.stdout.write(" ")
if self.hosts[name].get('delegate', None): if self.hosts[name].get("delegate", None):
sys.stdout.write(f"{self.hosts[name]['delegate']}>") sys.stdout.write(f"{self.hosts[name]['delegate']}>")
sys.stdout.write(colors[self.hosts[name]['state']] + name + vt100.reset) sys.stdout.write(colors[self.hosts[name]["state"]] + name + vt100.reset)
sys.stdout.flush() sys.stdout.flush()
sys.stdout.write(vt100.linewrap) sys.stdout.write(vt100.linewrap)
@@ -267,7 +267,7 @@ class CallbackModule(CallbackModule_default):
if not self.shown_title: if not self.shown_title:
self.shown_title = True self.shown_title = True
sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.underline) sys.stdout.write(vt100.restore + vt100.reset + vt100.clearline + vt100.underline)
sys.stdout.write(f'{self.type} {self.count[self.type]}: {self.task.get_name().strip()}') sys.stdout.write(f"{self.type} {self.count[self.type]}: {self.task.get_name().strip()}")
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}") sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
sys.stdout.flush() sys.stdout.flush()
else: else:
@@ -284,29 +284,31 @@ class CallbackModule(CallbackModule_default):
self._clean_results(result._result) self._clean_results(result._result)
dump = '' dump = ""
if result._task.action == 'include': if result._task.action == "include":
return return
elif status == 'ok': elif status == "ok":
return return
elif status == 'ignored': elif status == "ignored":
dump = self._handle_exceptions(result._result) dump = self._handle_exceptions(result._result)
elif status == 'failed': elif status == "failed":
dump = self._handle_exceptions(result._result) dump = self._handle_exceptions(result._result)
elif status == 'unreachable': elif status == "unreachable":
dump = result._result['msg'] dump = result._result["msg"]
if not dump: if not dump:
dump = self._dump_results(result._result) dump = self._dump_results(result._result)
if result._task.loop and 'results' in result._result: if result._task.loop and "results" in result._result:
self._process_items(result) self._process_items(result)
else: else:
sys.stdout.write(f"{colors[status] + status}: ") sys.stdout.write(f"{colors[status] + status}: ")
delegated_vars = result._result.get('_ansible_delegated_vars', None) delegated_vars = result._result.get("_ansible_delegated_vars", None)
if delegated_vars: if delegated_vars:
sys.stdout.write(f"{vt100.reset}{result._host.get_name()}>{colors[status]}{delegated_vars['ansible_host']}") sys.stdout.write(
f"{vt100.reset}{result._host.get_name()}>{colors[status]}{delegated_vars['ansible_host']}"
)
else: else:
sys.stdout.write(result._host.get_name()) sys.stdout.write(result._host.get_name())
@@ -314,7 +316,7 @@ class CallbackModule(CallbackModule_default):
sys.stdout.write(f"{vt100.reset}{vt100.save}{vt100.clearline}") sys.stdout.write(f"{vt100.reset}{vt100.save}{vt100.clearline}")
sys.stdout.flush() sys.stdout.flush()
if status == 'changed': if status == "changed":
self._handle_warnings(result._result) self._handle_warnings(result._result)
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
@@ -327,13 +329,13 @@ class CallbackModule(CallbackModule_default):
# Reset at the start of each play # Reset at the start of each play
self.keep = False self.keep = False
self.count.update(dict(handler=0, task=0)) self.count.update(dict(handler=0, task=0))
self.count['play'] += 1 self.count["play"] += 1
self.play = play self.play = play
# Write the next play on screen IN UPPERCASE, and make it permanent # Write the next play on screen IN UPPERCASE, and make it permanent
name = play.get_name().strip() name = play.get_name().strip()
if not name: if not name:
name = 'unnamed' name = "unnamed"
sys.stdout.write(f"PLAY {self.count['play']}: {name.upper()}") sys.stdout.write(f"PLAY {self.count['play']}: {name.upper()}")
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}") sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
sys.stdout.flush() sys.stdout.flush()
@@ -351,14 +353,14 @@ class CallbackModule(CallbackModule_default):
self.shown_title = False self.shown_title = False
self.hosts = OrderedDict() self.hosts = OrderedDict()
self.task = task self.task = task
self.type = 'task' self.type = "task"
# Enumerate task if not setup (task names are too long for dense output) # Enumerate task if not setup (task names are too long for dense output)
if task.get_name() != 'setup': if task.get_name() != "setup":
self.count['task'] += 1 self.count["task"] += 1
# Write the next task on screen (behind the prompt is the previous output) # Write the next task on screen (behind the prompt is the previous output)
sys.stdout.write(f'{self.type} {self.count[self.type]}.') sys.stdout.write(f"{self.type} {self.count[self.type]}.")
sys.stdout.write(vt100.reset) sys.stdout.write(vt100.reset)
sys.stdout.flush() sys.stdout.flush()
@@ -374,36 +376,36 @@ class CallbackModule(CallbackModule_default):
self.shown_title = False self.shown_title = False
self.hosts = OrderedDict() self.hosts = OrderedDict()
self.task = task self.task = task
self.type = 'handler' self.type = "handler"
# Enumerate handler if not setup (handler names may be too long for dense output) # Enumerate handler if not setup (handler names may be too long for dense output)
if task.get_name() != 'setup': if task.get_name() != "setup":
self.count[self.type] += 1 self.count[self.type] += 1
# Write the next task on screen (behind the prompt is the previous output) # Write the next task on screen (behind the prompt is the previous output)
sys.stdout.write(f'{self.type} {self.count[self.type]}.') sys.stdout.write(f"{self.type} {self.count[self.type]}.")
sys.stdout.write(vt100.reset) sys.stdout.write(vt100.reset)
sys.stdout.flush() sys.stdout.flush()
def v2_playbook_on_cleanup_task_start(self, task): def v2_playbook_on_cleanup_task_start(self, task):
# TBD # TBD
sys.stdout.write('cleanup.') sys.stdout.write("cleanup.")
sys.stdout.flush() sys.stdout.flush()
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
self._add_host(result, 'failed') self._add_host(result, "failed")
def v2_runner_on_ok(self, result): def v2_runner_on_ok(self, result):
if result._result.get('changed', False): if result._result.get("changed", False):
self._add_host(result, 'changed') self._add_host(result, "changed")
else: else:
self._add_host(result, 'ok') self._add_host(result, "ok")
def v2_runner_on_skipped(self, result): def v2_runner_on_skipped(self, result):
self._add_host(result, 'skipped') self._add_host(result, "skipped")
def v2_runner_on_unreachable(self, result): def v2_runner_on_unreachable(self, result):
self._add_host(result, 'unreachable') self._add_host(result, "unreachable")
def v2_runner_on_include(self, included_file): def v2_runner_on_include(self, included_file):
pass pass
@@ -423,24 +425,24 @@ class CallbackModule(CallbackModule_default):
self.v2_runner_item_on_ok(result) self.v2_runner_item_on_ok(result)
def v2_runner_item_on_ok(self, result): def v2_runner_item_on_ok(self, result):
if result._result.get('changed', False): if result._result.get("changed", False):
self._add_host(result, 'changed') self._add_host(result, "changed")
else: else:
self._add_host(result, 'ok') self._add_host(result, "ok")
# Old definition in v2.0 # Old definition in v2.0
def v2_playbook_item_on_failed(self, result): def v2_playbook_item_on_failed(self, result):
self.v2_runner_item_on_failed(result) self.v2_runner_item_on_failed(result)
def v2_runner_item_on_failed(self, result): def v2_runner_item_on_failed(self, result):
self._add_host(result, 'failed') self._add_host(result, "failed")
# Old definition in v2.0 # Old definition in v2.0
def v2_playbook_item_on_skipped(self, result): def v2_playbook_item_on_skipped(self, result):
self.v2_runner_item_on_skipped(result) self.v2_runner_item_on_skipped(result)
def v2_runner_item_on_skipped(self, result): def v2_runner_item_on_skipped(self, result):
self._add_host(result, 'skipped') self._add_host(result, "skipped")
def v2_playbook_on_no_hosts_remaining(self): def v2_playbook_on_no_hosts_remaining(self):
if self._display.verbosity == 0 and self.keep: if self._display.verbosity == 0 and self.keep:
@@ -467,7 +469,7 @@ class CallbackModule(CallbackModule_default):
return return
sys.stdout.write(vt100.bold + vt100.underline) sys.stdout.write(vt100.bold + vt100.underline)
sys.stdout.write('SUMMARY') sys.stdout.write("SUMMARY")
sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}") sys.stdout.write(f"{vt100.restore}{vt100.reset}\n{vt100.save}{vt100.clearline}")
sys.stdout.flush() sys.stdout.flush()
@@ -479,7 +481,7 @@ class CallbackModule(CallbackModule_default):
f"{hostcolor(h, t)} : {colorize('ok', t['ok'], C.COLOR_OK)} {colorize('changed', t['changed'], C.COLOR_CHANGED)} " f"{hostcolor(h, t)} : {colorize('ok', t['ok'], C.COLOR_OK)} {colorize('changed', t['changed'], C.COLOR_CHANGED)} "
f"{colorize('unreachable', t['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', t['failures'], C.COLOR_ERROR)} " f"{colorize('unreachable', t['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', t['failures'], C.COLOR_ERROR)} "
f"{colorize('rescued', t['rescued'], C.COLOR_OK)} {colorize('ignored', t['ignored'], C.COLOR_WARN)}", f"{colorize('rescued', t['rescued'], C.COLOR_OK)} {colorize('ignored', t['ignored'], C.COLOR_WARN)}",
screen_only=True screen_only=True,
) )

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2019, Trevor Highfill <trevor.highfill@outlook.com> # Copyright (c) 2019, Trevor Highfill <trevor.highfill@outlook.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -786,6 +785,7 @@ from ansible.module_utils.common.text.converters import to_text
try: try:
from ansible.template import trust_as_template # noqa: F401, pylint: disable=unused-import from ansible.template import trust_as_template # noqa: F401, pylint: disable=unused-import
SUPPORTS_DATA_TAGGING = True SUPPORTS_DATA_TAGGING = True
except ImportError: except ImportError:
SUPPORTS_DATA_TAGGING = False SUPPORTS_DATA_TAGGING = False
@@ -806,11 +806,12 @@ class CallbackModule(Default):
""" """
Callback plugin that allows you to supply your own custom callback templates to be output. Callback plugin that allows you to supply your own custom callback templates to be output.
""" """
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'community.general.diy'
DIY_NS = 'ansible_callback_diy' CALLBACK_VERSION = 2.0
CALLBACK_TYPE = "stdout"
CALLBACK_NAME = "community.general.diy"
DIY_NS = "ansible_callback_diy"
@contextmanager @contextmanager
def _suppress_stdout(self, enabled): def _suppress_stdout(self, enabled):
@@ -823,50 +824,48 @@ class CallbackModule(Default):
def _get_output_specification(self, loader, variables): def _get_output_specification(self, loader, variables):
_ret = {} _ret = {}
_calling_method = sys._getframe(1).f_code.co_name _calling_method = sys._getframe(1).f_code.co_name
_callback_type = (_calling_method[3:] if _calling_method[:3] == "v2_" else _calling_method) _callback_type = _calling_method[3:] if _calling_method[:3] == "v2_" else _calling_method
_callback_options = ['msg', 'msg_color'] _callback_options = ["msg", "msg_color"]
for option in _callback_options: for option in _callback_options:
_option_name = f'{_callback_type}_{option}' _option_name = f"{_callback_type}_{option}"
_option_template = variables.get( _option_template = variables.get(f"{self.DIY_NS}_{_option_name}", self.get_option(_option_name))
f"{self.DIY_NS}_{_option_name}", _ret.update({option: self._template(loader=loader, template=_option_template, variables=variables)})
self.get_option(_option_name)
)
_ret.update({option: self._template(
loader=loader,
template=_option_template,
variables=variables
)})
_ret.update({'vars': variables}) _ret.update({"vars": variables})
return _ret return _ret
def _using_diy(self, spec): def _using_diy(self, spec):
sentinel = object() sentinel = object()
omit = spec['vars'].get('omit', sentinel) omit = spec["vars"].get("omit", sentinel)
# With Data Tagging, omit is sentinel # With Data Tagging, omit is sentinel
return (spec['msg'] is not None) and (spec['msg'] != omit or omit is sentinel) return (spec["msg"] is not None) and (spec["msg"] != omit or omit is sentinel)
def _parent_has_callback(self): def _parent_has_callback(self):
return hasattr(super(), sys._getframe(1).f_code.co_name) return hasattr(super(), sys._getframe(1).f_code.co_name)
def _template(self, loader, template, variables): def _template(self, loader, template, variables):
_templar = Templar(loader=loader, variables=variables) _templar = Templar(loader=loader, variables=variables)
return _templar.template( return _templar.template(template, preserve_trailing_newlines=True, convert_data=False, escape_backslashes=True)
template,
preserve_trailing_newlines=True,
convert_data=False,
escape_backslashes=True
)
def _output(self, spec, stderr=False): def _output(self, spec, stderr=False):
_msg = to_text(spec['msg']) _msg = to_text(spec["msg"])
if len(_msg) > 0: if len(_msg) > 0:
self._display.display(msg=_msg, color=spec['msg_color'], stderr=stderr) self._display.display(msg=_msg, color=spec["msg_color"], stderr=stderr)
def _get_vars(self, playbook, play=None, host=None, task=None, included_file=None, def _get_vars(
handler=None, result=None, stats=None, remove_attr_ref_loop=True): self,
playbook,
play=None,
host=None,
task=None,
included_file=None,
handler=None,
result=None,
stats=None,
remove_attr_ref_loop=True,
):
def _get_value(obj, attr=None, method=None): def _get_value(obj, attr=None, method=None):
if attr: if attr:
return getattr(obj, attr, getattr(obj, f"_{attr}", None)) return getattr(obj, attr, getattr(obj, f"_{attr}", None))
@@ -876,8 +875,8 @@ class CallbackModule(Default):
return _method() return _method()
def _remove_attr_ref_loop(obj, attributes): def _remove_attr_ref_loop(obj, attributes):
_loop_var = getattr(obj, 'loop_control', None) _loop_var = getattr(obj, "loop_control", None)
_loop_var = (_loop_var or 'item') _loop_var = _loop_var or "item"
for attr in attributes: for attr in attributes:
if str(_loop_var) in str(_get_value(obj=obj, attr=attr)): if str(_loop_var) in str(_get_value(obj=obj, attr=attr)):
@@ -896,56 +895,128 @@ class CallbackModule(Default):
_all = _variable_manager.get_vars() _all = _variable_manager.get_vars()
if play: if play:
_all = play.get_variable_manager().get_vars( _all = play.get_variable_manager().get_vars(
play=play, play=play, host=(host if host else getattr(result, "_host", None)), task=(handler if handler else task)
host=(host if host else getattr(result, '_host', None)),
task=(handler if handler else task)
) )
_ret.update(_all) _ret.update(_all)
_ret.update(_ret.get(self.DIY_NS, {self.DIY_NS: {} if SUPPORTS_DATA_TAGGING else CallbackDIYDict()})) _ret.update(_ret.get(self.DIY_NS, {self.DIY_NS: {} if SUPPORTS_DATA_TAGGING else CallbackDIYDict()}))
_ret[self.DIY_NS].update({'playbook': {}}) _ret[self.DIY_NS].update({"playbook": {}})
_playbook_attributes = ['entries', 'file_name', 'basedir'] _playbook_attributes = ["entries", "file_name", "basedir"]
for attr in _playbook_attributes: for attr in _playbook_attributes:
_ret[self.DIY_NS]['playbook'].update({attr: _get_value(obj=playbook, attr=attr)}) _ret[self.DIY_NS]["playbook"].update({attr: _get_value(obj=playbook, attr=attr)})
if play: if play:
_ret[self.DIY_NS].update({'play': {}}) _ret[self.DIY_NS].update({"play": {}})
_play_attributes = ['any_errors_fatal', 'become', 'become_flags', 'become_method', _play_attributes = [
'become_user', 'check_mode', 'collections', 'connection', "any_errors_fatal",
'debugger', 'diff', 'environment', 'fact_path', 'finalized', "become",
'force_handlers', 'gather_facts', 'gather_subset', "become_flags",
'gather_timeout', 'handlers', 'hosts', 'ignore_errors', "become_method",
'ignore_unreachable', 'included_conditional', 'included_path', "become_user",
'max_fail_percentage', 'module_defaults', 'name', 'no_log', "check_mode",
'only_tags', 'order', 'port', 'post_tasks', 'pre_tasks', "collections",
'remote_user', 'removed_hosts', 'roles', 'run_once', 'serial', "connection",
'skip_tags', 'squashed', 'strategy', 'tags', 'tasks', 'uuid', "debugger",
'validated', 'vars_files', 'vars_prompt'] "diff",
"environment",
"fact_path",
"finalized",
"force_handlers",
"gather_facts",
"gather_subset",
"gather_timeout",
"handlers",
"hosts",
"ignore_errors",
"ignore_unreachable",
"included_conditional",
"included_path",
"max_fail_percentage",
"module_defaults",
"name",
"no_log",
"only_tags",
"order",
"port",
"post_tasks",
"pre_tasks",
"remote_user",
"removed_hosts",
"roles",
"run_once",
"serial",
"skip_tags",
"squashed",
"strategy",
"tags",
"tasks",
"uuid",
"validated",
"vars_files",
"vars_prompt",
]
for attr in _play_attributes: for attr in _play_attributes:
_ret[self.DIY_NS]['play'].update({attr: _get_value(obj=play, attr=attr)}) _ret[self.DIY_NS]["play"].update({attr: _get_value(obj=play, attr=attr)})
if host: if host:
_ret[self.DIY_NS].update({'host': {}}) _ret[self.DIY_NS].update({"host": {}})
_host_attributes = ['name', 'uuid', 'address', 'implicit'] _host_attributes = ["name", "uuid", "address", "implicit"]
for attr in _host_attributes: for attr in _host_attributes:
_ret[self.DIY_NS]['host'].update({attr: _get_value(obj=host, attr=attr)}) _ret[self.DIY_NS]["host"].update({attr: _get_value(obj=host, attr=attr)})
if task: if task:
_ret[self.DIY_NS].update({'task': {}}) _ret[self.DIY_NS].update({"task": {}})
_task_attributes = ['action', 'any_errors_fatal', 'args', 'async', 'async_val', _task_attributes = [
'become', 'become_flags', 'become_method', 'become_user', "action",
'changed_when', 'check_mode', 'collections', 'connection', "any_errors_fatal",
'debugger', 'delay', 'delegate_facts', 'delegate_to', 'diff', "args",
'environment', 'failed_when', 'finalized', 'ignore_errors', "async",
'ignore_unreachable', 'loop', 'loop_control', 'loop_with', "async_val",
'module_defaults', 'name', 'no_log', 'notify', 'parent', 'poll', "become",
'port', 'register', 'remote_user', 'retries', 'role', 'run_once', "become_flags",
'squashed', 'tags', 'untagged', 'until', 'uuid', 'validated', "become_method",
'when'] "become_user",
"changed_when",
"check_mode",
"collections",
"connection",
"debugger",
"delay",
"delegate_facts",
"delegate_to",
"diff",
"environment",
"failed_when",
"finalized",
"ignore_errors",
"ignore_unreachable",
"loop",
"loop_control",
"loop_with",
"module_defaults",
"name",
"no_log",
"notify",
"parent",
"poll",
"port",
"register",
"remote_user",
"retries",
"role",
"run_once",
"squashed",
"tags",
"untagged",
"until",
"uuid",
"validated",
"when",
]
# remove arguments that reference a loop var because they cause templating issues in # remove arguments that reference a loop var because they cause templating issues in
# callbacks that do not have the loop context(e.g. playbook_on_task_start) # callbacks that do not have the loop context(e.g. playbook_on_task_start)
@@ -953,74 +1024,114 @@ class CallbackModule(Default):
_task_attributes = _remove_attr_ref_loop(obj=task, attributes=_task_attributes) _task_attributes = _remove_attr_ref_loop(obj=task, attributes=_task_attributes)
for attr in _task_attributes: for attr in _task_attributes:
_ret[self.DIY_NS]['task'].update({attr: _get_value(obj=task, attr=attr)}) _ret[self.DIY_NS]["task"].update({attr: _get_value(obj=task, attr=attr)})
if included_file: if included_file:
_ret[self.DIY_NS].update({'included_file': {}}) _ret[self.DIY_NS].update({"included_file": {}})
_included_file_attributes = ['args', 'filename', 'hosts', 'is_role', 'task'] _included_file_attributes = ["args", "filename", "hosts", "is_role", "task"]
for attr in _included_file_attributes: for attr in _included_file_attributes:
_ret[self.DIY_NS]['included_file'].update({attr: _get_value( _ret[self.DIY_NS]["included_file"].update({attr: _get_value(obj=included_file, attr=attr)})
obj=included_file,
attr=attr
)})
if handler: if handler:
_ret[self.DIY_NS].update({'handler': {}}) _ret[self.DIY_NS].update({"handler": {}})
_handler_attributes = ['action', 'any_errors_fatal', 'args', 'async', 'async_val', _handler_attributes = [
'become', 'become_flags', 'become_method', 'become_user', "action",
'changed_when', 'check_mode', 'collections', 'connection', "any_errors_fatal",
'debugger', 'delay', 'delegate_facts', 'delegate_to', 'diff', "args",
'environment', 'failed_when', 'finalized', 'ignore_errors', "async",
'ignore_unreachable', 'listen', 'loop', 'loop_control', "async_val",
'loop_with', 'module_defaults', 'name', 'no_log', "become",
'notified_hosts', 'notify', 'parent', 'poll', 'port', "become_flags",
'register', 'remote_user', 'retries', 'role', 'run_once', "become_method",
'squashed', 'tags', 'untagged', 'until', 'uuid', 'validated', "become_user",
'when'] "changed_when",
"check_mode",
"collections",
"connection",
"debugger",
"delay",
"delegate_facts",
"delegate_to",
"diff",
"environment",
"failed_when",
"finalized",
"ignore_errors",
"ignore_unreachable",
"listen",
"loop",
"loop_control",
"loop_with",
"module_defaults",
"name",
"no_log",
"notified_hosts",
"notify",
"parent",
"poll",
"port",
"register",
"remote_user",
"retries",
"role",
"run_once",
"squashed",
"tags",
"untagged",
"until",
"uuid",
"validated",
"when",
]
if handler.loop and remove_attr_ref_loop: if handler.loop and remove_attr_ref_loop:
_handler_attributes = _remove_attr_ref_loop(obj=handler, _handler_attributes = _remove_attr_ref_loop(obj=handler, attributes=_handler_attributes)
attributes=_handler_attributes)
for attr in _handler_attributes: for attr in _handler_attributes:
_ret[self.DIY_NS]['handler'].update({attr: _get_value(obj=handler, attr=attr)}) _ret[self.DIY_NS]["handler"].update({attr: _get_value(obj=handler, attr=attr)})
_ret[self.DIY_NS]['handler'].update({'is_host_notified': handler.is_host_notified(host)}) _ret[self.DIY_NS]["handler"].update({"is_host_notified": handler.is_host_notified(host)})
if result: if result:
_ret[self.DIY_NS].update({'result': {}}) _ret[self.DIY_NS].update({"result": {}})
_result_attributes = ['host', 'task', 'task_name'] _result_attributes = ["host", "task", "task_name"]
for attr in _result_attributes: for attr in _result_attributes:
_ret[self.DIY_NS]['result'].update({attr: _get_value(obj=result, attr=attr)}) _ret[self.DIY_NS]["result"].update({attr: _get_value(obj=result, attr=attr)})
_result_methods = ['is_changed', 'is_failed', 'is_skipped', 'is_unreachable'] _result_methods = ["is_changed", "is_failed", "is_skipped", "is_unreachable"]
for method in _result_methods: for method in _result_methods:
_ret[self.DIY_NS]['result'].update({method: _get_value(obj=result, method=method)}) _ret[self.DIY_NS]["result"].update({method: _get_value(obj=result, method=method)})
_ret[self.DIY_NS]['result'].update({'output': getattr(result, '_result', None)}) _ret[self.DIY_NS]["result"].update({"output": getattr(result, "_result", None)})
_ret.update(result._result) _ret.update(result._result)
if stats: if stats:
_ret[self.DIY_NS].update({'stats': {}}) _ret[self.DIY_NS].update({"stats": {}})
_stats_attributes = ['changed', 'custom', 'dark', 'failures', 'ignored', _stats_attributes = [
'ok', 'processed', 'rescued', 'skipped'] "changed",
"custom",
"dark",
"failures",
"ignored",
"ok",
"processed",
"rescued",
"skipped",
]
for attr in _stats_attributes: for attr in _stats_attributes:
_ret[self.DIY_NS]['stats'].update({attr: _get_value(obj=stats, attr=attr)}) _ret[self.DIY_NS]["stats"].update({attr: _get_value(obj=stats, attr=attr)})
_ret[self.DIY_NS].update({'top_level_var_names': list(_ret.keys())}) _ret[self.DIY_NS].update({"top_level_var_names": list(_ret.keys())})
return _ret return _ret
def v2_on_any(self, *args, **kwargs): def v2_on_any(self, *args, **kwargs):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(loader=self._diy_loader, variables=self._diy_spec["vars"])
loader=self._diy_loader,
variables=self._diy_spec['vars']
)
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
self._output(spec=self._diy_spec) self._output(spec=self._diy_spec)
@@ -1033,11 +1144,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task, result=result
play=self._diy_play, ),
task=self._diy_task,
result=result
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1051,11 +1159,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task, result=result
play=self._diy_play, ),
task=self._diy_task,
result=result
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1069,11 +1174,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task, result=result
play=self._diy_play, ),
task=self._diy_task,
result=result
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1087,11 +1189,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task, result=result
play=self._diy_play, ),
task=self._diy_task,
result=result
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1121,8 +1220,8 @@ class CallbackModule(Default):
play=self._diy_play, play=self._diy_play,
task=self._diy_task, task=self._diy_task,
result=result, result=result,
remove_attr_ref_loop=False remove_attr_ref_loop=False,
) ),
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1140,8 +1239,8 @@ class CallbackModule(Default):
play=self._diy_play, play=self._diy_play,
task=self._diy_task, task=self._diy_task,
result=result, result=result,
remove_attr_ref_loop=False remove_attr_ref_loop=False,
) ),
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1159,8 +1258,8 @@ class CallbackModule(Default):
play=self._diy_play, play=self._diy_play,
task=self._diy_task, task=self._diy_task,
result=result, result=result,
remove_attr_ref_loop=False remove_attr_ref_loop=False,
) ),
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1174,11 +1273,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task, result=result
play=self._diy_play, ),
task=self._diy_task,
result=result
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1195,11 +1291,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, host=self._diy_host, task=self._diy_task
play=self._diy_play, ),
host=self._diy_host,
task=self._diy_task
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1214,10 +1307,7 @@ class CallbackModule(Default):
self._diy_loader = self._diy_playbook.get_loader() self._diy_loader = self._diy_playbook.get_loader()
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader, variables=self._get_vars(playbook=self._diy_playbook)
variables=self._get_vars(
playbook=self._diy_playbook
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1234,11 +1324,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, host=self._diy_host, handler=self._diy_handler
play=self._diy_play, ),
host=self._diy_host,
handler=self._diy_handler
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1249,10 +1336,7 @@ class CallbackModule(Default):
super().v2_playbook_on_notify(handler, host) super().v2_playbook_on_notify(handler, host)
def v2_playbook_on_no_hosts_matched(self): def v2_playbook_on_no_hosts_matched(self):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(loader=self._diy_loader, variables=self._diy_spec["vars"])
loader=self._diy_loader,
variables=self._diy_spec['vars']
)
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
self._output(spec=self._diy_spec) self._output(spec=self._diy_spec)
@@ -1262,10 +1346,7 @@ class CallbackModule(Default):
super().v2_playbook_on_no_hosts_matched() super().v2_playbook_on_no_hosts_matched()
def v2_playbook_on_no_hosts_remaining(self): def v2_playbook_on_no_hosts_remaining(self):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(loader=self._diy_loader, variables=self._diy_spec["vars"])
loader=self._diy_loader,
variables=self._diy_spec['vars']
)
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
self._output(spec=self._diy_spec) self._output(spec=self._diy_spec)
@@ -1279,11 +1360,7 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task),
playbook=self._diy_playbook,
play=self._diy_play,
task=self._diy_task
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1302,11 +1379,7 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task),
playbook=self._diy_playbook,
play=self._diy_play,
task=self._diy_task
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1316,13 +1389,19 @@ class CallbackModule(Default):
with self._suppress_stdout(enabled=self._using_diy(spec=self._diy_spec)): with self._suppress_stdout(enabled=self._using_diy(spec=self._diy_spec)):
super().v2_playbook_on_handler_task_start(task) super().v2_playbook_on_handler_task_start(task)
def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, def v2_playbook_on_vars_prompt(
confirm=False, salt_size=None, salt=None, default=None, self,
unsafe=None): varname,
self._diy_spec = self._get_output_specification( private=True,
loader=self._diy_loader, prompt=None,
variables=self._diy_spec['vars'] encrypt=None,
) confirm=False,
salt_size=None,
salt=None,
default=None,
unsafe=None,
):
self._diy_spec = self._get_output_specification(loader=self._diy_loader, variables=self._diy_spec["vars"])
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
self._output(spec=self._diy_spec) self._output(spec=self._diy_spec)
@@ -1330,9 +1409,7 @@ class CallbackModule(Default):
if self._parent_has_callback(): if self._parent_has_callback():
with self._suppress_stdout(enabled=self._using_diy(spec=self._diy_spec)): with self._suppress_stdout(enabled=self._using_diy(spec=self._diy_spec)):
super().v2_playbook_on_vars_prompt( super().v2_playbook_on_vars_prompt(
varname, private, prompt, encrypt, varname, private, prompt, encrypt, confirm, salt_size, salt, default, unsafe
confirm, salt_size, salt, default,
unsafe
) )
# not implemented as the call to this is not implemented yet # not implemented as the call to this is not implemented yet
@@ -1347,11 +1424,7 @@ class CallbackModule(Default):
self._diy_play = play self._diy_play = play
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader, variables=self._get_vars(playbook=self._diy_playbook, play=self._diy_play)
variables=self._get_vars(
playbook=self._diy_playbook,
play=self._diy_play
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1366,11 +1439,7 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(playbook=self._diy_playbook, play=self._diy_play, stats=self._diy_stats),
playbook=self._diy_playbook,
play=self._diy_play,
stats=self._diy_stats
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1389,8 +1458,8 @@ class CallbackModule(Default):
playbook=self._diy_playbook, playbook=self._diy_playbook,
play=self._diy_play, play=self._diy_play,
task=self._diy_included_file._task, task=self._diy_included_file._task,
included_file=self._diy_included_file included_file=self._diy_included_file,
) ),
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):
@@ -1404,11 +1473,8 @@ class CallbackModule(Default):
self._diy_spec = self._get_output_specification( self._diy_spec = self._get_output_specification(
loader=self._diy_loader, loader=self._diy_loader,
variables=self._get_vars( variables=self._get_vars(
playbook=self._diy_playbook, playbook=self._diy_playbook, play=self._diy_play, task=self._diy_task, result=result
play=self._diy_play, ),
task=self._diy_task,
result=result
)
) )
if self._using_diy(spec=self._diy_spec): if self._using_diy(spec=self._diy_spec):

View File

@@ -116,9 +116,9 @@ class TaskData:
def add_host(self, host): def add_host(self, host):
if host.uuid in self.host_data: if host.uuid in self.host_data:
if host.status == 'included': if host.status == "included":
# concatenate task include output from multiple items # concatenate task include output from multiple items
host.result = f'{self.host_data[host.uuid].result}\n{host.result}' host.result = f"{self.host_data[host.uuid].result}\n{host.result}"
else: else:
return return
@@ -152,7 +152,7 @@ class ElasticSource:
self._display = display self._display = display
def start_task(self, tasks_data, hide_task_arguments, play_name, task): def start_task(self, tasks_data, hide_task_arguments, play_name, task):
""" record the start of a task for one or more hosts """ """record the start of a task for one or more hosts"""
uuid = task._uuid uuid = task._uuid
@@ -165,29 +165,39 @@ class ElasticSource:
args = None args = None
if not task.no_log and not hide_task_arguments: if not task.no_log and not hide_task_arguments:
args = ', '.join((f'{k}={v}' for k, v in task.args.items())) args = ", ".join((f"{k}={v}" for k, v in task.args.items()))
tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args) tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args)
def finish_task(self, tasks_data, status, result): def finish_task(self, tasks_data, status, result):
""" record the results of a task for a single host """ """record the results of a task for a single host"""
task_uuid = result._task._uuid task_uuid = result._task._uuid
if hasattr(result, '_host') and result._host is not None: if hasattr(result, "_host") and result._host is not None:
host_uuid = result._host._uuid host_uuid = result._host._uuid
host_name = result._host.name host_name = result._host.name
else: else:
host_uuid = 'include' host_uuid = "include"
host_name = 'include' host_name = "include"
task = tasks_data[task_uuid] task = tasks_data[task_uuid]
task.add_host(HostData(host_uuid, host_name, status, result)) task.add_host(HostData(host_uuid, host_name, status, result))
def generate_distributed_traces(self, tasks_data, status, end_time, traceparent, apm_service_name, def generate_distributed_traces(
apm_server_url, apm_verify_server_cert, apm_secret_token, apm_api_key): self,
""" generate distributed traces from the collected TaskData and HostData """ tasks_data,
status,
end_time,
traceparent,
apm_service_name,
apm_server_url,
apm_verify_server_cert,
apm_secret_token,
apm_api_key,
):
"""generate distributed traces from the collected TaskData and HostData"""
tasks = [] tasks = []
parent_start_time = None parent_start_time = None
@@ -196,7 +206,9 @@ class ElasticSource:
parent_start_time = task.start parent_start_time = task.start
tasks.append(task) tasks.append(task)
apm_cli = self.init_apm_client(apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key) apm_cli = self.init_apm_client(
apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key
)
if apm_cli: if apm_cli:
with closing(apm_cli): with closing(apm_cli):
instrument() # Only call this once, as early as possible. instrument() # Only call this once, as early as possible.
@@ -218,72 +230,80 @@ class ElasticSource:
apm_cli.end_transaction(name=__name__, result=status, duration=end_time - parent_start_time) apm_cli.end_transaction(name=__name__, result=status, duration=end_time - parent_start_time)
def create_span_data(self, apm_cli, task_data, host_data): def create_span_data(self, apm_cli, task_data, host_data):
""" create the span with the given TaskData and HostData """ """create the span with the given TaskData and HostData"""
name = f'[{host_data.name}] {task_data.play}: {task_data.name}' name = f"[{host_data.name}] {task_data.play}: {task_data.name}"
message = "success" message = "success"
status = "success" status = "success"
enriched_error_message = None enriched_error_message = None
if host_data.status == 'included': if host_data.status == "included":
rc = 0 rc = 0
else: else:
res = host_data.result._result res = host_data.result._result
rc = res.get('rc', 0) rc = res.get("rc", 0)
if host_data.status == 'failed': if host_data.status == "failed":
message = self.get_error_message(res) message = self.get_error_message(res)
enriched_error_message = self.enrich_error_message(res) enriched_error_message = self.enrich_error_message(res)
status = "failure" status = "failure"
elif host_data.status == 'skipped': elif host_data.status == "skipped":
if 'skip_reason' in res: if "skip_reason" in res:
message = res['skip_reason'] message = res["skip_reason"]
else: else:
message = 'skipped' message = "skipped"
status = "unknown" status = "unknown"
with capture_span(task_data.name, with capture_span(
start=task_data.start, task_data.name,
span_type="ansible.task.run", start=task_data.start,
duration=host_data.finish - task_data.start, span_type="ansible.task.run",
labels={"ansible.task.args": task_data.args, duration=host_data.finish - task_data.start,
"ansible.task.message": message, labels={
"ansible.task.module": task_data.action, "ansible.task.args": task_data.args,
"ansible.task.name": name, "ansible.task.message": message,
"ansible.task.result": rc, "ansible.task.module": task_data.action,
"ansible.task.host.name": host_data.name, "ansible.task.name": name,
"ansible.task.host.status": host_data.status}) as span: "ansible.task.result": rc,
"ansible.task.host.name": host_data.name,
"ansible.task.host.status": host_data.status,
},
) as span:
span.outcome = status span.outcome = status
if 'failure' in status: if "failure" in status:
exception = AnsibleRuntimeError(message=f"{task_data.action}: {name} failed with error message {enriched_error_message}") exception = AnsibleRuntimeError(
message=f"{task_data.action}: {name} failed with error message {enriched_error_message}"
)
apm_cli.capture_exception(exc_info=(type(exception), exception, exception.__traceback__), handled=True) apm_cli.capture_exception(exc_info=(type(exception), exception, exception.__traceback__), handled=True)
def init_apm_client(self, apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key): def init_apm_client(self, apm_server_url, apm_service_name, apm_verify_server_cert, apm_secret_token, apm_api_key):
if apm_server_url: if apm_server_url:
return Client(service_name=apm_service_name, return Client(
server_url=apm_server_url, service_name=apm_service_name,
verify_server_cert=False, server_url=apm_server_url,
secret_token=apm_secret_token, verify_server_cert=False,
api_key=apm_api_key, secret_token=apm_secret_token,
use_elastic_traceparent_header=True, api_key=apm_api_key,
debug=True) use_elastic_traceparent_header=True,
debug=True,
)
@staticmethod @staticmethod
def get_error_message(result): def get_error_message(result):
if result.get('exception') is not None: if result.get("exception") is not None:
return ElasticSource._last_line(result['exception']) return ElasticSource._last_line(result["exception"])
return result.get('msg', 'failed') return result.get("msg", "failed")
@staticmethod @staticmethod
def _last_line(text): def _last_line(text):
lines = text.strip().split('\n') lines = text.strip().split("\n")
return lines[-1] return lines[-1]
@staticmethod @staticmethod
def enrich_error_message(result): def enrich_error_message(result):
message = result.get('msg', 'failed') message = result.get("msg", "failed")
exception = result.get('exception') exception = result.get("exception")
stderr = result.get('stderr') stderr = result.get("stderr")
return f"message: \"{message}\"\nexception: \"{exception}\"\nstderr: \"{stderr}\"" return f'message: "{message}"\nexception: "{exception}"\nstderr: "{stderr}"'
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
@@ -292,8 +312,8 @@ class CallbackModule(CallbackBase):
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.elastic' CALLBACK_NAME = "community.general.elastic"
CALLBACK_NEEDS_ENABLED = True CALLBACK_NEEDS_ENABLED = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -308,7 +328,9 @@ class CallbackModule(CallbackBase):
self.disabled = False self.disabled = False
if ELASTIC_LIBRARY_IMPORT_ERROR: if ELASTIC_LIBRARY_IMPORT_ERROR:
raise AnsibleError('The `elastic-apm` must be installed to use this plugin') from ELASTIC_LIBRARY_IMPORT_ERROR raise AnsibleError(
"The `elastic-apm` must be installed to use this plugin"
) from ELASTIC_LIBRARY_IMPORT_ERROR
self.tasks_data = OrderedDict() self.tasks_data = OrderedDict()
@@ -317,17 +339,17 @@ class CallbackModule(CallbackBase):
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.hide_task_arguments = self.get_option('hide_task_arguments') self.hide_task_arguments = self.get_option("hide_task_arguments")
self.apm_service_name = self.get_option('apm_service_name') self.apm_service_name = self.get_option("apm_service_name")
if not self.apm_service_name: if not self.apm_service_name:
self.apm_service_name = 'ansible' self.apm_service_name = "ansible"
self.apm_server_url = self.get_option('apm_server_url') self.apm_server_url = self.get_option("apm_server_url")
self.apm_secret_token = self.get_option('apm_secret_token') self.apm_secret_token = self.get_option("apm_secret_token")
self.apm_api_key = self.get_option('apm_api_key') self.apm_api_key = self.get_option("apm_api_key")
self.apm_verify_server_cert = self.get_option('apm_verify_server_cert') self.apm_verify_server_cert = self.get_option("apm_verify_server_cert")
self.traceparent = self.get_option('traceparent') self.traceparent = self.get_option("traceparent")
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self.ansible_playbook = basename(playbook._file_name) self.ansible_playbook = basename(playbook._file_name)
@@ -336,65 +358,29 @@ class CallbackModule(CallbackBase):
self.play_name = play.get_name() self.play_name = play.get_name()
def v2_runner_on_no_hosts(self, task): def v2_runner_on_no_hosts(self, task):
self.elastic.start_task( self.elastic.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_playbook_on_task_start(self, task, is_conditional): def v2_playbook_on_task_start(self, task, is_conditional):
self.elastic.start_task( self.elastic.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_playbook_on_cleanup_task_start(self, task): def v2_playbook_on_cleanup_task_start(self, task):
self.elastic.start_task( self.elastic.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_playbook_on_handler_task_start(self, task): def v2_playbook_on_handler_task_start(self, task):
self.elastic.start_task( self.elastic.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
self.errors += 1 self.errors += 1
self.elastic.finish_task( self.elastic.finish_task(self.tasks_data, "failed", result)
self.tasks_data,
'failed',
result
)
def v2_runner_on_ok(self, result): def v2_runner_on_ok(self, result):
self.elastic.finish_task( self.elastic.finish_task(self.tasks_data, "ok", result)
self.tasks_data,
'ok',
result
)
def v2_runner_on_skipped(self, result): def v2_runner_on_skipped(self, result):
self.elastic.finish_task( self.elastic.finish_task(self.tasks_data, "skipped", result)
self.tasks_data,
'skipped',
result
)
def v2_playbook_on_include(self, included_file): def v2_playbook_on_include(self, included_file):
self.elastic.finish_task( self.elastic.finish_task(self.tasks_data, "included", included_file)
self.tasks_data,
'included',
included_file
)
def v2_playbook_on_stats(self, stats): def v2_playbook_on_stats(self, stats):
if self.errors == 0: if self.errors == 0:
@@ -410,7 +396,7 @@ class CallbackModule(CallbackBase):
self.apm_server_url, self.apm_server_url,
self.apm_verify_server_cert, self.apm_verify_server_cert,
self.apm_secret_token, self.apm_secret_token,
self.apm_api_key self.apm_api_key,
) )
def v2_runner_on_async_failed(self, result, **kwargs): def v2_runner_on_async_failed(self, result, **kwargs):

View File

@@ -54,29 +54,31 @@ from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.jabber' CALLBACK_NAME = "community.general.jabber"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
super().__init__(display=display) super().__init__(display=display)
if not HAS_XMPP: if not HAS_XMPP:
self._display.warning("The required python xmpp library (xmpppy) is not installed. " self._display.warning(
"pip install git+https://github.com/ArchipelProject/xmpppy") "The required python xmpp library (xmpppy) is not installed. "
"pip install git+https://github.com/ArchipelProject/xmpppy"
)
self.disabled = True self.disabled = True
self.serv = os.getenv('JABBER_SERV') self.serv = os.getenv("JABBER_SERV")
self.j_user = os.getenv('JABBER_USER') self.j_user = os.getenv("JABBER_USER")
self.j_pass = os.getenv('JABBER_PASS') self.j_pass = os.getenv("JABBER_PASS")
self.j_to = os.getenv('JABBER_TO') self.j_to = os.getenv("JABBER_TO")
if (self.j_user or self.j_pass or self.serv or self.j_to) is None: if (self.j_user or self.j_pass or self.serv or self.j_to) is None:
self.disabled = True self.disabled = True
self._display.warning('Jabber CallBack wants the JABBER_SERV, JABBER_USER, JABBER_PASS and JABBER_TO environment variables') self._display.warning(
"Jabber CallBack wants the JABBER_SERV, JABBER_USER, JABBER_PASS and JABBER_TO environment variables"
)
def send_msg(self, msg): def send_msg(self, msg):
"""Send message""" """Send message"""
@@ -85,7 +87,7 @@ class CallbackModule(CallbackBase):
client.connect(server=(self.serv, 5222)) client.connect(server=(self.serv, 5222))
client.auth(jid.getNode(), self.j_pass, resource=jid.getResource()) client.auth(jid.getNode(), self.j_pass, resource=jid.getResource())
message = xmpp.Message(self.j_to, msg) message = xmpp.Message(self.j_to, msg)
message.setAttr('type', 'chat') message.setAttr("type", "chat")
client.send(message) client.send(message)
client.disconnect() client.disconnect()
@@ -109,9 +111,9 @@ class CallbackModule(CallbackBase):
unreachable = False unreachable = False
for h in hosts: for h in hosts:
s = stats.summarize(h) s = stats.summarize(h)
if s['failures'] > 0: if s["failures"] > 0:
failures = True failures = True
if s['unreachable'] > 0: if s["unreachable"] > 0:
unreachable = True unreachable = True
if failures or unreachable: if failures or unreachable:

View File

@@ -49,9 +49,10 @@ class CallbackModule(CallbackBase):
""" """
logs playbook results, per host, in /var/log/ansible/hosts logs playbook results, per host, in /var/log/ansible/hosts
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.log_plays' CALLBACK_NAME = "community.general.log_plays"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
TIME_FORMAT = "%b %d %Y %H:%M:%S" TIME_FORMAT = "%b %d %Y %H:%M:%S"
@@ -61,7 +62,6 @@ class CallbackModule(CallbackBase):
return f"{now} - {playbook} - {task_name} - {task_action} - {category} - {data}\n\n" return f"{now} - {playbook} - {task_name} - {task_action} - {category} - {data}\n\n"
def __init__(self): def __init__(self):
super().__init__() super().__init__()
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
@@ -75,12 +75,12 @@ class CallbackModule(CallbackBase):
def log(self, result, category): def log(self, result, category):
data = result._result data = result._result
if isinstance(data, MutableMapping): if isinstance(data, MutableMapping):
if '_ansible_verbose_override' in data: if "_ansible_verbose_override" in data:
# avoid logging extraneous data # avoid logging extraneous data
data = 'omitted' data = "omitted"
else: else:
data = data.copy() data = data.copy()
invocation = data.pop('invocation', None) invocation = data.pop("invocation", None)
data = json.dumps(data, cls=AnsibleJSONEncoder) data = json.dumps(data, cls=AnsibleJSONEncoder)
if invocation is not None: if invocation is not None:
data = f"{json.dumps(invocation)} => {data} " data = f"{json.dumps(invocation)} => {data} "
@@ -93,25 +93,25 @@ class CallbackModule(CallbackBase):
fd.write(msg) fd.write(msg)
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
self.log(result, 'FAILED') self.log(result, "FAILED")
def v2_runner_on_ok(self, result): def v2_runner_on_ok(self, result):
self.log(result, 'OK') self.log(result, "OK")
def v2_runner_on_skipped(self, result): def v2_runner_on_skipped(self, result):
self.log(result, 'SKIPPED') self.log(result, "SKIPPED")
def v2_runner_on_unreachable(self, result): def v2_runner_on_unreachable(self, result):
self.log(result, 'UNREACHABLE') self.log(result, "UNREACHABLE")
def v2_runner_on_async_failed(self, result): def v2_runner_on_async_failed(self, result):
self.log(result, 'ASYNC_FAILED') self.log(result, "ASYNC_FAILED")
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self.playbook = playbook._file_name self.playbook = playbook._file_name
def v2_playbook_on_import_for_host(self, result, imported_file): def v2_playbook_on_import_for_host(self, result, imported_file):
self.log(result, 'IMPORTED', imported_file) self.log(result, "IMPORTED", imported_file)
def v2_playbook_on_not_import_for_host(self, result, missing_file): def v2_playbook_on_not_import_for_host(self, result, missing_file):
self.log(result, 'NOTIMPORTED', missing_file) self.log(result, "NOTIMPORTED", missing_file)

View File

@@ -83,11 +83,10 @@ class AzureLogAnalyticsSource:
def __build_signature(self, date, workspace_id, shared_key, content_length): def __build_signature(self, date, workspace_id, shared_key, content_length):
# Build authorisation signature for Azure log analytics API call # Build authorisation signature for Azure log analytics API call
sigs = f"POST\n{content_length}\napplication/json\nx-ms-date:{date}\n/api/logs" sigs = f"POST\n{content_length}\napplication/json\nx-ms-date:{date}\n/api/logs"
utf8_sigs = sigs.encode('utf-8') utf8_sigs = sigs.encode("utf-8")
decoded_shared_key = base64.b64decode(shared_key) decoded_shared_key = base64.b64decode(shared_key)
hmac_sha256_sigs = hmac.new( hmac_sha256_sigs = hmac.new(decoded_shared_key, utf8_sigs, digestmod=hashlib.sha256).digest()
decoded_shared_key, utf8_sigs, digestmod=hashlib.sha256).digest() encoded_hash = base64.b64encode(hmac_sha256_sigs).decode("utf-8")
encoded_hash = base64.b64encode(hmac_sha256_sigs).decode('utf-8')
signature = f"SharedKey {workspace_id}:{encoded_hash}" signature = f"SharedKey {workspace_id}:{encoded_hash}"
return signature return signature
@@ -95,10 +94,10 @@ class AzureLogAnalyticsSource:
return f"https://{workspace_id}.ods.opinsights.azure.com/api/logs?api-version=2016-04-01" return f"https://{workspace_id}.ods.opinsights.azure.com/api/logs?api-version=2016-04-01"
def __rfc1123date(self): def __rfc1123date(self):
return now().strftime('%a, %d %b %Y %H:%M:%S GMT') return now().strftime("%a, %d %b %Y %H:%M:%S GMT")
def send_event(self, workspace_id, shared_key, state, result, runtime): def send_event(self, workspace_id, shared_key, state, result, runtime):
if result._task_fields['args'].get('_ansible_check_mode') is True: if result._task_fields["args"].get("_ansible_check_mode") is True:
self.ansible_check_mode = True self.ansible_check_mode = True
if result._task._role: if result._task._role:
@@ -107,31 +106,31 @@ class AzureLogAnalyticsSource:
ansible_role = None ansible_role = None
data = {} data = {}
data['uuid'] = result._task._uuid data["uuid"] = result._task._uuid
data['session'] = self.session data["session"] = self.session
data['status'] = state data["status"] = state
data['timestamp'] = self.__rfc1123date() data["timestamp"] = self.__rfc1123date()
data['host'] = self.host data["host"] = self.host
data['user'] = self.user data["user"] = self.user
data['runtime'] = runtime data["runtime"] = runtime
data['ansible_version'] = ansible_version data["ansible_version"] = ansible_version
data['ansible_check_mode'] = self.ansible_check_mode data["ansible_check_mode"] = self.ansible_check_mode
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_playbook'] = self.ansible_playbook data["ansible_playbook"] = self.ansible_playbook
data['ansible_role'] = ansible_role data["ansible_role"] = ansible_role
data['ansible_task'] = result._task_fields data["ansible_task"] = result._task_fields
# Removing args since it can contain sensitive data # Removing args since it can contain sensitive data
if 'args' in data['ansible_task']: if "args" in data["ansible_task"]:
data['ansible_task'].pop('args') data["ansible_task"].pop("args")
data['ansible_result'] = result._result data["ansible_result"] = result._result
if 'content' in data['ansible_result']: if "content" in data["ansible_result"]:
data['ansible_result'].pop('content') data["ansible_result"].pop("content")
# Adding extra vars info # Adding extra vars info
data['extra_vars'] = self.extra_vars data["extra_vars"] = self.extra_vars
# Preparing the playbook logs as JSON format and send to Azure log analytics # Preparing the playbook logs as JSON format and send to Azure log analytics
jsondata = json.dumps({'event': data}, cls=AnsibleJSONEncoder, sort_keys=True) jsondata = json.dumps({"event": data}, cls=AnsibleJSONEncoder, sort_keys=True)
content_length = len(jsondata) content_length = len(jsondata)
rfc1123date = self.__rfc1123date() rfc1123date = self.__rfc1123date()
signature = self.__build_signature(rfc1123date, workspace_id, shared_key, content_length) signature = self.__build_signature(rfc1123date, workspace_id, shared_key, content_length)
@@ -141,19 +140,19 @@ class AzureLogAnalyticsSource:
workspace_url, workspace_url,
jsondata, jsondata,
headers={ headers={
'content-type': 'application/json', "content-type": "application/json",
'Authorization': signature, "Authorization": signature,
'Log-Type': 'ansible_playbook', "Log-Type": "ansible_playbook",
'x-ms-date': rfc1123date "x-ms-date": rfc1123date,
}, },
method='POST' method="POST",
) )
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'loganalytics' CALLBACK_NAME = "loganalytics"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -164,15 +163,12 @@ class CallbackModule(CallbackBase):
self.loganalytics = AzureLogAnalyticsSource() self.loganalytics = AzureLogAnalyticsSource()
def _seconds_since_start(self, result): def _seconds_since_start(self, result):
return ( return (now() - self.start_datetimes[result._task._uuid]).total_seconds()
now() -
self.start_datetimes[result._task._uuid]
).total_seconds()
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.workspace_id = self.get_option('workspace_id') self.workspace_id = self.get_option("workspace_id")
self.shared_key = self.get_option('shared_key') self.shared_key = self.get_option("shared_key")
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
vm = play.get_variable_manager() vm = play.get_variable_manager()
@@ -190,45 +186,25 @@ class CallbackModule(CallbackBase):
def v2_runner_on_ok(self, result, **kwargs): def v2_runner_on_ok(self, result, **kwargs):
self.loganalytics.send_event( self.loganalytics.send_event(
self.workspace_id, self.workspace_id, self.shared_key, "OK", result, self._seconds_since_start(result)
self.shared_key,
'OK',
result,
self._seconds_since_start(result)
) )
def v2_runner_on_skipped(self, result, **kwargs): def v2_runner_on_skipped(self, result, **kwargs):
self.loganalytics.send_event( self.loganalytics.send_event(
self.workspace_id, self.workspace_id, self.shared_key, "SKIPPED", result, self._seconds_since_start(result)
self.shared_key,
'SKIPPED',
result,
self._seconds_since_start(result)
) )
def v2_runner_on_failed(self, result, **kwargs): def v2_runner_on_failed(self, result, **kwargs):
self.loganalytics.send_event( self.loganalytics.send_event(
self.workspace_id, self.workspace_id, self.shared_key, "FAILED", result, self._seconds_since_start(result)
self.shared_key,
'FAILED',
result,
self._seconds_since_start(result)
) )
def runner_on_async_failed(self, result, **kwargs): def runner_on_async_failed(self, result, **kwargs):
self.loganalytics.send_event( self.loganalytics.send_event(
self.workspace_id, self.workspace_id, self.shared_key, "FAILED", result, self._seconds_since_start(result)
self.shared_key,
'FAILED',
result,
self._seconds_since_start(result)
) )
def v2_runner_on_unreachable(self, result, **kwargs): def v2_runner_on_unreachable(self, result, **kwargs):
self.loganalytics.send_event( self.loganalytics.send_event(
self.workspace_id, self.workspace_id, self.shared_key, "UNREACHABLE", result, self._seconds_since_start(result)
self.shared_key,
'UNREACHABLE',
result,
self._seconds_since_start(result)
) )

View File

@@ -64,6 +64,7 @@ from ansible.parsing.ajson import AnsibleJSONEncoder
try: try:
from logdna import LogDNAHandler from logdna import LogDNAHandler
HAS_LOGDNA = True HAS_LOGDNA = True
except ImportError: except ImportError:
HAS_LOGDNA = False HAS_LOGDNA = False
@@ -72,12 +73,12 @@ except ImportError:
# Getting MAC Address of system: # Getting MAC Address of system:
def get_mac(): def get_mac():
mac = f"{getnode():012x}" mac = f"{getnode():012x}"
return ":".join(map(lambda index: mac[index:index + 2], range(int(len(mac) / 2)))) return ":".join(map(lambda index: mac[index : index + 2], range(int(len(mac) / 2))))
# Getting hostname of system: # Getting hostname of system:
def get_hostname(): def get_hostname():
return str(socket.gethostname()).split('.local', 1)[0] return str(socket.gethostname()).split(".local", 1)[0]
# Getting IP of system: # Getting IP of system:
@@ -87,10 +88,10 @@ def get_ip():
except Exception: except Exception:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try: try:
s.connect(('10.255.255.255', 1)) s.connect(("10.255.255.255", 1))
IP = s.getsockname()[0] IP = s.getsockname()[0]
except Exception: except Exception:
IP = '127.0.0.1' IP = "127.0.0.1"
finally: finally:
s.close() s.close()
return IP return IP
@@ -107,10 +108,9 @@ def isJSONable(obj):
# LogDNA Callback Module: # LogDNA Callback Module:
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 0.1 CALLBACK_VERSION = 0.1
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.logdna' CALLBACK_NAME = "community.general.logdna"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -127,27 +127,27 @@ class CallbackModule(CallbackBase):
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.conf_key = self.get_option('conf_key') self.conf_key = self.get_option("conf_key")
self.plugin_ignore_errors = self.get_option('plugin_ignore_errors') self.plugin_ignore_errors = self.get_option("plugin_ignore_errors")
self.conf_hostname = self.get_option('conf_hostname') self.conf_hostname = self.get_option("conf_hostname")
self.conf_tags = self.get_option('conf_tags') self.conf_tags = self.get_option("conf_tags")
self.mac = get_mac() self.mac = get_mac()
self.ip = get_ip() self.ip = get_ip()
if self.conf_hostname is None: if self.conf_hostname is None:
self.conf_hostname = get_hostname() self.conf_hostname = get_hostname()
self.conf_tags = self.conf_tags.split(',') self.conf_tags = self.conf_tags.split(",")
if HAS_LOGDNA: if HAS_LOGDNA:
self.log = logging.getLogger('logdna') self.log = logging.getLogger("logdna")
self.log.setLevel(logging.INFO) self.log.setLevel(logging.INFO)
self.options = {'hostname': self.conf_hostname, 'mac': self.mac, 'index_meta': True} self.options = {"hostname": self.conf_hostname, "mac": self.mac, "index_meta": True}
self.log.addHandler(LogDNAHandler(self.conf_key, self.options)) self.log.addHandler(LogDNAHandler(self.conf_key, self.options))
self.disabled = False self.disabled = False
else: else:
self.disabled = True self.disabled = True
self._display.warning('WARNING:\nPlease, install LogDNA Python Package: `pip install logdna`') self._display.warning("WARNING:\nPlease, install LogDNA Python Package: `pip install logdna`")
def metaIndexing(self, meta): def metaIndexing(self, meta):
invalidKeys = [] invalidKeys = []
@@ -159,25 +159,25 @@ class CallbackModule(CallbackBase):
if ninvalidKeys > 0: if ninvalidKeys > 0:
for key in invalidKeys: for key in invalidKeys:
del meta[key] del meta[key]
meta['__errors'] = f"These keys have been sanitized: {', '.join(invalidKeys)}" meta["__errors"] = f"These keys have been sanitized: {', '.join(invalidKeys)}"
return meta return meta
def sanitizeJSON(self, data): def sanitizeJSON(self, data):
try: try:
return json.loads(json.dumps(data, sort_keys=True, cls=AnsibleJSONEncoder)) return json.loads(json.dumps(data, sort_keys=True, cls=AnsibleJSONEncoder))
except Exception: except Exception:
return {'warnings': ['JSON Formatting Issue', json.dumps(data, sort_keys=True, cls=AnsibleJSONEncoder)]} return {"warnings": ["JSON Formatting Issue", json.dumps(data, sort_keys=True, cls=AnsibleJSONEncoder)]}
def flush(self, log, options): def flush(self, log, options):
if HAS_LOGDNA: if HAS_LOGDNA:
self.log.info(json.dumps(log), options) self.log.info(json.dumps(log), options)
def sendLog(self, host, category, logdata): def sendLog(self, host, category, logdata):
options = {'app': 'ansible', 'meta': {'playbook': self.playbook_name, 'host': host, 'category': category}} options = {"app": "ansible", "meta": {"playbook": self.playbook_name, "host": host, "category": category}}
logdata['info'].pop('invocation', None) logdata["info"].pop("invocation", None)
warnings = logdata['info'].pop('warnings', None) warnings = logdata["info"].pop("warnings", None)
if warnings is not None: if warnings is not None:
self.flush({'warn': warnings}, options) self.flush({"warn": warnings}, options)
self.flush(logdata, options) self.flush(logdata, options)
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
@@ -188,21 +188,21 @@ class CallbackModule(CallbackBase):
result = dict() result = dict()
for host in stats.processed.keys(): for host in stats.processed.keys():
result[host] = stats.summarize(host) result[host] = stats.summarize(host)
self.sendLog(self.conf_hostname, 'STATS', {'info': self.sanitizeJSON(result)}) self.sendLog(self.conf_hostname, "STATS", {"info": self.sanitizeJSON(result)})
def runner_on_failed(self, host, res, ignore_errors=False): def runner_on_failed(self, host, res, ignore_errors=False):
if self.plugin_ignore_errors: if self.plugin_ignore_errors:
ignore_errors = self.plugin_ignore_errors ignore_errors = self.plugin_ignore_errors
self.sendLog(host, 'FAILED', {'info': self.sanitizeJSON(res), 'ignore_errors': ignore_errors}) self.sendLog(host, "FAILED", {"info": self.sanitizeJSON(res), "ignore_errors": ignore_errors})
def runner_on_ok(self, host, res): def runner_on_ok(self, host, res):
self.sendLog(host, 'OK', {'info': self.sanitizeJSON(res)}) self.sendLog(host, "OK", {"info": self.sanitizeJSON(res)})
def runner_on_unreachable(self, host, res): def runner_on_unreachable(self, host, res):
self.sendLog(host, 'UNREACHABLE', {'info': self.sanitizeJSON(res)}) self.sendLog(host, "UNREACHABLE", {"info": self.sanitizeJSON(res)})
def runner_on_async_failed(self, host, res, jid): def runner_on_async_failed(self, host, res, jid):
self.sendLog(host, 'ASYNC_FAILED', {'info': self.sanitizeJSON(res), 'job_id': jid}) self.sendLog(host, "ASYNC_FAILED", {"info": self.sanitizeJSON(res), "job_id": jid})
def runner_on_async_ok(self, host, res, jid): def runner_on_async_ok(self, host, res, jid):
self.sendLog(host, 'ASYNC_OK', {'info': self.sanitizeJSON(res), 'job_id': jid}) self.sendLog(host, "ASYNC_OK", {"info": self.sanitizeJSON(res), "job_id": jid})

View File

@@ -103,12 +103,14 @@ import uuid
try: try:
import certifi import certifi
HAS_CERTIFI = True HAS_CERTIFI = True
except ImportError: except ImportError:
HAS_CERTIFI = False HAS_CERTIFI = False
try: try:
import flatdict import flatdict
HAS_FLATDICT = True HAS_FLATDICT = True
except ImportError: except ImportError:
HAS_FLATDICT = False HAS_FLATDICT = False
@@ -121,8 +123,7 @@ from ansible.plugins.callback import CallbackBase
class PlainTextSocketAppender: class PlainTextSocketAppender:
def __init__(self, display, LE_API='data.logentries.com', LE_PORT=80, LE_TLS_PORT=443): def __init__(self, display, LE_API="data.logentries.com", LE_PORT=80, LE_TLS_PORT=443):
self.LE_API = LE_API self.LE_API = LE_API
self.LE_PORT = LE_PORT self.LE_PORT = LE_PORT
self.LE_TLS_PORT = LE_TLS_PORT self.LE_TLS_PORT = LE_TLS_PORT
@@ -131,7 +132,7 @@ class PlainTextSocketAppender:
# Error message displayed when an incorrect Token has been detected # Error message displayed when an incorrect Token has been detected
self.INVALID_TOKEN = "\n\nIt appears the LOGENTRIES_TOKEN parameter you entered is incorrect!\n\n" self.INVALID_TOKEN = "\n\nIt appears the LOGENTRIES_TOKEN parameter you entered is incorrect!\n\n"
# Unicode Line separator character \u2028 # Unicode Line separator character \u2028
self.LINE_SEP = '\u2028' self.LINE_SEP = "\u2028"
self._display = display self._display = display
self._conn = None self._conn = None
@@ -170,13 +171,13 @@ class PlainTextSocketAppender:
def put(self, data): def put(self, data):
# Replace newlines with Unicode line separator # Replace newlines with Unicode line separator
# for multi-line events # for multi-line events
data = to_text(data, errors='surrogate_or_strict') data = to_text(data, errors="surrogate_or_strict")
multiline = data.replace('\n', self.LINE_SEP) multiline = data.replace("\n", self.LINE_SEP)
multiline += "\n" multiline += "\n"
# Send data, reconnect if needed # Send data, reconnect if needed
while True: while True:
try: try:
self._conn.send(to_bytes(multiline, errors='surrogate_or_strict')) self._conn.send(to_bytes(multiline, errors="surrogate_or_strict"))
except socket.error: except socket.error:
self.reopen_connection() self.reopen_connection()
continue continue
@@ -187,6 +188,7 @@ class PlainTextSocketAppender:
try: try:
import ssl import ssl
HAS_SSL = True HAS_SSL = True
except ImportError: # for systems without TLS support. except ImportError: # for systems without TLS support.
SocketAppender = PlainTextSocketAppender SocketAppender = PlainTextSocketAppender
@@ -198,11 +200,13 @@ else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
context = ssl.create_default_context( context = ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH, purpose=ssl.Purpose.SERVER_AUTH,
cafile=certifi.where(), ) cafile=certifi.where(),
)
sock = context.wrap_socket( sock = context.wrap_socket(
sock=sock, sock=sock,
do_handshake_on_connect=True, do_handshake_on_connect=True,
suppress_ragged_eofs=True, ) suppress_ragged_eofs=True,
)
sock.connect((self.LE_API, self.LE_TLS_PORT)) sock.connect((self.LE_API, self.LE_TLS_PORT))
self._conn = sock self._conn = sock
@@ -211,12 +215,11 @@ else:
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.logentries' CALLBACK_NAME = "community.general.logentries"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self): def __init__(self):
# TODO: allow for alternate posting methods (REST/UDP/agent/etc) # TODO: allow for alternate posting methods (REST/UDP/agent/etc)
super().__init__() super().__init__()
@@ -226,7 +229,9 @@ class CallbackModule(CallbackBase):
if not HAS_CERTIFI: if not HAS_CERTIFI:
self.disabled = True self.disabled = True
self._display.warning('The `certifi` python module is not installed.\nDisabling the Logentries callback plugin.') self._display.warning(
"The `certifi` python module is not installed.\nDisabling the Logentries callback plugin."
)
self.le_jobid = str(uuid.uuid4()) self.le_jobid = str(uuid.uuid4())
@@ -234,41 +239,47 @@ class CallbackModule(CallbackBase):
self.timeout = 10 self.timeout = 10
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
# get options # get options
try: try:
self.api_url = self.get_option('api') self.api_url = self.get_option("api")
self.api_port = self.get_option('port') self.api_port = self.get_option("port")
self.api_tls_port = self.get_option('tls_port') self.api_tls_port = self.get_option("tls_port")
self.use_tls = self.get_option('use_tls') self.use_tls = self.get_option("use_tls")
self.flatten = self.get_option('flatten') self.flatten = self.get_option("flatten")
except KeyError as e: except KeyError as e:
self._display.warning(f"Missing option for Logentries callback plugin: {e}") self._display.warning(f"Missing option for Logentries callback plugin: {e}")
self.disabled = True self.disabled = True
try: try:
self.token = self.get_option('token') self.token = self.get_option("token")
except KeyError as e: except KeyError as e:
self._display.warning('Logentries token was not provided, this is required for this callback to operate, disabling') self._display.warning(
"Logentries token was not provided, this is required for this callback to operate, disabling"
)
self.disabled = True self.disabled = True
if self.flatten and not HAS_FLATDICT: if self.flatten and not HAS_FLATDICT:
self.disabled = True self.disabled = True
self._display.warning('You have chosen to flatten and the `flatdict` python module is not installed.\nDisabling the Logentries callback plugin.') self._display.warning(
"You have chosen to flatten and the `flatdict` python module is not installed.\nDisabling the Logentries callback plugin."
)
self._initialize_connections() self._initialize_connections()
def _initialize_connections(self): def _initialize_connections(self):
if not self.disabled: if not self.disabled:
if self.use_tls: if self.use_tls:
self._display.vvvv(f"Connecting to {self.api_url}:{self.api_tls_port} with TLS") self._display.vvvv(f"Connecting to {self.api_url}:{self.api_tls_port} with TLS")
self._appender = TLSSocketAppender(display=self._display, LE_API=self.api_url, LE_TLS_PORT=self.api_tls_port) self._appender = TLSSocketAppender(
display=self._display, LE_API=self.api_url, LE_TLS_PORT=self.api_tls_port
)
else: else:
self._display.vvvv(f"Connecting to {self.api_url}:{self.api_port}") self._display.vvvv(f"Connecting to {self.api_url}:{self.api_port}")
self._appender = PlainTextSocketAppender(display=self._display, LE_API=self.api_url, LE_PORT=self.api_port) self._appender = PlainTextSocketAppender(
display=self._display, LE_API=self.api_url, LE_PORT=self.api_port
)
self._appender.reopen_connection() self._appender.reopen_connection()
def emit_formatted(self, record): def emit_formatted(self, record):
@@ -279,50 +290,50 @@ class CallbackModule(CallbackBase):
self.emit(self._dump_results(record)) self.emit(self._dump_results(record))
def emit(self, record): def emit(self, record):
msg = record.rstrip('\n') msg = record.rstrip("\n")
msg = f"{self.token} {msg}" msg = f"{self.token} {msg}"
self._appender.put(msg) self._appender.put(msg)
self._display.vvvv("Sent event to logentries") self._display.vvvv("Sent event to logentries")
def _set_info(self, host, res): def _set_info(self, host, res):
return {'le_jobid': self.le_jobid, 'hostname': host, 'results': res} return {"le_jobid": self.le_jobid, "hostname": host, "results": res}
def runner_on_ok(self, host, res): def runner_on_ok(self, host, res):
results = self._set_info(host, res) results = self._set_info(host, res)
results['status'] = 'OK' results["status"] = "OK"
self.emit_formatted(results) self.emit_formatted(results)
def runner_on_failed(self, host, res, ignore_errors=False): def runner_on_failed(self, host, res, ignore_errors=False):
results = self._set_info(host, res) results = self._set_info(host, res)
results['status'] = 'FAILED' results["status"] = "FAILED"
self.emit_formatted(results) self.emit_formatted(results)
def runner_on_skipped(self, host, item=None): def runner_on_skipped(self, host, item=None):
results = self._set_info(host, item) results = self._set_info(host, item)
del results['results'] del results["results"]
results['status'] = 'SKIPPED' results["status"] = "SKIPPED"
self.emit_formatted(results) self.emit_formatted(results)
def runner_on_unreachable(self, host, res): def runner_on_unreachable(self, host, res):
results = self._set_info(host, res) results = self._set_info(host, res)
results['status'] = 'UNREACHABLE' results["status"] = "UNREACHABLE"
self.emit_formatted(results) self.emit_formatted(results)
def runner_on_async_failed(self, host, res, jid): def runner_on_async_failed(self, host, res, jid):
results = self._set_info(host, res) results = self._set_info(host, res)
results['jid'] = jid results["jid"] = jid
results['status'] = 'ASYNC_FAILED' results["status"] = "ASYNC_FAILED"
self.emit_formatted(results) self.emit_formatted(results)
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
results = {} results = {}
results['le_jobid'] = self.le_jobid results["le_jobid"] = self.le_jobid
results['started_by'] = os.getlogin() results["started_by"] = os.getlogin()
if play.name: if play.name:
results['play'] = play.name results["play"] = play.name
results['hosts'] = play.hosts results["hosts"] = play.hosts
self.emit_formatted(results) self.emit_formatted(results)
def playbook_on_stats(self, stats): def playbook_on_stats(self, stats):
""" close connection """ """close connection"""
self._appender.close_connection() self._appender.close_connection()

View File

@@ -103,6 +103,7 @@ import logging
try: try:
import logstash import logstash
HAS_LOGSTASH = True HAS_LOGSTASH = True
except ImportError: except ImportError:
HAS_LOGSTASH = False HAS_LOGSTASH = False
@@ -115,10 +116,9 @@ from ansible_collections.community.general.plugins.module_utils.datetime import
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.logstash' CALLBACK_NAME = "community.general.logstash"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self): def __init__(self):
@@ -132,14 +132,11 @@ class CallbackModule(CallbackBase):
def _init_plugin(self): def _init_plugin(self):
if not self.disabled: if not self.disabled:
self.logger = logging.getLogger('python-logstash-logger') self.logger = logging.getLogger("python-logstash-logger")
self.logger.setLevel(logging.DEBUG) self.logger.setLevel(logging.DEBUG)
self.handler = logstash.TCPLogstashHandler( self.handler = logstash.TCPLogstashHandler(
self.ls_server, self.ls_server, self.ls_port, version=1, message_type=self.ls_type
self.ls_port,
version=1,
message_type=self.ls_type
) )
self.logger.addHandler(self.handler) self.logger.addHandler(self.handler)
@@ -147,42 +144,36 @@ class CallbackModule(CallbackBase):
self.session = str(uuid.uuid4()) self.session = str(uuid.uuid4())
self.errors = 0 self.errors = 0
self.base_data = { self.base_data = {"session": self.session, "host": self.hostname}
'session': self.session,
'host': self.hostname
}
if self.ls_pre_command is not None: if self.ls_pre_command is not None:
self.base_data['ansible_pre_command_output'] = os.popen( self.base_data["ansible_pre_command_output"] = os.popen(self.ls_pre_command).read()
self.ls_pre_command).read()
if context.CLIARGS is not None: if context.CLIARGS is not None:
self.base_data['ansible_checkmode'] = context.CLIARGS.get('check') self.base_data["ansible_checkmode"] = context.CLIARGS.get("check")
self.base_data['ansible_tags'] = context.CLIARGS.get('tags') self.base_data["ansible_tags"] = context.CLIARGS.get("tags")
self.base_data['ansible_skip_tags'] = context.CLIARGS.get('skip_tags') self.base_data["ansible_skip_tags"] = context.CLIARGS.get("skip_tags")
self.base_data['inventory'] = context.CLIARGS.get('inventory') self.base_data["inventory"] = context.CLIARGS.get("inventory")
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.ls_server = self.get_option('server') self.ls_server = self.get_option("server")
self.ls_port = int(self.get_option('port')) self.ls_port = int(self.get_option("port"))
self.ls_type = self.get_option('type') self.ls_type = self.get_option("type")
self.ls_pre_command = self.get_option('pre_command') self.ls_pre_command = self.get_option("pre_command")
self.ls_format_version = self.get_option('format_version') self.ls_format_version = self.get_option("format_version")
self._init_plugin() self._init_plugin()
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "start" data["ansible_type"] = "start"
data['status'] = "OK" data["status"] = "OK"
data['ansible_playbook'] = playbook._file_name data["ansible_playbook"] = playbook._file_name
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info( self.logger.info("START PLAYBOOK | %s", data["ansible_playbook"], extra=data)
"START PLAYBOOK | %s", data['ansible_playbook'], extra=data
)
else: else:
self.logger.info("ansible start", extra=data) self.logger.info("ansible start", extra=data)
@@ -199,15 +190,13 @@ class CallbackModule(CallbackBase):
status = "FAILED" status = "FAILED"
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "finish" data["ansible_type"] = "finish"
data['status'] = status data["status"] = status
data['ansible_playbook_duration'] = runtime.total_seconds() data["ansible_playbook_duration"] = runtime.total_seconds()
data['ansible_result'] = json.dumps(summarize_stat) # deprecated field data["ansible_result"] = json.dumps(summarize_stat) # deprecated field
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info( self.logger.info("FINISH PLAYBOOK | %s", json.dumps(summarize_stat), extra=data)
"FINISH PLAYBOOK | %s", json.dumps(summarize_stat), extra=data
)
else: else:
self.logger.info("ansible stats", extra=data) self.logger.info("ansible stats", extra=data)
@@ -218,10 +207,10 @@ class CallbackModule(CallbackBase):
self.play_name = play.name self.play_name = play.name
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "start" data["ansible_type"] = "start"
data['status'] = "OK" data["status"] = "OK"
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info("START PLAY | %s", self.play_name, extra=data) self.logger.info("START PLAY | %s", self.play_name, extra=data)
@@ -231,64 +220,61 @@ class CallbackModule(CallbackBase):
def v2_playbook_on_task_start(self, task, is_conditional): def v2_playbook_on_task_start(self, task, is_conditional):
self.task_id = str(task._uuid) self.task_id = str(task._uuid)
''' """
Tasks and handler tasks are dealt with here Tasks and handler tasks are dealt with here
''' """
def v2_runner_on_ok(self, result, **kwargs): def v2_runner_on_ok(self, result, **kwargs):
task_name = str(result._task).replace('TASK: ', '').replace('HANDLER: ', '') task_name = str(result._task).replace("TASK: ", "").replace("HANDLER: ", "")
data = self.base_data.copy() data = self.base_data.copy()
if task_name == 'setup': if task_name == "setup":
data['ansible_type'] = "setup" data["ansible_type"] = "setup"
data['status'] = "OK" data["status"] = "OK"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['ansible_task'] = task_name data["ansible_task"] = task_name
data['ansible_facts'] = self._dump_results(result._result) data["ansible_facts"] = self._dump_results(result._result)
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info( self.logger.info("SETUP FACTS | %s", self._dump_results(result._result), extra=data)
"SETUP FACTS | %s", self._dump_results(result._result), extra=data
)
else: else:
self.logger.info("ansible facts", extra=data) self.logger.info("ansible facts", extra=data)
else: else:
if 'changed' in result._result.keys(): if "changed" in result._result.keys():
data['ansible_changed'] = result._result['changed'] data["ansible_changed"] = result._result["changed"]
else: else:
data['ansible_changed'] = False data["ansible_changed"] = False
data['ansible_type'] = "task" data["ansible_type"] = "task"
data['status'] = "OK" data["status"] = "OK"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['ansible_task'] = task_name data["ansible_task"] = task_name
data['ansible_task_id'] = self.task_id data["ansible_task_id"] = self.task_id
data['ansible_result'] = self._dump_results(result._result) data["ansible_result"] = self._dump_results(result._result)
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info( self.logger.info(
"TASK OK | %s | RESULT | %s", "TASK OK | %s | RESULT | %s", task_name, self._dump_results(result._result), extra=data
task_name, self._dump_results(result._result), extra=data
) )
else: else:
self.logger.info("ansible ok", extra=data) self.logger.info("ansible ok", extra=data)
def v2_runner_on_skipped(self, result, **kwargs): def v2_runner_on_skipped(self, result, **kwargs):
task_name = str(result._task).replace('TASK: ', '').replace('HANDLER: ', '') task_name = str(result._task).replace("TASK: ", "").replace("HANDLER: ", "")
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "task" data["ansible_type"] = "task"
data['status'] = "SKIPPED" data["status"] = "SKIPPED"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['ansible_task'] = task_name data["ansible_task"] = task_name
data['ansible_task_id'] = self.task_id data["ansible_task_id"] = self.task_id
data['ansible_result'] = self._dump_results(result._result) data["ansible_result"] = self._dump_results(result._result)
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info("TASK SKIPPED | %s", task_name, extra=data) self.logger.info("TASK SKIPPED | %s", task_name, extra=data)
@@ -297,12 +283,12 @@ class CallbackModule(CallbackBase):
def v2_playbook_on_import_for_host(self, result, imported_file): def v2_playbook_on_import_for_host(self, result, imported_file):
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "import" data["ansible_type"] = "import"
data['status'] = "IMPORTED" data["status"] = "IMPORTED"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['imported_file'] = imported_file data["imported_file"] = imported_file
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info("IMPORT | %s", imported_file, extra=data) self.logger.info("IMPORT | %s", imported_file, extra=data)
@@ -311,12 +297,12 @@ class CallbackModule(CallbackBase):
def v2_playbook_on_not_import_for_host(self, result, missing_file): def v2_playbook_on_not_import_for_host(self, result, missing_file):
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "import" data["ansible_type"] = "import"
data['status'] = "NOT IMPORTED" data["status"] = "NOT IMPORTED"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['imported_file'] = missing_file data["imported_file"] = missing_file
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.info("NOT IMPORTED | %s", missing_file, extra=data) self.logger.info("NOT IMPORTED | %s", missing_file, extra=data)
@@ -324,75 +310,81 @@ class CallbackModule(CallbackBase):
self.logger.info("ansible import", extra=data) self.logger.info("ansible import", extra=data)
def v2_runner_on_failed(self, result, **kwargs): def v2_runner_on_failed(self, result, **kwargs):
task_name = str(result._task).replace('TASK: ', '').replace('HANDLER: ', '') task_name = str(result._task).replace("TASK: ", "").replace("HANDLER: ", "")
data = self.base_data.copy() data = self.base_data.copy()
if 'changed' in result._result.keys(): if "changed" in result._result.keys():
data['ansible_changed'] = result._result['changed'] data["ansible_changed"] = result._result["changed"]
else: else:
data['ansible_changed'] = False data["ansible_changed"] = False
data['ansible_type'] = "task" data["ansible_type"] = "task"
data['status'] = "FAILED" data["status"] = "FAILED"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['ansible_task'] = task_name data["ansible_task"] = task_name
data['ansible_task_id'] = self.task_id data["ansible_task_id"] = self.task_id
data['ansible_result'] = self._dump_results(result._result) data["ansible_result"] = self._dump_results(result._result)
self.errors += 1 self.errors += 1
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.error( self.logger.error(
"TASK FAILED | %s | HOST | %s | RESULT | %s", "TASK FAILED | %s | HOST | %s | RESULT | %s",
task_name, self.hostname, task_name,
self._dump_results(result._result), extra=data self.hostname,
self._dump_results(result._result),
extra=data,
) )
else: else:
self.logger.error("ansible failed", extra=data) self.logger.error("ansible failed", extra=data)
def v2_runner_on_unreachable(self, result, **kwargs): def v2_runner_on_unreachable(self, result, **kwargs):
task_name = str(result._task).replace('TASK: ', '').replace('HANDLER: ', '') task_name = str(result._task).replace("TASK: ", "").replace("HANDLER: ", "")
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "task" data["ansible_type"] = "task"
data['status'] = "UNREACHABLE" data["status"] = "UNREACHABLE"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['ansible_task'] = task_name data["ansible_task"] = task_name
data['ansible_task_id'] = self.task_id data["ansible_task_id"] = self.task_id
data['ansible_result'] = self._dump_results(result._result) data["ansible_result"] = self._dump_results(result._result)
self.errors += 1 self.errors += 1
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.error( self.logger.error(
"UNREACHABLE | %s | HOST | %s | RESULT | %s", "UNREACHABLE | %s | HOST | %s | RESULT | %s",
task_name, self.hostname, task_name,
self._dump_results(result._result), extra=data self.hostname,
self._dump_results(result._result),
extra=data,
) )
else: else:
self.logger.error("ansible unreachable", extra=data) self.logger.error("ansible unreachable", extra=data)
def v2_runner_on_async_failed(self, result, **kwargs): def v2_runner_on_async_failed(self, result, **kwargs):
task_name = str(result._task).replace('TASK: ', '').replace('HANDLER: ', '') task_name = str(result._task).replace("TASK: ", "").replace("HANDLER: ", "")
data = self.base_data.copy() data = self.base_data.copy()
data['ansible_type'] = "task" data["ansible_type"] = "task"
data['status'] = "FAILED" data["status"] = "FAILED"
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_play_id'] = self.play_id data["ansible_play_id"] = self.play_id
data['ansible_play_name'] = self.play_name data["ansible_play_name"] = self.play_name
data['ansible_task'] = task_name data["ansible_task"] = task_name
data['ansible_task_id'] = self.task_id data["ansible_task_id"] = self.task_id
data['ansible_result'] = self._dump_results(result._result) data["ansible_result"] = self._dump_results(result._result)
self.errors += 1 self.errors += 1
if self.ls_format_version == "v2": if self.ls_format_version == "v2":
self.logger.error( self.logger.error(
"ASYNC FAILED | %s | HOST | %s | RESULT | %s", "ASYNC FAILED | %s | HOST | %s | RESULT | %s",
task_name, self.hostname, task_name,
self._dump_results(result._result), extra=data self.hostname,
self._dump_results(result._result),
extra=data,
) )
else: else:
self.logger.error("ansible async", extra=data) self.logger.error("ansible async", extra=data)

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2012, Dag Wieers <dag@wieers.com> # Copyright (c) 2012, Dag Wieers <dag@wieers.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -92,33 +91,33 @@ from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
''' This Ansible callback plugin mails errors to interested parties. ''' """This Ansible callback plugin mails errors to interested parties."""
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.mail' CALLBACK_NAME = "community.general.mail"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
super().__init__(display=display) super().__init__(display=display)
self.sender = None self.sender = None
self.to = 'root' self.to = "root"
self.smtphost = os.getenv('SMTPHOST', 'localhost') self.smtphost = os.getenv("SMTPHOST", "localhost")
self.smtpport = 25 self.smtpport = 25
self.cc = None self.cc = None
self.bcc = None self.bcc = None
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.sender = self.get_option('sender') self.sender = self.get_option("sender")
self.to = self.get_option('to') self.to = self.get_option("to")
self.smtphost = self.get_option('mta') self.smtphost = self.get_option("mta")
self.smtpport = self.get_option('mtaport') self.smtpport = self.get_option("mtaport")
self.cc = self.get_option('cc') self.cc = self.get_option("cc")
self.bcc = self.get_option('bcc') self.bcc = self.get_option("bcc")
def mail(self, subject='Ansible error mail', body=None): def mail(self, subject="Ansible error mail", body=None):
if body is None: if body is None:
body = subject body = subject
@@ -132,14 +131,14 @@ class CallbackModule(CallbackBase):
if self.bcc: if self.bcc:
bcc_addresses = email.utils.getaddresses(self.bcc) bcc_addresses = email.utils.getaddresses(self.bcc)
content = f'Date: {email.utils.formatdate()}\n' content = f"Date: {email.utils.formatdate()}\n"
content += f'From: {email.utils.formataddr(sender_address)}\n' content += f"From: {email.utils.formataddr(sender_address)}\n"
if self.to: if self.to:
content += f"To: {', '.join([email.utils.formataddr(pair) for pair in to_addresses])}\n" content += f"To: {', '.join([email.utils.formataddr(pair) for pair in to_addresses])}\n"
if self.cc: if self.cc:
content += f"Cc: {', '.join([email.utils.formataddr(pair) for pair in cc_addresses])}\n" content += f"Cc: {', '.join([email.utils.formataddr(pair) for pair in cc_addresses])}\n"
content += f"Message-ID: {email.utils.make_msgid(domain=self.get_option('message_id_domain'))}\n" content += f"Message-ID: {email.utils.make_msgid(domain=self.get_option('message_id_domain'))}\n"
content += f'Subject: {subject.strip()}\n\n' content += f"Subject: {subject.strip()}\n\n"
content += body content += body
addresses = to_addresses addresses = to_addresses
@@ -149,23 +148,23 @@ class CallbackModule(CallbackBase):
addresses += bcc_addresses addresses += bcc_addresses
if not addresses: if not addresses:
self._display.warning('No receiver has been specified for the mail callback plugin.') self._display.warning("No receiver has been specified for the mail callback plugin.")
smtp.sendmail(self.sender, [address for name, address in addresses], to_bytes(content)) smtp.sendmail(self.sender, [address for name, address in addresses], to_bytes(content))
smtp.quit() smtp.quit()
def subject_msg(self, multiline, failtype, linenr): def subject_msg(self, multiline, failtype, linenr):
msg = multiline.strip('\r\n').splitlines()[linenr] msg = multiline.strip("\r\n").splitlines()[linenr]
return f'{failtype}: {msg}' return f"{failtype}: {msg}"
def indent(self, multiline, indent=8): def indent(self, multiline, indent=8):
return re.sub('^', ' ' * indent, multiline, flags=re.MULTILINE) return re.sub("^", " " * indent, multiline, flags=re.MULTILINE)
def body_blob(self, multiline, texttype): def body_blob(self, multiline, texttype):
''' Turn some text output in a well-indented block for sending in a mail body ''' """Turn some text output in a well-indented block for sending in a mail body"""
intro = f'with the following {texttype}:\n\n' intro = f"with the following {texttype}:\n\n"
blob = "\n".join(multiline.strip('\r\n').splitlines()) blob = "\n".join(multiline.strip("\r\n").splitlines())
return f"{intro}{self.indent(blob)}\n" return f"{intro}{self.indent(blob)}\n"
def mail_result(self, result, failtype): def mail_result(self, result, failtype):
@@ -176,83 +175,87 @@ class CallbackModule(CallbackBase):
# Add subject # Add subject
if self.itembody: if self.itembody:
subject = self.itemsubject subject = self.itemsubject
elif result._result.get('failed_when_result') is True: elif result._result.get("failed_when_result") is True:
subject = "Failed due to 'failed_when' condition" subject = "Failed due to 'failed_when' condition"
elif result._result.get('msg'): elif result._result.get("msg"):
subject = self.subject_msg(result._result['msg'], failtype, 0) subject = self.subject_msg(result._result["msg"], failtype, 0)
elif result._result.get('stderr'): elif result._result.get("stderr"):
subject = self.subject_msg(result._result['stderr'], failtype, -1) subject = self.subject_msg(result._result["stderr"], failtype, -1)
elif result._result.get('stdout'): elif result._result.get("stdout"):
subject = self.subject_msg(result._result['stdout'], failtype, -1) subject = self.subject_msg(result._result["stdout"], failtype, -1)
elif result._result.get('exception'): # Unrelated exceptions are added to output :-/ elif result._result.get("exception"): # Unrelated exceptions are added to output :-/
subject = self.subject_msg(result._result['exception'], failtype, -1) subject = self.subject_msg(result._result["exception"], failtype, -1)
else: else:
subject = f'{failtype}: {result._task.name or result._task.action}' subject = f"{failtype}: {result._task.name or result._task.action}"
# Make playbook name visible (e.g. in Outlook/Gmail condensed view) # Make playbook name visible (e.g. in Outlook/Gmail condensed view)
body = f'Playbook: {os.path.basename(self.playbook._file_name)}\n' body = f"Playbook: {os.path.basename(self.playbook._file_name)}\n"
if result._task.name: if result._task.name:
body += f'Task: {result._task.name}\n' body += f"Task: {result._task.name}\n"
body += f'Module: {result._task.action}\n' body += f"Module: {result._task.action}\n"
body += f'Host: {host}\n' body += f"Host: {host}\n"
body += '\n' body += "\n"
# Add task information (as much as possible) # Add task information (as much as possible)
body += 'The following task failed:\n\n' body += "The following task failed:\n\n"
if 'invocation' in result._result: if "invocation" in result._result:
body += self.indent(f"{result._task.action}: {json.dumps(result._result['invocation']['module_args'], indent=4)}\n") body += self.indent(
f"{result._task.action}: {json.dumps(result._result['invocation']['module_args'], indent=4)}\n"
)
elif result._task.name: elif result._task.name:
body += self.indent(f'{result._task.name} ({result._task.action})\n') body += self.indent(f"{result._task.name} ({result._task.action})\n")
else: else:
body += self.indent(f'{result._task.action}\n') body += self.indent(f"{result._task.action}\n")
body += '\n' body += "\n"
# Add item / message # Add item / message
if self.itembody: if self.itembody:
body += self.itembody body += self.itembody
elif result._result.get('failed_when_result') is True: elif result._result.get("failed_when_result") is True:
fail_cond_list = '\n- '.join(result._task.failed_when) fail_cond_list = "\n- ".join(result._task.failed_when)
fail_cond = self.indent(f"failed_when:\n- {fail_cond_list}") fail_cond = self.indent(f"failed_when:\n- {fail_cond_list}")
body += f"due to the following condition:\n\n{fail_cond}\n\n" body += f"due to the following condition:\n\n{fail_cond}\n\n"
elif result._result.get('msg'): elif result._result.get("msg"):
body += self.body_blob(result._result['msg'], 'message') body += self.body_blob(result._result["msg"], "message")
# Add stdout / stderr / exception / warnings / deprecations # Add stdout / stderr / exception / warnings / deprecations
if result._result.get('stdout'): if result._result.get("stdout"):
body += self.body_blob(result._result['stdout'], 'standard output') body += self.body_blob(result._result["stdout"], "standard output")
if result._result.get('stderr'): if result._result.get("stderr"):
body += self.body_blob(result._result['stderr'], 'error output') body += self.body_blob(result._result["stderr"], "error output")
if result._result.get('exception'): # Unrelated exceptions are added to output :-/ if result._result.get("exception"): # Unrelated exceptions are added to output :-/
body += self.body_blob(result._result['exception'], 'exception') body += self.body_blob(result._result["exception"], "exception")
if result._result.get('warnings'): if result._result.get("warnings"):
for i in range(len(result._result.get('warnings'))): for i in range(len(result._result.get("warnings"))):
body += self.body_blob(result._result['warnings'][i], f'exception {i + 1}') body += self.body_blob(result._result["warnings"][i], f"exception {i + 1}")
if result._result.get('deprecations'): if result._result.get("deprecations"):
for i in range(len(result._result.get('deprecations'))): for i in range(len(result._result.get("deprecations"))):
body += self.body_blob(result._result['deprecations'][i], f'exception {i + 1}') body += self.body_blob(result._result["deprecations"][i], f"exception {i + 1}")
body += 'and a complete dump of the error:\n\n' body += "and a complete dump of the error:\n\n"
body += self.indent(f'{failtype}: {json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4)}') body += self.indent(f"{failtype}: {json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4)}")
self.mail(subject=subject, body=body) self.mail(subject=subject, body=body)
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self.playbook = playbook self.playbook = playbook
self.itembody = '' self.itembody = ""
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
if ignore_errors: if ignore_errors:
return return
self.mail_result(result, 'Failed') self.mail_result(result, "Failed")
def v2_runner_on_unreachable(self, result): def v2_runner_on_unreachable(self, result):
self.mail_result(result, 'Unreachable') self.mail_result(result, "Unreachable")
def v2_runner_on_async_failed(self, result): def v2_runner_on_async_failed(self, result):
self.mail_result(result, 'Async failure') self.mail_result(result, "Async failure")
def v2_runner_item_on_failed(self, result): def v2_runner_item_on_failed(self, result):
# Pass item information to task failure # Pass item information to task failure
self.itemsubject = result._result['msg'] self.itemsubject = result._result["msg"]
self.itembody += self.body_blob(json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4), f"failed item dump '{result._result['item']}'") self.itembody += self.body_blob(
json.dumps(result._result, cls=AnsibleJSONEncoder, indent=4), f"failed item dump '{result._result['item']}'"
)

View File

@@ -73,13 +73,13 @@ from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
''' """
send ansible-playbook to Nagios server using nrdp protocol send ansible-playbook to Nagios server using nrdp protocol
''' """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.nrdp' CALLBACK_NAME = "community.general.nrdp"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
# Nagios states # Nagios states
@@ -98,25 +98,26 @@ class CallbackModule(CallbackBase):
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.url = self.get_option('url') self.url = self.get_option("url")
if not self.url.endswith('/'): if not self.url.endswith("/"):
self.url += '/' self.url += "/"
self.token = self.get_option('token') self.token = self.get_option("token")
self.hostname = self.get_option('hostname') self.hostname = self.get_option("hostname")
self.servicename = self.get_option('servicename') self.servicename = self.get_option("servicename")
self.validate_nrdp_certs = self.get_option('validate_certs') self.validate_nrdp_certs = self.get_option("validate_certs")
if (self.url or self.token or self.hostname or if (self.url or self.token or self.hostname or self.servicename) is None:
self.servicename) is None: self._display.warning(
self._display.warning("NRDP callback wants the NRDP_URL," "NRDP callback wants the NRDP_URL,"
" NRDP_TOKEN, NRDP_HOSTNAME," " NRDP_TOKEN, NRDP_HOSTNAME,"
" NRDP_SERVICENAME" " NRDP_SERVICENAME"
" environment variables'." " environment variables'."
" The NRDP callback plugin is disabled.") " The NRDP callback plugin is disabled."
)
self.disabled = True self.disabled = True
def _send_nrdp(self, state, msg): def _send_nrdp(self, state, msg):
''' """
nrpd service check send XMLDATA like this: nrpd service check send XMLDATA like this:
<?xml version='1.0'?> <?xml version='1.0'?>
<checkresults> <checkresults>
@@ -127,7 +128,7 @@ class CallbackModule(CallbackBase):
<output>WARNING: Danger Will Robinson!|perfdata</output> <output>WARNING: Danger Will Robinson!|perfdata</output>
</checkresult> </checkresult>
</checkresults> </checkresults>
''' """
xmldata = "<?xml version='1.0'?>\n" xmldata = "<?xml version='1.0'?>\n"
xmldata += "<checkresults>\n" xmldata += "<checkresults>\n"
xmldata += "<checkresult type='service'>\n" xmldata += "<checkresult type='service'>\n"
@@ -138,31 +139,24 @@ class CallbackModule(CallbackBase):
xmldata += "</checkresult>\n" xmldata += "</checkresult>\n"
xmldata += "</checkresults>\n" xmldata += "</checkresults>\n"
body = { body = {"cmd": "submitcheck", "token": self.token, "XMLDATA": to_bytes(xmldata)}
'cmd': 'submitcheck',
'token': self.token,
'XMLDATA': to_bytes(xmldata)
}
try: try:
response = open_url(self.url, response = open_url(self.url, data=urlencode(body), method="POST", validate_certs=self.validate_nrdp_certs)
data=urlencode(body),
method='POST',
validate_certs=self.validate_nrdp_certs)
return response.read() return response.read()
except Exception as ex: except Exception as ex:
self._display.warning(f"NRDP callback cannot send result {ex}") self._display.warning(f"NRDP callback cannot send result {ex}")
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
''' """
Display Playbook and play start messages Display Playbook and play start messages
''' """
self.play = play self.play = play
def v2_playbook_on_stats(self, stats): def v2_playbook_on_stats(self, stats):
''' """
Display info about playbook statistics Display info about playbook statistics
''' """
name = self.play name = self.play
gstats = "" gstats = ""
hosts = sorted(stats.processed.keys()) hosts = sorted(stats.processed.keys())
@@ -170,13 +164,14 @@ class CallbackModule(CallbackBase):
for host in hosts: for host in hosts:
stat = stats.summarize(host) stat = stats.summarize(host)
gstats += ( gstats += (
f"'{host}_ok'={stat['ok']} '{host}_changed'={stat['changed']} '{host}_unreachable'={stat['unreachable']} '{host}_failed'={stat['failures']} " f"'{host}_ok'={stat['ok']} '{host}_changed'={stat['changed']}"
f" '{host}_unreachable'={stat['unreachable']} '{host}_failed'={stat['failures']} "
) )
# Critical when failed tasks or unreachable host # Critical when failed tasks or unreachable host
critical += stat['failures'] critical += stat["failures"]
critical += stat['unreachable'] critical += stat["unreachable"]
# Warning when changed tasks # Warning when changed tasks
warning += stat['changed'] warning += stat["changed"]
msg = f"{name} | {gstats}" msg = f"{name} | {gstats}"
if critical: if critical:

View File

@@ -20,11 +20,10 @@ from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
"""
'''
This callback won't print messages to stdout when new callback events are received. This callback won't print messages to stdout when new callback events are received.
''' """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'community.general.null' CALLBACK_NAME = "community.general.null"

View File

@@ -155,13 +155,8 @@ try:
from opentelemetry.trace.status import Status, StatusCode from opentelemetry.trace.status import Status, StatusCode
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ( from opentelemetry.sdk.trace.export import BatchSpanProcessor, SimpleSpanProcessor
BatchSpanProcessor, from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
SimpleSpanProcessor
)
from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
InMemorySpanExporter
)
except ImportError as imp_exc: except ImportError as imp_exc:
OTEL_LIBRARY_IMPORT_ERROR = imp_exc OTEL_LIBRARY_IMPORT_ERROR = imp_exc
else: else:
@@ -186,9 +181,9 @@ class TaskData:
def add_host(self, host): def add_host(self, host):
if host.uuid in self.host_data: if host.uuid in self.host_data:
if host.status == 'included': if host.status == "included":
# concatenate task include output from multiple items # concatenate task include output from multiple items
host.result = f'{self.host_data[host.uuid].result}\n{host.result}' host.result = f"{self.host_data[host.uuid].result}\n{host.result}"
else: else:
return return
@@ -223,11 +218,11 @@ class OpenTelemetrySource:
def traceparent_context(self, traceparent): def traceparent_context(self, traceparent):
carrier = dict() carrier = dict()
carrier['traceparent'] = traceparent carrier["traceparent"] = traceparent
return TraceContextTextMapPropagator().extract(carrier=carrier) return TraceContextTextMapPropagator().extract(carrier=carrier)
def start_task(self, tasks_data, hide_task_arguments, play_name, task): def start_task(self, tasks_data, hide_task_arguments, play_name, task):
""" record the start of a task for one or more hosts """ """record the start of a task for one or more hosts"""
uuid = task._uuid uuid = task._uuid
@@ -245,33 +240,35 @@ class OpenTelemetrySource:
tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args) tasks_data[uuid] = TaskData(uuid, name, path, play_name, action, args)
def finish_task(self, tasks_data, status, result, dump): def finish_task(self, tasks_data, status, result, dump):
""" record the results of a task for a single host """ """record the results of a task for a single host"""
task_uuid = result._task._uuid task_uuid = result._task._uuid
if hasattr(result, '_host') and result._host is not None: if hasattr(result, "_host") and result._host is not None:
host_uuid = result._host._uuid host_uuid = result._host._uuid
host_name = result._host.name host_name = result._host.name
else: else:
host_uuid = 'include' host_uuid = "include"
host_name = 'include' host_name = "include"
task = tasks_data[task_uuid] task = tasks_data[task_uuid]
task.dump = dump task.dump = dump
task.add_host(HostData(host_uuid, host_name, status, result)) task.add_host(HostData(host_uuid, host_name, status, result))
def generate_distributed_traces(self, def generate_distributed_traces(
otel_service_name, self,
ansible_playbook, otel_service_name,
tasks_data, ansible_playbook,
status, tasks_data,
traceparent, status,
disable_logs, traceparent,
disable_attributes_in_logs, disable_logs,
otel_exporter_otlp_traces_protocol, disable_attributes_in_logs,
store_spans_in_file): otel_exporter_otlp_traces_protocol,
""" generate distributed traces from the collected TaskData and HostData """ store_spans_in_file,
):
"""generate distributed traces from the collected TaskData and HostData"""
tasks = [] tasks = []
parent_start_time = None parent_start_time = None
@@ -280,18 +277,14 @@ class OpenTelemetrySource:
parent_start_time = task.start parent_start_time = task.start
tasks.append(task) tasks.append(task)
trace.set_tracer_provider( trace.set_tracer_provider(TracerProvider(resource=Resource.create({SERVICE_NAME: otel_service_name})))
TracerProvider(
resource=Resource.create({SERVICE_NAME: otel_service_name})
)
)
otel_exporter = None otel_exporter = None
if store_spans_in_file: if store_spans_in_file:
otel_exporter = InMemorySpanExporter() otel_exporter = InMemorySpanExporter()
processor = SimpleSpanProcessor(otel_exporter) processor = SimpleSpanProcessor(otel_exporter)
else: else:
if otel_exporter_otlp_traces_protocol == 'grpc': if otel_exporter_otlp_traces_protocol == "grpc":
otel_exporter = GRPCOTLPSpanExporter() otel_exporter = GRPCOTLPSpanExporter()
else: else:
otel_exporter = HTTPOTLPSpanExporter() otel_exporter = HTTPOTLPSpanExporter()
@@ -301,8 +294,12 @@ class OpenTelemetrySource:
tracer = trace.get_tracer(__name__) tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span(ansible_playbook, context=self.traceparent_context(traceparent), with tracer.start_as_current_span(
start_time=parent_start_time, kind=SpanKind.SERVER) as parent: ansible_playbook,
context=self.traceparent_context(traceparent),
start_time=parent_start_time,
kind=SpanKind.SERVER,
) as parent:
parent.set_status(status) parent.set_status(status)
# Populate trace metadata attributes # Populate trace metadata attributes
parent.set_attribute("ansible.version", ansible_version) parent.set_attribute("ansible.version", ansible_version)
@@ -319,36 +316,38 @@ class OpenTelemetrySource:
return otel_exporter return otel_exporter
def update_span_data(self, task_data, host_data, span, disable_logs, disable_attributes_in_logs): def update_span_data(self, task_data, host_data, span, disable_logs, disable_attributes_in_logs):
""" update the span with the given TaskData and HostData """ """update the span with the given TaskData and HostData"""
name = f'[{host_data.name}] {task_data.play}: {task_data.name}' name = f"[{host_data.name}] {task_data.play}: {task_data.name}"
message = 'success' message = "success"
res = {} res = {}
rc = 0 rc = 0
status = Status(status_code=StatusCode.OK) status = Status(status_code=StatusCode.OK)
if host_data.status != 'included': if host_data.status != "included":
# Support loops # Support loops
enriched_error_message = None enriched_error_message = None
if 'results' in host_data.result._result: if "results" in host_data.result._result:
if host_data.status == 'failed': if host_data.status == "failed":
message = self.get_error_message_from_results(host_data.result._result['results'], task_data.action) message = self.get_error_message_from_results(host_data.result._result["results"], task_data.action)
enriched_error_message = self.enrich_error_message_from_results(host_data.result._result['results'], task_data.action) enriched_error_message = self.enrich_error_message_from_results(
host_data.result._result["results"], task_data.action
)
else: else:
res = host_data.result._result res = host_data.result._result
rc = res.get('rc', 0) rc = res.get("rc", 0)
if host_data.status == 'failed': if host_data.status == "failed":
message = self.get_error_message(res) message = self.get_error_message(res)
enriched_error_message = self.enrich_error_message(res) enriched_error_message = self.enrich_error_message(res)
if host_data.status == 'failed': if host_data.status == "failed":
status = Status(status_code=StatusCode.ERROR, description=message) status = Status(status_code=StatusCode.ERROR, description=message)
# Record an exception with the task message # Record an exception with the task message
span.record_exception(BaseException(enriched_error_message)) span.record_exception(BaseException(enriched_error_message))
elif host_data.status == 'skipped': elif host_data.status == "skipped":
message = res['skip_reason'] if 'skip_reason' in res else 'skipped' message = res["skip_reason"] if "skip_reason" in res else "skipped"
status = Status(status_code=StatusCode.UNSET) status = Status(status_code=StatusCode.UNSET)
elif host_data.status == 'ignored': elif host_data.status == "ignored":
status = Status(status_code=StatusCode.UNSET) status = Status(status_code=StatusCode.UNSET)
span.set_status(status) span.set_status(status)
@@ -360,7 +359,7 @@ class OpenTelemetrySource:
"ansible.task.name": name, "ansible.task.name": name,
"ansible.task.result": rc, "ansible.task.result": rc,
"ansible.task.host.name": host_data.name, "ansible.task.host.name": host_data.name,
"ansible.task.host.status": host_data.status "ansible.task.host.status": host_data.status,
} }
if isinstance(task_data.args, dict) and "gather_facts" not in task_data.action: if isinstance(task_data.args, dict) and "gather_facts" not in task_data.action:
names = tuple(self.transform_ansible_unicode_to_str(k) for k in task_data.args.keys()) names = tuple(self.transform_ansible_unicode_to_str(k) for k in task_data.args.keys())
@@ -380,10 +379,10 @@ class OpenTelemetrySource:
span.end(end_time=host_data.finish) span.end(end_time=host_data.finish)
def set_span_attributes(self, span, attributes): def set_span_attributes(self, span, attributes):
""" update the span attributes with the given attributes if not None """ """update the span attributes with the given attributes if not None"""
if span is None and self._display is not None: if span is None and self._display is not None:
self._display.warning('span object is None. Please double check if that is expected.') self._display.warning("span object is None. Please double check if that is expected.")
else: else:
if attributes is not None: if attributes is not None:
span.set_attributes(attributes) span.set_attributes(attributes)
@@ -411,7 +410,18 @@ class OpenTelemetrySource:
@staticmethod @staticmethod
def url_from_args(args): def url_from_args(args):
# the order matters # the order matters
url_args = ("url", "api_url", "baseurl", "repo", "server_url", "chart_repo_url", "registry_url", "endpoint", "uri", "updates_url") url_args = (
"url",
"api_url",
"baseurl",
"repo",
"server_url",
"chart_repo_url",
"registry_url",
"endpoint",
"uri",
"updates_url",
)
for arg in url_args: for arg in url_args:
if args is not None and args.get(arg): if args is not None and args.get(arg):
return args.get(arg) return args.get(arg)
@@ -436,33 +446,33 @@ class OpenTelemetrySource:
@staticmethod @staticmethod
def get_error_message(result): def get_error_message(result):
if result.get('exception') is not None: if result.get("exception") is not None:
return OpenTelemetrySource._last_line(result['exception']) return OpenTelemetrySource._last_line(result["exception"])
return result.get('msg', 'failed') return result.get("msg", "failed")
@staticmethod @staticmethod
def get_error_message_from_results(results, action): def get_error_message_from_results(results, action):
for result in results: for result in results:
if result.get('failed', False): if result.get("failed", False):
return f"{action}({result.get('item', 'none')}) - {OpenTelemetrySource.get_error_message(result)}" return f"{action}({result.get('item', 'none')}) - {OpenTelemetrySource.get_error_message(result)}"
@staticmethod @staticmethod
def _last_line(text): def _last_line(text):
lines = text.strip().split('\n') lines = text.strip().split("\n")
return lines[-1] return lines[-1]
@staticmethod @staticmethod
def enrich_error_message(result): def enrich_error_message(result):
message = result.get('msg', 'failed') message = result.get("msg", "failed")
exception = result.get('exception') exception = result.get("exception")
stderr = result.get('stderr') stderr = result.get("stderr")
return f"message: \"{message}\"\nexception: \"{exception}\"\nstderr: \"{stderr}\"" return f'message: "{message}"\nexception: "{exception}"\nstderr: "{stderr}"'
@staticmethod @staticmethod
def enrich_error_message_from_results(results, action): def enrich_error_message_from_results(results, action):
message = "" message = ""
for result in results: for result in results:
if result.get('failed', False): if result.get("failed", False):
message = f"{action}({result.get('item', 'none')}) - {OpenTelemetrySource.enrich_error_message(result)}\n{message}" message = f"{action}({result.get('item', 'none')}) - {OpenTelemetrySource.enrich_error_message(result)}\n{message}"
return message return message
@@ -473,8 +483,8 @@ class CallbackModule(CallbackBase):
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.opentelemetry' CALLBACK_NAME = "community.general.opentelemetry"
CALLBACK_NEEDS_ENABLED = True CALLBACK_NEEDS_ENABLED = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -494,7 +504,7 @@ class CallbackModule(CallbackBase):
if OTEL_LIBRARY_IMPORT_ERROR: if OTEL_LIBRARY_IMPORT_ERROR:
raise AnsibleError( raise AnsibleError(
'The `opentelemetry-api`, `opentelemetry-exporter-otlp` or `opentelemetry-sdk` must be installed to use this plugin' "The `opentelemetry-api`, `opentelemetry-exporter-otlp` or `opentelemetry-sdk` must be installed to use this plugin"
) from OTEL_LIBRARY_IMPORT_ERROR ) from OTEL_LIBRARY_IMPORT_ERROR
self.tasks_data = OrderedDict() self.tasks_data = OrderedDict()
@@ -504,33 +514,33 @@ class CallbackModule(CallbackBase):
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
environment_variable = self.get_option('enable_from_environment') environment_variable = self.get_option("enable_from_environment")
if environment_variable is not None and os.environ.get(environment_variable, 'false').lower() != 'true': if environment_variable is not None and os.environ.get(environment_variable, "false").lower() != "true":
self.disabled = True self.disabled = True
self._display.warning( self._display.warning(
f"The `enable_from_environment` option has been set and {environment_variable} is not enabled. Disabling the `opentelemetry` callback plugin." f"The `enable_from_environment` option has been set and {environment_variable} is not enabled. Disabling the `opentelemetry` callback plugin."
) )
self.hide_task_arguments = self.get_option('hide_task_arguments') self.hide_task_arguments = self.get_option("hide_task_arguments")
self.disable_attributes_in_logs = self.get_option('disable_attributes_in_logs') self.disable_attributes_in_logs = self.get_option("disable_attributes_in_logs")
self.disable_logs = self.get_option('disable_logs') self.disable_logs = self.get_option("disable_logs")
self.store_spans_in_file = self.get_option('store_spans_in_file') self.store_spans_in_file = self.get_option("store_spans_in_file")
self.otel_service_name = self.get_option('otel_service_name') self.otel_service_name = self.get_option("otel_service_name")
if not self.otel_service_name: if not self.otel_service_name:
self.otel_service_name = 'ansible' self.otel_service_name = "ansible"
# See https://github.com/open-telemetry/opentelemetry-specification/issues/740 # See https://github.com/open-telemetry/opentelemetry-specification/issues/740
self.traceparent = self.get_option('traceparent') self.traceparent = self.get_option("traceparent")
self.otel_exporter_otlp_traces_protocol = self.get_option('otel_exporter_otlp_traces_protocol') self.otel_exporter_otlp_traces_protocol = self.get_option("otel_exporter_otlp_traces_protocol")
def dump_results(self, task, result): def dump_results(self, task, result):
""" dump the results if disable_logs is not enabled """ """dump the results if disable_logs is not enabled"""
if self.disable_logs: if self.disable_logs:
return "" return ""
# ansible.builtin.uri contains the response in the json field # ansible.builtin.uri contains the response in the json field
@@ -550,74 +560,40 @@ class CallbackModule(CallbackBase):
self.play_name = play.get_name() self.play_name = play.get_name()
def v2_runner_on_no_hosts(self, task): def v2_runner_on_no_hosts(self, task):
self.opentelemetry.start_task( self.opentelemetry.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_playbook_on_task_start(self, task, is_conditional): def v2_playbook_on_task_start(self, task, is_conditional):
self.opentelemetry.start_task( self.opentelemetry.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_playbook_on_cleanup_task_start(self, task): def v2_playbook_on_cleanup_task_start(self, task):
self.opentelemetry.start_task( self.opentelemetry.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_playbook_on_handler_task_start(self, task): def v2_playbook_on_handler_task_start(self, task):
self.opentelemetry.start_task( self.opentelemetry.start_task(self.tasks_data, self.hide_task_arguments, self.play_name, task)
self.tasks_data,
self.hide_task_arguments,
self.play_name,
task
)
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
if ignore_errors: if ignore_errors:
status = 'ignored' status = "ignored"
else: else:
status = 'failed' status = "failed"
self.errors += 1 self.errors += 1
self.opentelemetry.finish_task( self.opentelemetry.finish_task(
self.tasks_data, self.tasks_data, status, result, self.dump_results(self.tasks_data[result._task._uuid], result)
status,
result,
self.dump_results(self.tasks_data[result._task._uuid], result)
) )
def v2_runner_on_ok(self, result): def v2_runner_on_ok(self, result):
self.opentelemetry.finish_task( self.opentelemetry.finish_task(
self.tasks_data, self.tasks_data, "ok", result, self.dump_results(self.tasks_data[result._task._uuid], result)
'ok',
result,
self.dump_results(self.tasks_data[result._task._uuid], result)
) )
def v2_runner_on_skipped(self, result): def v2_runner_on_skipped(self, result):
self.opentelemetry.finish_task( self.opentelemetry.finish_task(
self.tasks_data, self.tasks_data, "skipped", result, self.dump_results(self.tasks_data[result._task._uuid], result)
'skipped',
result,
self.dump_results(self.tasks_data[result._task._uuid], result)
) )
def v2_playbook_on_include(self, included_file): def v2_playbook_on_include(self, included_file):
self.opentelemetry.finish_task( self.opentelemetry.finish_task(self.tasks_data, "included", included_file, "")
self.tasks_data,
'included',
included_file,
""
)
def v2_playbook_on_stats(self, stats): def v2_playbook_on_stats(self, stats):
if self.errors == 0: if self.errors == 0:
@@ -633,7 +609,7 @@ class CallbackModule(CallbackBase):
self.disable_logs, self.disable_logs,
self.disable_attributes_in_logs, self.disable_attributes_in_logs,
self.otel_exporter_otlp_traces_protocol, self.otel_exporter_otlp_traces_protocol,
self.store_spans_in_file self.store_spans_in_file,
) )
if self.store_spans_in_file: if self.store_spans_in_file:

View File

@@ -37,9 +37,10 @@ class CallbackModule(CallbackBase):
""" """
This callback module tells you how long your plays ran for. This callback module tells you how long your plays ran for.
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'aggregate' CALLBACK_TYPE = "aggregate"
CALLBACK_NAME = 'community.general.print_task' CALLBACK_NAME = "community.general.print_task"
CALLBACK_NEEDS_ENABLED = True CALLBACK_NEEDS_ENABLED = True
@@ -48,7 +49,7 @@ class CallbackModule(CallbackBase):
self._printed_message = False self._printed_message = False
def _print_task(self, task): def _print_task(self, task):
if hasattr(task, '_ds'): if hasattr(task, "_ds"):
task_snippet = load(str([task._ds.copy()]), Loader=SafeLoader) task_snippet = load(str([task._ds.copy()]), Loader=SafeLoader)
task_yaml = dump(task_snippet, sort_keys=False, Dumper=SafeDumper) task_yaml = dump(task_snippet, sort_keys=False, Dumper=SafeDumper)
self._display.display(f"\n{task_yaml}\n") self._display.display(f"\n{task_yaml}\n")

View File

@@ -30,13 +30,13 @@ class CallbackModule(CallbackBase):
""" """
makes Ansible much more exciting. makes Ansible much more exciting.
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.say' CALLBACK_NAME = "community.general.say"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self): def __init__(self):
super().__init__() super().__init__()
self.FAILED_VOICE = None self.FAILED_VOICE = None
@@ -45,21 +45,23 @@ class CallbackModule(CallbackBase):
self.LASER_VOICE = None self.LASER_VOICE = None
try: try:
self.synthesizer = get_bin_path('say') self.synthesizer = get_bin_path("say")
if platform.system() != 'Darwin': if platform.system() != "Darwin":
# 'say' binary available, it might be GNUstep tool which doesn't support 'voice' parameter # 'say' binary available, it might be GNUstep tool which doesn't support 'voice' parameter
self._display.warning(f"'say' executable found but system is '{platform.system()}': ignoring voice parameter") self._display.warning(
f"'say' executable found but system is '{platform.system()}': ignoring voice parameter"
)
else: else:
self.FAILED_VOICE = 'Zarvox' self.FAILED_VOICE = "Zarvox"
self.REGULAR_VOICE = 'Trinoids' self.REGULAR_VOICE = "Trinoids"
self.HAPPY_VOICE = 'Cellos' self.HAPPY_VOICE = "Cellos"
self.LASER_VOICE = 'Princess' self.LASER_VOICE = "Princess"
except ValueError: except ValueError:
try: try:
self.synthesizer = get_bin_path('espeak') self.synthesizer = get_bin_path("espeak")
self.FAILED_VOICE = 'klatt' self.FAILED_VOICE = "klatt"
self.HAPPY_VOICE = 'f5' self.HAPPY_VOICE = "f5"
self.LASER_VOICE = 'whisper' self.LASER_VOICE = "whisper"
except ValueError: except ValueError:
self.synthesizer = None self.synthesizer = None
@@ -67,12 +69,14 @@ class CallbackModule(CallbackBase):
# ansible will not call any callback if disabled is set to True # ansible will not call any callback if disabled is set to True
if not self.synthesizer: if not self.synthesizer:
self.disabled = True self.disabled = True
self._display.warning(f"Unable to find either 'say' or 'espeak' executable, plugin {os.path.basename(__file__)} disabled") self._display.warning(
f"Unable to find either 'say' or 'espeak' executable, plugin {os.path.basename(__file__)} disabled"
)
def say(self, msg, voice): def say(self, msg, voice):
cmd = [self.synthesizer, msg] cmd = [self.synthesizer, msg]
if voice: if voice:
cmd.extend(('-v', voice)) cmd.extend(("-v", voice))
subprocess.call(cmd) subprocess.call(cmd)
def runner_on_failed(self, host, res, ignore_errors=False): def runner_on_failed(self, host, res, ignore_errors=False):

View File

@@ -45,14 +45,14 @@ from ansible.module_utils.common.text.converters import to_text
DONT_COLORIZE = False DONT_COLORIZE = False
COLORS = { COLORS = {
'normal': '\033[0m', "normal": "\033[0m",
'ok': f'\x1b[{C.COLOR_CODES[C.COLOR_OK]}m', # type: ignore "ok": f"\x1b[{C.COLOR_CODES[C.COLOR_OK]}m", # type: ignore
'bold': '\033[1m', "bold": "\033[1m",
'not_so_bold': '\033[1m\033[34m', "not_so_bold": "\033[1m\033[34m",
'changed': f'\x1b[{C.COLOR_CODES[C.COLOR_CHANGED]}m', # type: ignore "changed": f"\x1b[{C.COLOR_CODES[C.COLOR_CHANGED]}m", # type: ignore
'failed': f'\x1b[{C.COLOR_CODES[C.COLOR_ERROR]}m', # type: ignore "failed": f"\x1b[{C.COLOR_CODES[C.COLOR_ERROR]}m", # type: ignore
'endc': '\033[0m', "endc": "\033[0m",
'skipped': f'\x1b[{C.COLOR_CODES[C.COLOR_SKIP]}m', # type: ignore "skipped": f"\x1b[{C.COLOR_CODES[C.COLOR_SKIP]}m", # type: ignore
} }
@@ -78,8 +78,8 @@ class CallbackModule(CallbackBase):
"""selective.py callback plugin.""" """selective.py callback plugin."""
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'community.general.selective' CALLBACK_NAME = "community.general.selective"
def __init__(self, display=None): def __init__(self, display=None):
"""selective.py callback plugin.""" """selective.py callback plugin."""
@@ -89,11 +89,10 @@ class CallbackModule(CallbackBase):
self.printed_last_task = False self.printed_last_task = False
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
global DONT_COLORIZE global DONT_COLORIZE
DONT_COLORIZE = self.get_option('nocolor') DONT_COLORIZE = self.get_option("nocolor")
def _print_task(self, task_name=None): def _print_task(self, task_name=None):
if task_name is None: if task_name is None:
@@ -105,7 +104,7 @@ class CallbackModule(CallbackBase):
if self.last_skipped: if self.last_skipped:
print() print()
line = f"# {task_name} " line = f"# {task_name} "
msg = colorize(f"{line}{'*' * (line_length - len(line))}", 'bold') msg = colorize(f"{line}{'*' * (line_length - len(line))}", "bold")
print(msg) print(msg)
def _indent_text(self, text, indent_level): def _indent_text(self, text, indent_level):
@@ -113,48 +112,51 @@ class CallbackModule(CallbackBase):
result_lines = [] result_lines = []
for l in lines: for l in lines:
result_lines.append(f"{' ' * indent_level}{l}") result_lines.append(f"{' ' * indent_level}{l}")
return '\n'.join(result_lines) return "\n".join(result_lines)
def _print_diff(self, diff, indent_level): def _print_diff(self, diff, indent_level):
if isinstance(diff, dict): if isinstance(diff, dict):
try: try:
diff = '\n'.join(difflib.unified_diff(diff['before'].splitlines(), diff = "\n".join(
diff['after'].splitlines(), difflib.unified_diff(
fromfile=diff.get('before_header', diff["before"].splitlines(),
'new_file'), diff["after"].splitlines(),
tofile=diff['after_header'])) fromfile=diff.get("before_header", "new_file"),
tofile=diff["after_header"],
)
)
except AttributeError: except AttributeError:
diff = dict_diff(diff['before'], diff['after']) diff = dict_diff(diff["before"], diff["after"])
if diff: if diff:
diff = colorize(str(diff), 'changed') diff = colorize(str(diff), "changed")
print(self._indent_text(diff, indent_level + 4)) print(self._indent_text(diff, indent_level + 4))
def _print_host_or_item(self, host_or_item, changed, msg, diff, is_host, error, stdout, stderr): def _print_host_or_item(self, host_or_item, changed, msg, diff, is_host, error, stdout, stderr):
if is_host: if is_host:
indent_level = 0 indent_level = 0
name = colorize(host_or_item.name, 'not_so_bold') name = colorize(host_or_item.name, "not_so_bold")
else: else:
indent_level = 4 indent_level = 4
if isinstance(host_or_item, dict): if isinstance(host_or_item, dict):
if 'key' in host_or_item.keys(): if "key" in host_or_item.keys():
host_or_item = host_or_item['key'] host_or_item = host_or_item["key"]
name = colorize(to_text(host_or_item), 'bold') name = colorize(to_text(host_or_item), "bold")
if error: if error:
color = 'failed' color = "failed"
change_string = colorize('FAILED!!!', color) change_string = colorize("FAILED!!!", color)
else: else:
color = 'changed' if changed else 'ok' color = "changed" if changed else "ok"
change_string = colorize(f"changed={changed}", color) change_string = colorize(f"changed={changed}", color)
msg = colorize(msg, color) msg = colorize(msg, color)
line_length = 120 line_length = 120
spaces = ' ' * (40 - len(name) - indent_level) spaces = " " * (40 - len(name) - indent_level)
line = f"{' ' * indent_level} * {name}{spaces}- {change_string}" line = f"{' ' * indent_level} * {name}{spaces}- {change_string}"
if len(msg) < 50: if len(msg) < 50:
line += f' -- {msg}' line += f" -- {msg}"
print(f"{line} {'-' * (line_length - len(line))}---------") print(f"{line} {'-' * (line_length - len(line))}---------")
else: else:
print(f"{line} {'-' * (line_length - len(line))}") print(f"{line} {'-' * (line_length - len(line))}")
@@ -163,10 +165,10 @@ class CallbackModule(CallbackBase):
if diff: if diff:
self._print_diff(diff, indent_level) self._print_diff(diff, indent_level)
if stdout: if stdout:
stdout = colorize(stdout, 'failed') stdout = colorize(stdout, "failed")
print(self._indent_text(stdout, indent_level + 4)) print(self._indent_text(stdout, indent_level + 4))
if stderr: if stderr:
stderr = colorize(stderr, 'failed') stderr = colorize(stderr, "failed")
print(self._indent_text(stderr, indent_level + 4)) print(self._indent_text(stderr, indent_level + 4))
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
@@ -181,61 +183,61 @@ class CallbackModule(CallbackBase):
def _print_task_result(self, result, error=False, **kwargs): def _print_task_result(self, result, error=False, **kwargs):
"""Run when a task finishes correctly.""" """Run when a task finishes correctly."""
if 'print_action' in result._task.tags or error or self._display.verbosity > 1: if "print_action" in result._task.tags or error or self._display.verbosity > 1:
self._print_task() self._print_task()
self.last_skipped = False self.last_skipped = False
msg = to_text(result._result.get('msg', '')) or\ msg = to_text(result._result.get("msg", "")) or to_text(result._result.get("reason", ""))
to_text(result._result.get('reason', ''))
stderr = [result._result.get('exception', None), stderr = [result._result.get("exception", None), result._result.get("module_stderr", None)]
result._result.get('module_stderr', None)]
stderr = "\n".join([e for e in stderr if e]).strip() stderr = "\n".join([e for e in stderr if e]).strip()
self._print_host_or_item(result._host, self._print_host_or_item(
result._result.get('changed', False), result._host,
msg, result._result.get("changed", False),
result._result.get('diff', None), msg,
is_host=True, result._result.get("diff", None),
error=error, is_host=True,
stdout=result._result.get('module_stdout', None), error=error,
stderr=stderr.strip(), stdout=result._result.get("module_stdout", None),
) stderr=stderr.strip(),
if 'results' in result._result: )
for r in result._result['results']: if "results" in result._result:
failed = 'failed' in r and r['failed'] for r in result._result["results"]:
failed = "failed" in r and r["failed"]
stderr = [r.get('exception', None), r.get('module_stderr', None)] stderr = [r.get("exception", None), r.get("module_stderr", None)]
stderr = "\n".join([e for e in stderr if e]).strip() stderr = "\n".join([e for e in stderr if e]).strip()
self._print_host_or_item(r[r['ansible_loop_var']], self._print_host_or_item(
r.get('changed', False), r[r["ansible_loop_var"]],
to_text(r.get('msg', '')), r.get("changed", False),
r.get('diff', None), to_text(r.get("msg", "")),
is_host=False, r.get("diff", None),
error=failed, is_host=False,
stdout=r.get('module_stdout', None), error=failed,
stderr=stderr.strip(), stdout=r.get("module_stdout", None),
) stderr=stderr.strip(),
)
else: else:
self.last_skipped = True self.last_skipped = True
print('.', end="") print(".", end="")
def v2_playbook_on_stats(self, stats): def v2_playbook_on_stats(self, stats):
"""Display info about playbook statistics.""" """Display info about playbook statistics."""
print() print()
self.printed_last_task = False self.printed_last_task = False
self._print_task('STATS') self._print_task("STATS")
hosts = sorted(stats.processed.keys()) hosts = sorted(stats.processed.keys())
for host in hosts: for host in hosts:
s = stats.summarize(host) s = stats.summarize(host)
if s['failures'] or s['unreachable']: if s["failures"] or s["unreachable"]:
color = 'failed' color = "failed"
elif s['changed']: elif s["changed"]:
color = 'changed' color = "changed"
else: else:
color = 'ok' color = "ok"
msg = ( msg = (
f"{host} : ok={s['ok']}\tchanged={s['changed']}\tfailed={s['failures']}\tunreachable=" f"{host} : ok={s['ok']}\tchanged={s['changed']}\tfailed={s['failures']}\tunreachable="
@@ -250,14 +252,13 @@ class CallbackModule(CallbackBase):
self.last_skipped = False self.last_skipped = False
line_length = 120 line_length = 120
spaces = ' ' * (31 - len(result._host.name) - 4) spaces = " " * (31 - len(result._host.name) - 4)
line = f" * {colorize(result._host.name, 'not_so_bold')}{spaces}- {colorize('skipped', 'skipped')}" line = f" * {colorize(result._host.name, 'not_so_bold')}{spaces}- {colorize('skipped', 'skipped')}"
reason = result._result.get('skipped_reason', '') or \ reason = result._result.get("skipped_reason", "") or result._result.get("skip_reason", "")
result._result.get('skip_reason', '')
if len(reason) < 50: if len(reason) < 50:
line += f' -- {reason}' line += f" -- {reason}"
print(f"{line} {'-' * (line_length - len(line))}---------") print(f"{line} {'-' * (line_length - len(line))}---------")
else: else:
print(f"{line} {'-' * (line_length - len(line))}") print(f"{line} {'-' * (line_length - len(line))}")

View File

@@ -70,6 +70,7 @@ from ansible.plugins.callback import CallbackBase
try: try:
import prettytable import prettytable
HAS_PRETTYTABLE = True HAS_PRETTYTABLE = True
except ImportError: except ImportError:
HAS_PRETTYTABLE = False HAS_PRETTYTABLE = False
@@ -79,20 +80,20 @@ class CallbackModule(CallbackBase):
"""This is an ansible callback plugin that sends status """This is an ansible callback plugin that sends status
updates to a Slack channel during playbook execution. updates to a Slack channel during playbook execution.
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.slack' CALLBACK_NAME = "community.general.slack"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
super().__init__(display=display) super().__init__(display=display)
if not HAS_PRETTYTABLE: if not HAS_PRETTYTABLE:
self.disabled = True self.disabled = True
self._display.warning('The `prettytable` python module is not ' self._display.warning(
'installed. Disabling the Slack callback ' "The `prettytable` python module is not installed. Disabling the Slack callback plugin."
'plugin.') )
self.playbook_name = None self.playbook_name = None
@@ -102,34 +103,34 @@ class CallbackModule(CallbackBase):
self.guid = uuid.uuid4().hex[:6] self.guid = uuid.uuid4().hex[:6]
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.webhook_url = self.get_option('webhook_url') self.webhook_url = self.get_option("webhook_url")
self.channel = self.get_option('channel') self.channel = self.get_option("channel")
self.username = self.get_option('username') self.username = self.get_option("username")
self.show_invocation = (self._display.verbosity > 1) self.show_invocation = self._display.verbosity > 1
self.validate_certs = self.get_option('validate_certs') self.validate_certs = self.get_option("validate_certs")
self.http_agent = self.get_option('http_agent') self.http_agent = self.get_option("http_agent")
if self.webhook_url is None: if self.webhook_url is None:
self.disabled = True self.disabled = True
self._display.warning('Slack Webhook URL was not provided. The ' self._display.warning(
'Slack Webhook URL can be provided using ' "Slack Webhook URL was not provided. The "
'the `SLACK_WEBHOOK_URL` environment ' "Slack Webhook URL can be provided using "
'variable.') "the `SLACK_WEBHOOK_URL` environment "
"variable."
)
def send_msg(self, attachments): def send_msg(self, attachments):
headers = { headers = {
'Content-type': 'application/json', "Content-type": "application/json",
} }
payload = { payload = {
'channel': self.channel, "channel": self.channel,
'username': self.username, "username": self.username,
'attachments': attachments, "attachments": attachments,
'parse': 'none', "parse": "none",
'icon_url': ('https://cdn2.hubspot.net/hub/330046/' "icon_url": ("https://cdn2.hubspot.net/hub/330046/file-449187601-png/ansible_badge.png"),
'file-449187601-png/ansible_badge.png'),
} }
data = json.dumps(payload) data = json.dumps(payload)
@@ -145,67 +146,63 @@ class CallbackModule(CallbackBase):
) )
return response.read() return response.read()
except Exception as e: except Exception as e:
self._display.warning(f'Could not submit message to Slack: {e}') self._display.warning(f"Could not submit message to Slack: {e}")
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self.playbook_name = os.path.basename(playbook._file_name) self.playbook_name = os.path.basename(playbook._file_name)
title = [ title = [f"*Playbook initiated* (_{self.guid}_)"]
f'*Playbook initiated* (_{self.guid}_)'
]
invocation_items = [] invocation_items = []
if context.CLIARGS and self.show_invocation: if context.CLIARGS and self.show_invocation:
tags = context.CLIARGS['tags'] tags = context.CLIARGS["tags"]
skip_tags = context.CLIARGS['skip_tags'] skip_tags = context.CLIARGS["skip_tags"]
extra_vars = context.CLIARGS['extra_vars'] extra_vars = context.CLIARGS["extra_vars"]
subset = context.CLIARGS['subset'] subset = context.CLIARGS["subset"]
inventory = [os.path.abspath(i) for i in context.CLIARGS['inventory']] inventory = [os.path.abspath(i) for i in context.CLIARGS["inventory"]]
invocation_items.append(f"Inventory: {', '.join(inventory)}") invocation_items.append(f"Inventory: {', '.join(inventory)}")
if tags and tags != ['all']: if tags and tags != ["all"]:
invocation_items.append(f"Tags: {', '.join(tags)}") invocation_items.append(f"Tags: {', '.join(tags)}")
if skip_tags: if skip_tags:
invocation_items.append(f"Skip Tags: {', '.join(skip_tags)}") invocation_items.append(f"Skip Tags: {', '.join(skip_tags)}")
if subset: if subset:
invocation_items.append(f'Limit: {subset}') invocation_items.append(f"Limit: {subset}")
if extra_vars: if extra_vars:
invocation_items.append(f"Extra Vars: {' '.join(extra_vars)}") invocation_items.append(f"Extra Vars: {' '.join(extra_vars)}")
title.append(f"by *{context.CLIARGS['remote_user']}*") title.append(f"by *{context.CLIARGS['remote_user']}*")
title.append(f'\n\n*{self.playbook_name}*') title.append(f"\n\n*{self.playbook_name}*")
msg_items = [' '.join(title)] msg_items = [" ".join(title)]
if invocation_items: if invocation_items:
_inv_item = '\n'.join(invocation_items) _inv_item = "\n".join(invocation_items)
msg_items.append(f'```\n{_inv_item}\n```') msg_items.append(f"```\n{_inv_item}\n```")
msg = '\n'.join(msg_items) msg = "\n".join(msg_items)
attachments = [{ attachments = [
'fallback': msg, {
'fields': [ "fallback": msg,
{ "fields": [{"value": msg}],
'value': msg "color": "warning",
} "mrkdwn_in": ["text", "fallback", "fields"],
], }
'color': 'warning', ]
'mrkdwn_in': ['text', 'fallback', 'fields'],
}]
self.send_msg(attachments=attachments) self.send_msg(attachments=attachments)
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
"""Display Play start messages""" """Display Play start messages"""
name = play.name or f'Play name not specified ({play._uuid})' name = play.name or f"Play name not specified ({play._uuid})"
msg = f'*Starting play* (_{self.guid}_)\n\n*{name}*' msg = f"*Starting play* (_{self.guid}_)\n\n*{name}*"
attachments = [ attachments = [
{ {
'fallback': msg, "fallback": msg,
'text': msg, "text": msg,
'color': 'warning', "color": "warning",
'mrkdwn_in': ['text', 'fallback', 'fields'], "mrkdwn_in": ["text", "fallback", "fields"],
} }
] ]
self.send_msg(attachments=attachments) self.send_msg(attachments=attachments)
@@ -215,8 +212,7 @@ class CallbackModule(CallbackBase):
hosts = sorted(stats.processed.keys()) hosts = sorted(stats.processed.keys())
t = prettytable.PrettyTable(['Host', 'Ok', 'Changed', 'Unreachable', t = prettytable.PrettyTable(["Host", "Ok", "Changed", "Unreachable", "Failures", "Rescued", "Ignored"])
'Failures', 'Rescued', 'Ignored'])
failures = False failures = False
unreachable = False unreachable = False
@@ -224,38 +220,28 @@ class CallbackModule(CallbackBase):
for h in hosts: for h in hosts:
s = stats.summarize(h) s = stats.summarize(h)
if s['failures'] > 0: if s["failures"] > 0:
failures = True failures = True
if s['unreachable'] > 0: if s["unreachable"] > 0:
unreachable = True unreachable = True
t.add_row([h] + [s[k] for k in ['ok', 'changed', 'unreachable', t.add_row([h] + [s[k] for k in ["ok", "changed", "unreachable", "failures", "rescued", "ignored"]])
'failures', 'rescued', 'ignored']])
attachments = [] attachments = []
msg_items = [ msg_items = [f"*Playbook Complete* (_{self.guid}_)"]
f'*Playbook Complete* (_{self.guid}_)'
]
if failures or unreachable: if failures or unreachable:
color = 'danger' color = "danger"
msg_items.append('\n*Failed!*') msg_items.append("\n*Failed!*")
else: else:
color = 'good' color = "good"
msg_items.append('\n*Success!*') msg_items.append("\n*Success!*")
msg_items.append(f'```\n{t}\n```') msg_items.append(f"```\n{t}\n```")
msg = '\n'.join(msg_items) msg = "\n".join(msg_items)
attachments.append({ attachments.append(
'fallback': msg, {"fallback": msg, "fields": [{"value": msg}], "color": color, "mrkdwn_in": ["text", "fallback", "fields"]}
'fields': [ )
{
'value': msg
}
],
'color': color,
'mrkdwn_in': ['text', 'fallback', 'fields']
})
self.send_msg(attachments=attachments) self.send_msg(attachments=attachments)

View File

@@ -110,7 +110,7 @@ class SplunkHTTPCollectorSource:
self.user = getpass.getuser() self.user = getpass.getuser()
def send_event(self, url, authtoken, validate_certs, include_milliseconds, batch, state, result, runtime): def send_event(self, url, authtoken, validate_certs, include_milliseconds, batch, state, result, runtime):
if result._task_fields['args'].get('_ansible_check_mode') is True: if result._task_fields["args"].get("_ansible_check_mode") is True:
self.ansible_check_mode = True self.ansible_check_mode = True
if result._task._role: if result._task._role:
@@ -118,33 +118,33 @@ class SplunkHTTPCollectorSource:
else: else:
ansible_role = None ansible_role = None
if 'args' in result._task_fields: if "args" in result._task_fields:
del result._task_fields['args'] del result._task_fields["args"]
data = {} data = {}
data['uuid'] = result._task._uuid data["uuid"] = result._task._uuid
data['session'] = self.session data["session"] = self.session
if batch is not None: if batch is not None:
data['batch'] = batch data["batch"] = batch
data['status'] = state data["status"] = state
if include_milliseconds: if include_milliseconds:
time_format = '%Y-%m-%d %H:%M:%S.%f +0000' time_format = "%Y-%m-%d %H:%M:%S.%f +0000"
else: else:
time_format = '%Y-%m-%d %H:%M:%S +0000' time_format = "%Y-%m-%d %H:%M:%S +0000"
data['timestamp'] = now().strftime(time_format) data["timestamp"] = now().strftime(time_format)
data['host'] = self.host data["host"] = self.host
data['ip_address'] = self.ip_address data["ip_address"] = self.ip_address
data['user'] = self.user data["user"] = self.user
data['runtime'] = runtime data["runtime"] = runtime
data['ansible_version'] = ansible_version data["ansible_version"] = ansible_version
data['ansible_check_mode'] = self.ansible_check_mode data["ansible_check_mode"] = self.ansible_check_mode
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_playbook'] = self.ansible_playbook data["ansible_playbook"] = self.ansible_playbook
data['ansible_role'] = ansible_role data["ansible_role"] = ansible_role
data['ansible_task'] = result._task_fields data["ansible_task"] = result._task_fields
data['ansible_result'] = result._result data["ansible_result"] = result._result
# This wraps the json payload in and outer json event needed by Splunk # This wraps the json payload in and outer json event needed by Splunk
jsondata = json.dumps({"event": data}, cls=AnsibleJSONEncoder, sort_keys=True) jsondata = json.dumps({"event": data}, cls=AnsibleJSONEncoder, sort_keys=True)
@@ -152,19 +152,16 @@ class SplunkHTTPCollectorSource:
open_url( open_url(
url, url,
jsondata, jsondata,
headers={ headers={"Content-type": "application/json", "Authorization": f"Splunk {authtoken}"},
'Content-type': 'application/json', method="POST",
'Authorization': f"Splunk {authtoken}" validate_certs=validate_certs,
},
method='POST',
validate_certs=validate_certs
) )
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.splunk' CALLBACK_NAME = "community.general.splunk"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -178,39 +175,40 @@ class CallbackModule(CallbackBase):
self.splunk = SplunkHTTPCollectorSource() self.splunk = SplunkHTTPCollectorSource()
def _runtime(self, result): def _runtime(self, result):
return ( return (now() - self.start_datetimes[result._task._uuid]).total_seconds()
now() -
self.start_datetimes[result._task._uuid]
).total_seconds()
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.url = self.get_option('url') self.url = self.get_option("url")
if self.url is None: if self.url is None:
self.disabled = True self.disabled = True
self._display.warning('Splunk HTTP collector source URL was ' self._display.warning(
'not provided. The Splunk HTTP collector ' "Splunk HTTP collector source URL was "
'source URL can be provided using the ' "not provided. The Splunk HTTP collector "
'`SPLUNK_URL` environment variable or ' "source URL can be provided using the "
'in the ansible.cfg file.') "`SPLUNK_URL` environment variable or "
"in the ansible.cfg file."
)
self.authtoken = self.get_option('authtoken') self.authtoken = self.get_option("authtoken")
if self.authtoken is None: if self.authtoken is None:
self.disabled = True self.disabled = True
self._display.warning('Splunk HTTP collector requires an authentication' self._display.warning(
'token. The Splunk HTTP collector ' "Splunk HTTP collector requires an authentication"
'authentication token can be provided using the ' "token. The Splunk HTTP collector "
'`SPLUNK_AUTHTOKEN` environment variable or ' "authentication token can be provided using the "
'in the ansible.cfg file.') "`SPLUNK_AUTHTOKEN` environment variable or "
"in the ansible.cfg file."
)
self.validate_certs = self.get_option('validate_certs') self.validate_certs = self.get_option("validate_certs")
self.include_milliseconds = self.get_option('include_milliseconds') self.include_milliseconds = self.get_option("include_milliseconds")
self.batch = self.get_option('batch') self.batch = self.get_option("batch")
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self.splunk.ansible_playbook = basename(playbook._file_name) self.splunk.ansible_playbook = basename(playbook._file_name)
@@ -228,9 +226,9 @@ class CallbackModule(CallbackBase):
self.validate_certs, self.validate_certs,
self.include_milliseconds, self.include_milliseconds,
self.batch, self.batch,
'OK', "OK",
result, result,
self._runtime(result) self._runtime(result),
) )
def v2_runner_on_skipped(self, result, **kwargs): def v2_runner_on_skipped(self, result, **kwargs):
@@ -240,9 +238,9 @@ class CallbackModule(CallbackBase):
self.validate_certs, self.validate_certs,
self.include_milliseconds, self.include_milliseconds,
self.batch, self.batch,
'SKIPPED', "SKIPPED",
result, result,
self._runtime(result) self._runtime(result),
) )
def v2_runner_on_failed(self, result, **kwargs): def v2_runner_on_failed(self, result, **kwargs):
@@ -252,9 +250,9 @@ class CallbackModule(CallbackBase):
self.validate_certs, self.validate_certs,
self.include_milliseconds, self.include_milliseconds,
self.batch, self.batch,
'FAILED', "FAILED",
result, result,
self._runtime(result) self._runtime(result),
) )
def runner_on_async_failed(self, result, **kwargs): def runner_on_async_failed(self, result, **kwargs):
@@ -264,9 +262,9 @@ class CallbackModule(CallbackBase):
self.validate_certs, self.validate_certs,
self.include_milliseconds, self.include_milliseconds,
self.batch, self.batch,
'FAILED', "FAILED",
result, result,
self._runtime(result) self._runtime(result),
) )
def v2_runner_on_unreachable(self, result, **kwargs): def v2_runner_on_unreachable(self, result, **kwargs):
@@ -276,7 +274,7 @@ class CallbackModule(CallbackBase):
self.validate_certs, self.validate_certs,
self.include_milliseconds, self.include_milliseconds,
self.batch, self.batch,
'UNREACHABLE', "UNREACHABLE",
result, result,
self._runtime(result) self._runtime(result),
) )

View File

@@ -67,7 +67,7 @@ class SumologicHTTPCollectorSource:
self.user = getpass.getuser() self.user = getpass.getuser()
def send_event(self, url, state, result, runtime): def send_event(self, url, state, result, runtime):
if result._task_fields['args'].get('_ansible_check_mode') is True: if result._task_fields["args"].get("_ansible_check_mode") is True:
self.ansible_check_mode = True self.ansible_check_mode = True
if result._task._role: if result._task._role:
@@ -75,41 +75,38 @@ class SumologicHTTPCollectorSource:
else: else:
ansible_role = None ansible_role = None
if 'args' in result._task_fields: if "args" in result._task_fields:
del result._task_fields['args'] del result._task_fields["args"]
data = {} data = {}
data['uuid'] = result._task._uuid data["uuid"] = result._task._uuid
data['session'] = self.session data["session"] = self.session
data['status'] = state data["status"] = state
data['timestamp'] = now().strftime('%Y-%m-%d %H:%M:%S +0000') data["timestamp"] = now().strftime("%Y-%m-%d %H:%M:%S +0000")
data['host'] = self.host data["host"] = self.host
data['ip_address'] = self.ip_address data["ip_address"] = self.ip_address
data['user'] = self.user data["user"] = self.user
data['runtime'] = runtime data["runtime"] = runtime
data['ansible_version'] = ansible_version data["ansible_version"] = ansible_version
data['ansible_check_mode'] = self.ansible_check_mode data["ansible_check_mode"] = self.ansible_check_mode
data['ansible_host'] = result._host.name data["ansible_host"] = result._host.name
data['ansible_playbook'] = self.ansible_playbook data["ansible_playbook"] = self.ansible_playbook
data['ansible_role'] = ansible_role data["ansible_role"] = ansible_role
data['ansible_task'] = result._task_fields data["ansible_task"] = result._task_fields
data['ansible_result'] = result._result data["ansible_result"] = result._result
open_url( open_url(
url, url,
data=json.dumps(data, cls=AnsibleJSONEncoder, sort_keys=True), data=json.dumps(data, cls=AnsibleJSONEncoder, sort_keys=True),
headers={ headers={"Content-type": "application/json", "X-Sumo-Host": data["ansible_host"]},
'Content-type': 'application/json', method="POST",
'X-Sumo-Host': data['ansible_host']
},
method='POST'
) )
class CallbackModule(CallbackBase): class CallbackModule(CallbackBase):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.sumologic' CALLBACK_NAME = "community.general.sumologic"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self, display=None): def __init__(self, display=None):
@@ -119,23 +116,22 @@ class CallbackModule(CallbackBase):
self.sumologic = SumologicHTTPCollectorSource() self.sumologic = SumologicHTTPCollectorSource()
def _runtime(self, result): def _runtime(self, result):
return ( return (now() - self.start_datetimes[result._task._uuid]).total_seconds()
now() -
self.start_datetimes[result._task._uuid]
).total_seconds()
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
self.url = self.get_option('url') self.url = self.get_option("url")
if self.url is None: if self.url is None:
self.disabled = True self.disabled = True
self._display.warning('Sumologic HTTP collector source URL was ' self._display.warning(
'not provided. The Sumologic HTTP collector ' "Sumologic HTTP collector source URL was "
'source URL can be provided using the ' "not provided. The Sumologic HTTP collector "
'`SUMOLOGIC_URL` environment variable or ' "source URL can be provided using the "
'in the ansible.cfg file.') "`SUMOLOGIC_URL` environment variable or "
"in the ansible.cfg file."
)
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
self.sumologic.ansible_playbook = basename(playbook._file_name) self.sumologic.ansible_playbook = basename(playbook._file_name)
@@ -147,41 +143,16 @@ class CallbackModule(CallbackBase):
self.start_datetimes[task._uuid] = now() self.start_datetimes[task._uuid] = now()
def v2_runner_on_ok(self, result, **kwargs): def v2_runner_on_ok(self, result, **kwargs):
self.sumologic.send_event( self.sumologic.send_event(self.url, "OK", result, self._runtime(result))
self.url,
'OK',
result,
self._runtime(result)
)
def v2_runner_on_skipped(self, result, **kwargs): def v2_runner_on_skipped(self, result, **kwargs):
self.sumologic.send_event( self.sumologic.send_event(self.url, "SKIPPED", result, self._runtime(result))
self.url,
'SKIPPED',
result,
self._runtime(result)
)
def v2_runner_on_failed(self, result, **kwargs): def v2_runner_on_failed(self, result, **kwargs):
self.sumologic.send_event( self.sumologic.send_event(self.url, "FAILED", result, self._runtime(result))
self.url,
'FAILED',
result,
self._runtime(result)
)
def runner_on_async_failed(self, result, **kwargs): def runner_on_async_failed(self, result, **kwargs):
self.sumologic.send_event( self.sumologic.send_event(self.url, "FAILED", result, self._runtime(result))
self.url,
'FAILED',
result,
self._runtime(result)
)
def v2_runner_on_unreachable(self, result, **kwargs): def v2_runner_on_unreachable(self, result, **kwargs):
self.sumologic.send_event( self.sumologic.send_event(self.url, "UNREACHABLE", result, self._runtime(result))
self.url,
'UNREACHABLE',
result,
self._runtime(result)
)

View File

@@ -68,62 +68,89 @@ class CallbackModule(CallbackBase):
""" """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification' CALLBACK_TYPE = "notification"
CALLBACK_NAME = 'community.general.syslog_json' CALLBACK_NAME = "community.general.syslog_json"
CALLBACK_NEEDS_WHITELIST = True CALLBACK_NEEDS_WHITELIST = True
def __init__(self): def __init__(self):
super().__init__() super().__init__()
def set_options(self, task_keys=None, var_options=None, direct=None): def set_options(self, task_keys=None, var_options=None, direct=None):
super().set_options(task_keys=task_keys, var_options=var_options, direct=direct) super().set_options(task_keys=task_keys, var_options=var_options, direct=direct)
syslog_host = self.get_option("server") syslog_host = self.get_option("server")
syslog_port = int(self.get_option("port")) syslog_port = int(self.get_option("port"))
syslog_facility = self.get_option("facility") syslog_facility = self.get_option("facility")
self.logger = logging.getLogger('ansible logger') self.logger = logging.getLogger("ansible logger")
self.logger.setLevel(logging.DEBUG) self.logger.setLevel(logging.DEBUG)
self.handler = logging.handlers.SysLogHandler( self.handler = logging.handlers.SysLogHandler(address=(syslog_host, syslog_port), facility=syslog_facility)
address=(syslog_host, syslog_port),
facility=syslog_facility
)
self.logger.addHandler(self.handler) self.logger.addHandler(self.handler)
self.hostname = socket.gethostname() self.hostname = socket.gethostname()
def v2_runner_on_failed(self, result, ignore_errors=False): def v2_runner_on_failed(self, result, ignore_errors=False):
res = result._result res = result._result
host = result._host.get_name() host = result._host.get_name()
self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s', self.hostname, host, self._dump_results(res)) self.logger.error(
"%s ansible-command: task execution FAILED; host: %s; message: %s",
self.hostname,
host,
self._dump_results(res),
)
def v2_runner_on_ok(self, result): def v2_runner_on_ok(self, result):
res = result._result res = result._result
host = result._host.get_name() host = result._host.get_name()
if result._task.action != "gather_facts" or self.get_option("setup"): if result._task.action != "gather_facts" or self.get_option("setup"):
self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s', self.hostname, host, self._dump_results(res)) self.logger.info(
"%s ansible-command: task execution OK; host: %s; message: %s",
self.hostname,
host,
self._dump_results(res),
)
def v2_runner_on_skipped(self, result): def v2_runner_on_skipped(self, result):
host = result._host.get_name() host = result._host.get_name()
self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s', self.hostname, host, 'skipped') self.logger.info(
"%s ansible-command: task execution SKIPPED; host: %s; message: %s", self.hostname, host, "skipped"
)
def v2_runner_on_unreachable(self, result): def v2_runner_on_unreachable(self, result):
res = result._result res = result._result
host = result._host.get_name() host = result._host.get_name()
self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s', self.hostname, host, self._dump_results(res)) self.logger.error(
"%s ansible-command: task execution UNREACHABLE; host: %s; message: %s",
self.hostname,
host,
self._dump_results(res),
)
def v2_runner_on_async_failed(self, result): def v2_runner_on_async_failed(self, result):
res = result._result res = result._result
host = result._host.get_name() host = result._host.get_name()
jid = result._result.get('ansible_job_id') jid = result._result.get("ansible_job_id")
self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s', self.hostname, host, self._dump_results(res)) self.logger.error(
"%s ansible-command: task execution FAILED; host: %s; message: %s",
self.hostname,
host,
self._dump_results(res),
)
def v2_playbook_on_import_for_host(self, result, imported_file): def v2_playbook_on_import_for_host(self, result, imported_file):
host = result._host.get_name() host = result._host.get_name()
self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s', self.hostname, host, imported_file) self.logger.info(
"%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s",
self.hostname,
host,
imported_file,
)
def v2_playbook_on_not_import_for_host(self, result, missing_file): def v2_playbook_on_not_import_for_host(self, result, missing_file):
host = result._host.get_name() host = result._host.get_name()
self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s', self.hostname, host, missing_file) self.logger.info(
"%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s",
self.hostname,
host,
missing_file,
)

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2025, Felix Fontein <felix@fontein.de> # Copyright (c) 2025, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -51,8 +50,8 @@ from ansible.plugins.callback.default import CallbackModule as Default
class CallbackModule(Default): class CallbackModule(Default):
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'community.general.tasks_only' CALLBACK_NAME = "community.general.tasks_only"
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
pass pass

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2024, kurokobo <kurokobo@protonmail.com> # Copyright (c) 2024, kurokobo <kurokobo@protonmail.com>
# Copyright (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> # Copyright (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)

View File

@@ -28,8 +28,7 @@ from ansible.plugins.callback.default import CallbackModule as CallbackModule_de
class CallbackModule(CallbackModule_default): class CallbackModule(CallbackModule_default):
"""
'''
Design goals: Design goals:
- Print consolidated output that looks like a *NIX startup log - Print consolidated output that looks like a *NIX startup log
- Defaults should avoid displaying unnecessary information wherever possible - Defaults should avoid displaying unnecessary information wherever possible
@@ -39,14 +38,16 @@ class CallbackModule(CallbackModule_default):
- Add option to display all hostnames on a single line in the appropriate result color (failures may have a separate line) - Add option to display all hostnames on a single line in the appropriate result color (failures may have a separate line)
- Consolidate stats display - Consolidate stats display
- Don't show play name if no hosts found - Don't show play name if no hosts found
''' """
CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout' CALLBACK_TYPE = "stdout"
CALLBACK_NAME = 'community.general.unixy' CALLBACK_NAME = "community.general.unixy"
def _run_is_verbose(self, result): def _run_is_verbose(self, result):
return ((self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result) return (
self._display.verbosity > 0 or "_ansible_verbose_always" in result._result
) and "_ansible_verbose_override" not in result._result
def _get_task_display_name(self, task): def _get_task_display_name(self, task):
self.task_display_name = None self.task_display_name = None
@@ -59,8 +60,8 @@ class CallbackModule(CallbackModule_default):
self.task_display_name = task_display_name self.task_display_name = task_display_name
def _preprocess_result(self, result): def _preprocess_result(self, result):
self.delegated_vars = result._result.get('_ansible_delegated_vars', None) self.delegated_vars = result._result.get("_ansible_delegated_vars", None)
self._handle_exception(result._result, use_stderr=self.get_option('display_failed_stderr')) self._handle_exception(result._result, use_stderr=self.get_option("display_failed_stderr"))
self._handle_warnings(result._result) self._handle_warnings(result._result)
def _process_result_output(self, result, msg): def _process_result_output(self, result, msg):
@@ -72,16 +73,16 @@ class CallbackModule(CallbackModule_default):
return task_result return task_result
if self.delegated_vars: if self.delegated_vars:
task_delegate_host = self.delegated_vars['ansible_host'] task_delegate_host = self.delegated_vars["ansible_host"]
task_result = f"{task_host} -> {task_delegate_host} {msg}" task_result = f"{task_host} -> {task_delegate_host} {msg}"
if result._result.get('msg') and result._result.get('msg') != "All items completed": if result._result.get("msg") and result._result.get("msg") != "All items completed":
task_result += f" | msg: {to_text(result._result.get('msg'))}" task_result += f" | msg: {to_text(result._result.get('msg'))}"
if result._result.get('stdout'): if result._result.get("stdout"):
task_result += f" | stdout: {result._result.get('stdout')}" task_result += f" | stdout: {result._result.get('stdout')}"
if result._result.get('stderr'): if result._result.get("stderr"):
task_result += f" | stderr: {result._result.get('stderr')}" task_result += f" | stderr: {result._result.get('stderr')}"
return task_result return task_result
@@ -89,7 +90,7 @@ class CallbackModule(CallbackModule_default):
def v2_playbook_on_task_start(self, task, is_conditional): def v2_playbook_on_task_start(self, task, is_conditional):
self._get_task_display_name(task) self._get_task_display_name(task)
if self.task_display_name is not None: if self.task_display_name is not None:
if task.check_mode and self.get_option('check_mode_markers'): if task.check_mode and self.get_option("check_mode_markers"):
self._display.display(f"{self.task_display_name} (check mode)...") self._display.display(f"{self.task_display_name} (check mode)...")
else: else:
self._display.display(f"{self.task_display_name}...") self._display.display(f"{self.task_display_name}...")
@@ -97,14 +98,14 @@ class CallbackModule(CallbackModule_default):
def v2_playbook_on_handler_task_start(self, task): def v2_playbook_on_handler_task_start(self, task):
self._get_task_display_name(task) self._get_task_display_name(task)
if self.task_display_name is not None: if self.task_display_name is not None:
if task.check_mode and self.get_option('check_mode_markers'): if task.check_mode and self.get_option("check_mode_markers"):
self._display.display(f"{self.task_display_name} (via handler in check mode)... ") self._display.display(f"{self.task_display_name} (via handler in check mode)... ")
else: else:
self._display.display(f"{self.task_display_name} (via handler)... ") self._display.display(f"{self.task_display_name} (via handler)... ")
def v2_playbook_on_play_start(self, play): def v2_playbook_on_play_start(self, play):
name = play.get_name().strip() name = play.get_name().strip()
if play.check_mode and self.get_option('check_mode_markers'): if play.check_mode and self.get_option("check_mode_markers"):
if name and play.hosts: if name and play.hosts:
msg = f"\n- {name} (in check mode) on hosts: {','.join(play.hosts)} -" msg = f"\n- {name} (in check mode) on hosts: {','.join(play.hosts)} -"
else: else:
@@ -118,7 +119,7 @@ class CallbackModule(CallbackModule_default):
self._display.display(msg) self._display.display(msg)
def v2_runner_on_skipped(self, result, ignore_errors=False): def v2_runner_on_skipped(self, result, ignore_errors=False):
if self.get_option('display_skipped_hosts'): if self.get_option("display_skipped_hosts"):
self._preprocess_result(result) self._preprocess_result(result)
display_color = C.COLOR_SKIP display_color = C.COLOR_SKIP
msg = "skipped" msg = "skipped"
@@ -137,12 +138,12 @@ class CallbackModule(CallbackModule_default):
msg += f" | item: {item_value}" msg += f" | item: {item_value}"
task_result = self._process_result_output(result, msg) task_result = self._process_result_output(result, msg)
self._display.display(f" {task_result}", display_color, stderr=self.get_option('display_failed_stderr')) self._display.display(f" {task_result}", display_color, stderr=self.get_option("display_failed_stderr"))
def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK): def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK):
self._preprocess_result(result) self._preprocess_result(result)
result_was_changed = ('changed' in result._result and result._result['changed']) result_was_changed = "changed" in result._result and result._result["changed"]
if result_was_changed: if result_was_changed:
msg = "done" msg = "done"
item_value = self._get_item_label(result._result) item_value = self._get_item_label(result._result)
@@ -151,7 +152,7 @@ class CallbackModule(CallbackModule_default):
display_color = C.COLOR_CHANGED display_color = C.COLOR_CHANGED
task_result = self._process_result_output(result, msg) task_result = self._process_result_output(result, msg)
self._display.display(f" {task_result}", display_color) self._display.display(f" {task_result}", display_color)
elif self.get_option('display_ok_hosts'): elif self.get_option("display_ok_hosts"):
task_result = self._process_result_output(result, msg) task_result = self._process_result_output(result, msg)
self._display.display(f" {task_result}", display_color) self._display.display(f" {task_result}", display_color)
@@ -171,17 +172,17 @@ class CallbackModule(CallbackModule_default):
display_color = C.COLOR_UNREACHABLE display_color = C.COLOR_UNREACHABLE
task_result = self._process_result_output(result, msg) task_result = self._process_result_output(result, msg)
self._display.display(f" {task_result}", display_color, stderr=self.get_option('display_failed_stderr')) self._display.display(f" {task_result}", display_color, stderr=self.get_option("display_failed_stderr"))
def v2_on_file_diff(self, result): def v2_on_file_diff(self, result):
if result._task.loop and 'results' in result._result: if result._task.loop and "results" in result._result:
for res in result._result['results']: for res in result._result["results"]:
if 'diff' in res and res['diff'] and res.get('changed', False): if "diff" in res and res["diff"] and res.get("changed", False):
diff = self._get_diff(res['diff']) diff = self._get_diff(res["diff"])
if diff: if diff:
self._display.display(diff) self._display.display(diff)
elif 'diff' in result._result and result._result['diff'] and result._result.get('changed', False): elif "diff" in result._result and result._result["diff"] and result._result.get("changed", False):
diff = self._get_diff(result._result['diff']) diff = self._get_diff(result._result["diff"])
if diff: if diff:
self._display.display(diff) self._display.display(diff)
@@ -197,30 +198,30 @@ class CallbackModule(CallbackModule_default):
f" {hostcolor(h, t)} : {colorize('ok', t['ok'], C.COLOR_OK)} {colorize('changed', t['changed'], C.COLOR_CHANGED)} " f" {hostcolor(h, t)} : {colorize('ok', t['ok'], C.COLOR_OK)} {colorize('changed', t['changed'], C.COLOR_CHANGED)} "
f"{colorize('unreachable', t['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', t['failures'], C.COLOR_ERROR)} " f"{colorize('unreachable', t['unreachable'], C.COLOR_UNREACHABLE)} {colorize('failed', t['failures'], C.COLOR_ERROR)} "
f"{colorize('rescued', t['rescued'], C.COLOR_OK)} {colorize('ignored', t['ignored'], C.COLOR_WARN)}", f"{colorize('rescued', t['rescued'], C.COLOR_OK)} {colorize('ignored', t['ignored'], C.COLOR_WARN)}",
screen_only=True screen_only=True,
) )
self._display.display( self._display.display(
f" {hostcolor(h, t, False)} : {colorize('ok', t['ok'], None)} {colorize('changed', t['changed'], None)} " f" {hostcolor(h, t, False)} : {colorize('ok', t['ok'], None)} {colorize('changed', t['changed'], None)} "
f"{colorize('unreachable', t['unreachable'], None)} {colorize('failed', t['failures'], None)} {colorize('rescued', t['rescued'], None)} " f"{colorize('unreachable', t['unreachable'], None)} {colorize('failed', t['failures'], None)} {colorize('rescued', t['rescued'], None)} "
f"{colorize('ignored', t['ignored'], None)}", f"{colorize('ignored', t['ignored'], None)}",
log_only=True log_only=True,
) )
if stats.custom and self.get_option('show_custom_stats'): if stats.custom and self.get_option("show_custom_stats"):
self._display.banner("CUSTOM STATS: ") self._display.banner("CUSTOM STATS: ")
# per host # per host
# TODO: come up with 'pretty format' # TODO: come up with 'pretty format'
for k in sorted(stats.custom.keys()): for k in sorted(stats.custom.keys()):
if k == '_run': if k == "_run":
continue continue
stat_val = self._dump_results(stats.custom[k], indent=1).replace('\n', '') stat_val = self._dump_results(stats.custom[k], indent=1).replace("\n", "")
self._display.display(f'\t{k}: {stat_val}') self._display.display(f"\t{k}: {stat_val}")
# print per run custom stats # print per run custom stats
if '_run' in stats.custom: if "_run" in stats.custom:
self._display.display("", screen_only=True) self._display.display("", screen_only=True)
stat_val_run = self._dump_results(stats.custom['_run'], indent=1).replace('\n', '') stat_val_run = self._dump_results(stats.custom["_run"], indent=1).replace("\n", "")
self._display.display(f'\tRUN: {stat_val_run}') self._display.display(f"\tRUN: {stat_val_run}")
self._display.display("", screen_only=True) self._display.display("", screen_only=True)
def v2_playbook_on_no_hosts_matched(self): def v2_playbook_on_no_hosts_matched(self):
@@ -230,21 +231,24 @@ class CallbackModule(CallbackModule_default):
self._display.display(" Ran out of hosts!", color=C.COLOR_ERROR) self._display.display(" Ran out of hosts!", color=C.COLOR_ERROR)
def v2_playbook_on_start(self, playbook): def v2_playbook_on_start(self, playbook):
if context.CLIARGS['check'] and self.get_option('check_mode_markers'): if context.CLIARGS["check"] and self.get_option("check_mode_markers"):
self._display.display(f"Executing playbook {basename(playbook._file_name)} in check mode") self._display.display(f"Executing playbook {basename(playbook._file_name)} in check mode")
else: else:
self._display.display(f"Executing playbook {basename(playbook._file_name)}") self._display.display(f"Executing playbook {basename(playbook._file_name)}")
# show CLI arguments # show CLI arguments
if self._display.verbosity > 3: if self._display.verbosity > 3:
if context.CLIARGS.get('args'): if context.CLIARGS.get("args"):
self._display.display(f"Positional arguments: {' '.join(context.CLIARGS['args'])}", self._display.display(
color=C.COLOR_VERBOSE, screen_only=True) f"Positional arguments: {' '.join(context.CLIARGS['args'])}",
color=C.COLOR_VERBOSE,
screen_only=True,
)
for argument in (a for a in context.CLIARGS if a != 'args'): for argument in (a for a in context.CLIARGS if a != "args"):
val = context.CLIARGS[argument] val = context.CLIARGS[argument]
if val: if val:
self._display.vvvv(f'{argument}: {val}') self._display.vvvv(f"{argument}: {val}")
def v2_runner_retry(self, result): def v2_runner_retry(self, result):
msg = f" Retrying... ({result._result['attempts']} of {result._result['retries']})" msg = f" Retrying... ({result._result['attempts']} of {result._result['retries']})"

View File

@@ -87,16 +87,16 @@ display = Display()
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Local chroot based connections """ """Local chroot based connections"""
transport = 'community.general.chroot' transport = "community.general.chroot"
has_pipelining = True has_pipelining = True
# su currently has an undiagnosed issue with calculating the file # su currently has an undiagnosed issue with calculating the file
# checksums (so copy, for instance, doesn't work right) # checksums (so copy, for instance, doesn't work right)
# Have to look into that before re-enabling this # Have to look into that before re-enabling this
has_tty = False has_tty = False
default_user = 'root' default_user = "root"
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
super().__init__(play_context, new_stdin, *args, **kwargs) super().__init__(play_context, new_stdin, *args, **kwargs)
@@ -107,7 +107,7 @@ class Connection(ConnectionBase):
if not os.path.isdir(self.chroot): if not os.path.isdir(self.chroot):
raise AnsibleError(f"{self.chroot} is not a directory") raise AnsibleError(f"{self.chroot} is not a directory")
chrootsh = os.path.join(self.chroot, 'bin/sh') chrootsh = os.path.join(self.chroot, "bin/sh")
# Want to check for a usable bourne shell inside the chroot. # Want to check for a usable bourne shell inside the chroot.
# is_executable() == True is sufficient. For symlinks it # is_executable() == True is sufficient. For symlinks it
# gets really complicated really fast. So we punt on finding that # gets really complicated really fast. So we punt on finding that
@@ -116,17 +116,18 @@ class Connection(ConnectionBase):
raise AnsibleError(f"{self.chroot} does not look like a chrootable dir (/bin/sh missing)") raise AnsibleError(f"{self.chroot} does not look like a chrootable dir (/bin/sh missing)")
def _connect(self): def _connect(self):
""" connect to the chroot """ """connect to the chroot"""
if not self.get_option('disable_root_check') and os.geteuid() != 0: if not self.get_option("disable_root_check") and os.geteuid() != 0:
raise AnsibleError( raise AnsibleError(
"chroot connection requires running as root. " "chroot connection requires running as root. "
"You can override this check with the `disable_root_check` option.") "You can override this check with the `disable_root_check` option."
)
if os.path.isabs(self.get_option('chroot_exe')): if os.path.isabs(self.get_option("chroot_exe")):
self.chroot_cmd = self.get_option('chroot_exe') self.chroot_cmd = self.get_option("chroot_exe")
else: else:
try: try:
self.chroot_cmd = get_bin_path(self.get_option('chroot_exe')) self.chroot_cmd = get_bin_path(self.get_option("chroot_exe"))
except ValueError as e: except ValueError as e:
raise AnsibleError(str(e)) raise AnsibleError(str(e))
@@ -136,25 +137,24 @@ class Connection(ConnectionBase):
self._connected = True self._connected = True
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE): def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
""" run a command on the chroot. This is only needed for implementing """run a command on the chroot. This is only needed for implementing
put_file() get_file() so that we don't have to read the whole file put_file() get_file() so that we don't have to read the whole file
into memory. into memory.
compared to exec_command() it looses some niceties like being able to compared to exec_command() it looses some niceties like being able to
return the process's exit code immediately. return the process's exit code immediately.
""" """
executable = self.get_option('executable') executable = self.get_option("executable")
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd] local_cmd = [self.chroot_cmd, self.chroot, executable, "-c", cmd]
display.vvv(f"EXEC {local_cmd}", host=self.chroot) display.vvv(f"EXEC {local_cmd}", host=self.chroot)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p return p
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
""" run a command on the chroot """ """run a command on the chroot"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
p = self._buffered_exec_command(cmd) p = self._buffered_exec_command(cmd)
@@ -164,33 +164,33 @@ class Connection(ConnectionBase):
@staticmethod @staticmethod
def _prefix_login_path(remote_path): def _prefix_login_path(remote_path):
""" Make sure that we put files into a standard path """Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it. If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead. exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default. This also happens to be the former default.
Can revisit using $HOME instead if it is a problem Can revisit using $HOME instead if it is a problem
""" """
if not remote_path.startswith(os.path.sep): if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path) remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path) return os.path.normpath(remote_path)
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" transfer a file from local to chroot """ """transfer a file from local to chroot"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
display.vvv(f"PUT {in_path} TO {out_path}", host=self.chroot) display.vvv(f"PUT {in_path} TO {out_path}", host=self.chroot)
out_path = shlex_quote(self._prefix_login_path(out_path)) out_path = shlex_quote(self._prefix_login_path(out_path))
try: try:
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file: with open(to_bytes(in_path, errors="surrogate_or_strict"), "rb") as in_file:
if not os.fstat(in_file.fileno()).st_size: if not os.fstat(in_file.fileno()).st_size:
count = ' count=0' count = " count=0"
else: else:
count = '' count = ""
try: try:
p = self._buffered_exec_command(f'dd of={out_path} bs={BUFSIZE}{count}', stdin=in_file) p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
except OSError: except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot") raise AnsibleError("chroot connection requires dd command in the chroot")
try: try:
@@ -204,17 +204,17 @@ class Connection(ConnectionBase):
raise AnsibleError(f"file or module does not exist at: {in_path}") raise AnsibleError(f"file or module does not exist at: {in_path}")
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from chroot to local """ """fetch a file from chroot to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.chroot) display.vvv(f"FETCH {in_path} TO {out_path}", host=self.chroot)
in_path = shlex_quote(self._prefix_login_path(in_path)) in_path = shlex_quote(self._prefix_login_path(in_path))
try: try:
p = self._buffered_exec_command(f'dd if={in_path} bs={BUFSIZE}') p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
except OSError: except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot") raise AnsibleError("chroot connection requires dd command in the chroot")
with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file: with open(to_bytes(out_path, errors="surrogate_or_strict"), "wb+") as out_file:
try: try:
chunk = p.stdout.read(BUFSIZE) chunk = p.stdout.read(BUFSIZE)
while chunk: while chunk:
@@ -228,6 +228,6 @@ class Connection(ConnectionBase):
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}") raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
def close(self): def close(self):
""" terminate the connection; nothing to do here """ """terminate the connection; nothing to do here"""
super().close() super().close()
self._connected = False self._connected = False

View File

@@ -29,6 +29,7 @@ options:
HAVE_FUNC = False HAVE_FUNC = False
try: try:
import func.overlord.client as fc import func.overlord.client as fc
HAVE_FUNC = True HAVE_FUNC = True
except ImportError: except ImportError:
pass pass
@@ -45,7 +46,7 @@ display = Display()
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Func-based connections """ """Func-based connections"""
has_pipelining = False has_pipelining = False
@@ -64,7 +65,7 @@ class Connection(ConnectionBase):
return self return self
def exec_command(self, cmd, in_data=None, sudoable=True): def exec_command(self, cmd, in_data=None, sudoable=True):
""" run a command on the remote minion """ """run a command on the remote minion"""
if in_data: if in_data:
raise AnsibleError("Internal Error: this module does not support optimized module pipelining") raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
@@ -82,16 +83,16 @@ class Connection(ConnectionBase):
return os.path.join(prefix, normpath[1:]) return os.path.join(prefix, normpath[1:])
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" transfer a file from local to remote """ """transfer a file from local to remote"""
out_path = self._normalize_path(out_path, '/') out_path = self._normalize_path(out_path, "/")
display.vvv(f"PUT {in_path} TO {out_path}", host=self.host) display.vvv(f"PUT {in_path} TO {out_path}", host=self.host)
self.client.local.copyfile.send(in_path, out_path) self.client.local.copyfile.send(in_path, out_path)
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from remote to local """ """fetch a file from remote to local"""
in_path = self._normalize_path(in_path, '/') in_path = self._normalize_path(in_path, "/")
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.host) display.vvv(f"FETCH {in_path} TO {out_path}", host=self.host)
# need to use a tmp dir due to difference of semantic for getfile # need to use a tmp dir due to difference of semantic for getfile
# ( who take a # directory as destination) and fetch_file, who # ( who take a # directory as destination) and fetch_file, who
@@ -102,5 +103,5 @@ class Connection(ConnectionBase):
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
def close(self): def close(self):
""" terminate the connection; nothing to do here """ """terminate the connection; nothing to do here"""
pass pass

View File

@@ -84,7 +84,7 @@ from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Incus based connections """ """Incus based connections"""
transport = "incus" transport = "incus"
has_pipelining = True has_pipelining = True
@@ -98,12 +98,13 @@ class Connection(ConnectionBase):
raise AnsibleError("incus command not found in PATH") raise AnsibleError("incus command not found in PATH")
def _connect(self): def _connect(self):
"""connect to Incus (nothing to do here) """ """connect to Incus (nothing to do here)"""
super()._connect() super()._connect()
if not self._connected: if not self._connected:
self._display.vvv(f"ESTABLISH Incus CONNECTION FOR USER: {self.get_option('remote_user')}", self._display.vvv(
host=self._instance()) f"ESTABLISH Incus CONNECTION FOR USER: {self.get_option('remote_user')}", host=self._instance()
)
self._connected = True self._connected = True
def _build_command(self, cmd) -> list[str]: def _build_command(self, cmd) -> list[str]:
@@ -111,10 +112,12 @@ class Connection(ConnectionBase):
exec_cmd: list[str] = [ exec_cmd: list[str] = [
self._incus_cmd, self._incus_cmd,
"--project", self.get_option("project"), "--project",
self.get_option("project"),
"exec", "exec",
f"{self.get_option('remote')}:{self._instance()}", f"{self.get_option('remote')}:{self._instance()}",
"--"] "--",
]
if self.get_option("remote_user") != "root": if self.get_option("remote_user") != "root":
self._display.vvv( self._display.vvv(
@@ -122,9 +125,7 @@ class Connection(ConnectionBase):
trying to run 'incus exec' with become method: {self.get_option('incus_become_method')}", trying to run 'incus exec' with become method: {self.get_option('incus_become_method')}",
host=self._instance(), host=self._instance(),
) )
exec_cmd.extend( exec_cmd.extend([self.get_option("incus_become_method"), self.get_option("remote_user"), "-c"])
[self.get_option("incus_become_method"), self.get_option("remote_user"), "-c"]
)
exec_cmd.extend([self.get_option("executable"), "-c", cmd]) exec_cmd.extend([self.get_option("executable"), "-c", cmd])
@@ -133,20 +134,19 @@ class Connection(ConnectionBase):
def _instance(self): def _instance(self):
# Return only the leading part of the FQDN as the instance name # Return only the leading part of the FQDN as the instance name
# as Incus instance names cannot be a FQDN. # as Incus instance names cannot be a FQDN.
return self.get_option('remote_addr').split(".")[0] return self.get_option("remote_addr").split(".")[0]
def exec_command(self, cmd, in_data=None, sudoable=True): def exec_command(self, cmd, in_data=None, sudoable=True):
""" execute a command on the Incus host """ """execute a command on the Incus host"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
self._display.vvv(f"EXEC {cmd}", self._display.vvv(f"EXEC {cmd}", host=self._instance())
host=self._instance())
local_cmd = self._build_command(cmd) local_cmd = self._build_command(cmd)
self._display.vvvvv(f"EXEC {local_cmd}", host=self._instance()) self._display.vvvvv(f"EXEC {local_cmd}", host=self._instance())
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru') in_data = to_bytes(in_data, errors="surrogate_or_strict", nonstring="passthru")
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(in_data) stdout, stderr = process.communicate(in_data)
@@ -154,32 +154,22 @@ class Connection(ConnectionBase):
stdout = to_text(stdout) stdout = to_text(stdout)
stderr = to_text(stderr) stderr = to_text(stderr)
if stderr.startswith("Error: ") and stderr.rstrip().endswith( if stderr.startswith("Error: ") and stderr.rstrip().endswith(": Instance is not running"):
": Instance is not running"
):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
f"instance not running: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})" f"instance not running: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
) )
if stderr.startswith("Error: ") and stderr.rstrip().endswith( if stderr.startswith("Error: ") and stderr.rstrip().endswith(": Instance not found"):
": Instance not found"
):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
f"instance not found: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})" f"instance not found: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
) )
if ( if stderr.startswith("Error: ") and ": User does not have permission " in stderr:
stderr.startswith("Error: ")
and ": User does not have permission " in stderr
):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
f"instance access denied: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})" f"instance access denied: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
) )
if ( if stderr.startswith("Error: ") and ": User does not have entitlement " in stderr:
stderr.startswith("Error: ")
and ": User does not have entitlement " in stderr
):
raise AnsibleConnectionFailure( raise AnsibleConnectionFailure(
f"instance access denied: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})" f"instance access denied: {self._instance()} (remote={self.get_option('remote')}, project={self.get_option('project')})"
) )
@@ -191,28 +181,23 @@ class Connection(ConnectionBase):
rc, uid_out, err = self.exec_command("/bin/id -u") rc, uid_out, err = self.exec_command("/bin/id -u")
if rc != 0: if rc != 0:
raise AnsibleError( raise AnsibleError(f"Failed to get remote uid for user {self.get_option('remote_user')}: {err}")
f"Failed to get remote uid for user {self.get_option('remote_user')}: {err}"
)
uid = uid_out.strip() uid = uid_out.strip()
rc, gid_out, err = self.exec_command("/bin/id -g") rc, gid_out, err = self.exec_command("/bin/id -g")
if rc != 0: if rc != 0:
raise AnsibleError( raise AnsibleError(f"Failed to get remote gid for user {self.get_option('remote_user')}: {err}")
f"Failed to get remote gid for user {self.get_option('remote_user')}: {err}"
)
gid = gid_out.strip() gid = gid_out.strip()
return int(uid), int(gid) return int(uid), int(gid)
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" put a file from local to Incus """ """put a file from local to Incus"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
self._display.vvv(f"PUT {in_path} TO {out_path}", self._display.vvv(f"PUT {in_path} TO {out_path}", host=self._instance())
host=self._instance())
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')): if not os.path.isfile(to_bytes(in_path, errors="surrogate_or_strict")):
raise AnsibleFileNotFound(f"input path is not a file: {in_path}") raise AnsibleFileNotFound(f"input path is not a file: {in_path}")
if self.get_option("remote_user") != "root": if self.get_option("remote_user") != "root":
@@ -245,30 +230,33 @@ class Connection(ConnectionBase):
self._display.vvvvv(f"PUT {local_cmd}", host=self._instance()) self._display.vvvvv(f"PUT {local_cmd}", host=self._instance())
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
call(local_cmd) call(local_cmd)
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from Incus to local """ """fetch a file from Incus to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
self._display.vvv(f"FETCH {in_path} TO {out_path}", self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self._instance())
host=self._instance())
local_cmd = [ local_cmd = [
self._incus_cmd, self._incus_cmd,
"--project", self.get_option("project"), "--project",
"file", "pull", "--quiet", self.get_option("project"),
"file",
"pull",
"--quiet",
f"{self.get_option('remote')}:{self._instance()}/{in_path}", f"{self.get_option('remote')}:{self._instance()}/{in_path}",
out_path] out_path,
]
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
call(local_cmd) call(local_cmd)
def close(self): def close(self):
""" close the connection (nothing to do here) """ """close the connection (nothing to do here)"""
super().close() super().close()
self._connected = False self._connected = False

View File

@@ -42,31 +42,33 @@ display = Display()
class Connection(Jail): class Connection(Jail):
""" Local iocage based connections """ """Local iocage based connections"""
transport = 'community.general.iocage' transport = "community.general.iocage"
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
self.ioc_jail = play_context.remote_addr self.ioc_jail = play_context.remote_addr
self.iocage_cmd = Jail._search_executable('iocage') self.iocage_cmd = Jail._search_executable("iocage")
jail_uuid = self.get_jail_uuid() jail_uuid = self.get_jail_uuid()
kwargs[Jail.modified_jailname_key] = f'ioc-{jail_uuid}' kwargs[Jail.modified_jailname_key] = f"ioc-{jail_uuid}"
display.vvv( display.vvv(
f"Jail {self.ioc_jail} has been translated to {kwargs[Jail.modified_jailname_key]}", f"Jail {self.ioc_jail} has been translated to {kwargs[Jail.modified_jailname_key]}",
host=kwargs[Jail.modified_jailname_key] host=kwargs[Jail.modified_jailname_key],
) )
super().__init__(play_context, new_stdin, *args, **kwargs) super().__init__(play_context, new_stdin, *args, **kwargs)
def get_jail_uuid(self): def get_jail_uuid(self):
p = subprocess.Popen([self.iocage_cmd, 'get', 'host_hostuuid', self.ioc_jail], p = subprocess.Popen(
stdin=subprocess.PIPE, [self.iocage_cmd, "get", "host_hostuuid", self.ioc_jail],
stdout=subprocess.PIPE, stdin=subprocess.PIPE,
stderr=subprocess.STDOUT) stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
@@ -82,4 +84,4 @@ class Connection(Jail):
if p.returncode != 0: if p.returncode != 0:
raise AnsibleError(f"iocage returned an error: {stdout}") raise AnsibleError(f"iocage returned an error: {stdout}")
return stdout.strip('\n') return stdout.strip("\n")

View File

@@ -49,11 +49,11 @@ display = Display()
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Local BSD Jail based connections """ """Local BSD Jail based connections"""
modified_jailname_key = 'conn_jail_name' modified_jailname_key = "conn_jail_name"
transport = 'community.general.jail' transport = "community.general.jail"
# Pipelining may work. Someone needs to test by setting this to True and # Pipelining may work. Someone needs to test by setting this to True and
# having pipelining=True in their ansible.cfg # having pipelining=True in their ansible.cfg
has_pipelining = True has_pipelining = True
@@ -69,8 +69,8 @@ class Connection(ConnectionBase):
if os.geteuid() != 0: if os.geteuid() != 0:
raise AnsibleError("jail connection requires running as root") raise AnsibleError("jail connection requires running as root")
self.jls_cmd = self._search_executable('jls') self.jls_cmd = self._search_executable("jls")
self.jexec_cmd = self._search_executable('jexec') self.jexec_cmd = self._search_executable("jexec")
if self.jail not in self.list_jails(): if self.jail not in self.list_jails():
raise AnsibleError(f"incorrect jail name {self.jail}") raise AnsibleError(f"incorrect jail name {self.jail}")
@@ -83,23 +83,23 @@ class Connection(ConnectionBase):
raise AnsibleError(f"{executable} command not found in PATH") raise AnsibleError(f"{executable} command not found in PATH")
def list_jails(self): def list_jails(self):
p = subprocess.Popen([self.jls_cmd, '-q', 'name'], p = subprocess.Popen(
stdin=subprocess.PIPE, [self.jls_cmd, "-q", "name"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
stdout=subprocess.PIPE, stderr=subprocess.PIPE) )
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
return to_text(stdout, errors='surrogate_or_strict').split() return to_text(stdout, errors="surrogate_or_strict").split()
def _connect(self): def _connect(self):
""" connect to the jail; nothing to do here """ """connect to the jail; nothing to do here"""
super()._connect() super()._connect()
if not self._connected: if not self._connected:
display.vvv(f"ESTABLISH JAIL CONNECTION FOR USER: {self._play_context.remote_user}", host=self.jail) display.vvv(f"ESTABLISH JAIL CONNECTION FOR USER: {self._play_context.remote_user}", host=self.jail)
self._connected = True self._connected = True
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE): def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
""" run a command on the jail. This is only needed for implementing """run a command on the jail. This is only needed for implementing
put_file() get_file() so that we don't have to read the whole file put_file() get_file() so that we don't have to read the whole file
into memory. into memory.
@@ -108,24 +108,23 @@ class Connection(ConnectionBase):
""" """
local_cmd = [self.jexec_cmd] local_cmd = [self.jexec_cmd]
set_env = '' set_env = ""
if self._play_context.remote_user is not None: if self._play_context.remote_user is not None:
local_cmd += ['-U', self._play_context.remote_user] local_cmd += ["-U", self._play_context.remote_user]
# update HOME since -U does not update the jail environment # update HOME since -U does not update the jail environment
set_env = f"HOME=~{self._play_context.remote_user} " set_env = f"HOME=~{self._play_context.remote_user} "
local_cmd += [self.jail, self._play_context.executable, '-c', set_env + cmd] local_cmd += [self.jail, self._play_context.executable, "-c", set_env + cmd]
display.vvv(f"EXEC {local_cmd}", host=self.jail) display.vvv(f"EXEC {local_cmd}", host=self.jail)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p return p
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
""" run a command on the jail """ """run a command on the jail"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
p = self._buffered_exec_command(cmd) p = self._buffered_exec_command(cmd)
@@ -135,33 +134,33 @@ class Connection(ConnectionBase):
@staticmethod @staticmethod
def _prefix_login_path(remote_path): def _prefix_login_path(remote_path):
""" Make sure that we put files into a standard path """Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it. If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead. exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default. This also happens to be the former default.
Can revisit using $HOME instead if it is a problem Can revisit using $HOME instead if it is a problem
""" """
if not remote_path.startswith(os.path.sep): if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path) remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path) return os.path.normpath(remote_path)
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" transfer a file from local to jail """ """transfer a file from local to jail"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
display.vvv(f"PUT {in_path} TO {out_path}", host=self.jail) display.vvv(f"PUT {in_path} TO {out_path}", host=self.jail)
out_path = shlex_quote(self._prefix_login_path(out_path)) out_path = shlex_quote(self._prefix_login_path(out_path))
try: try:
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file: with open(to_bytes(in_path, errors="surrogate_or_strict"), "rb") as in_file:
if not os.fstat(in_file.fileno()).st_size: if not os.fstat(in_file.fileno()).st_size:
count = ' count=0' count = " count=0"
else: else:
count = '' count = ""
try: try:
p = self._buffered_exec_command(f'dd of={out_path} bs={BUFSIZE}{count}', stdin=in_file) p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
except OSError: except OSError:
raise AnsibleError("jail connection requires dd command in the jail") raise AnsibleError("jail connection requires dd command in the jail")
try: try:
@@ -170,22 +169,24 @@ class Connection(ConnectionBase):
traceback.print_exc() traceback.print_exc()
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
if p.returncode != 0: if p.returncode != 0:
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}") raise AnsibleError(
f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}"
)
except IOError: except IOError:
raise AnsibleError(f"file or module does not exist at: {in_path}") raise AnsibleError(f"file or module does not exist at: {in_path}")
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from jail to local """ """fetch a file from jail to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.jail) display.vvv(f"FETCH {in_path} TO {out_path}", host=self.jail)
in_path = shlex_quote(self._prefix_login_path(in_path)) in_path = shlex_quote(self._prefix_login_path(in_path))
try: try:
p = self._buffered_exec_command(f'dd if={in_path} bs={BUFSIZE}') p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
except OSError: except OSError:
raise AnsibleError("jail connection requires dd command in the jail") raise AnsibleError("jail connection requires dd command in the jail")
with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file: with open(to_bytes(out_path, errors="surrogate_or_strict"), "wb+") as out_file:
try: try:
chunk = p.stdout.read(BUFSIZE) chunk = p.stdout.read(BUFSIZE)
while chunk: while chunk:
@@ -196,9 +197,11 @@ class Connection(ConnectionBase):
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}") raise AnsibleError(f"failed to transfer file {in_path} to {out_path}")
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
if p.returncode != 0: if p.returncode != 0:
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}") raise AnsibleError(
f"failed to transfer file {in_path} to {out_path}:\n{to_native(stdout)}\n{to_native(stderr)}"
)
def close(self): def close(self):
""" terminate the connection; nothing to do here """ """terminate the connection; nothing to do here"""
super().close() super().close()
self._connected = False self._connected = False

View File

@@ -41,6 +41,7 @@ import errno
HAS_LIBLXC = False HAS_LIBLXC = False
try: try:
import lxc as _lxc import lxc as _lxc
HAS_LIBLXC = True HAS_LIBLXC = True
except ImportError: except ImportError:
pass pass
@@ -51,11 +52,11 @@ from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Local lxc based connections """ """Local lxc based connections"""
transport = 'community.general.lxc' transport = "community.general.lxc"
has_pipelining = True has_pipelining = True
default_user = 'root' default_user = "root"
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
super().__init__(play_context, new_stdin, *args, **kwargs) super().__init__(play_context, new_stdin, *args, **kwargs)
@@ -64,14 +65,14 @@ class Connection(ConnectionBase):
self.container = None self.container = None
def _connect(self): def _connect(self):
""" connect to the lxc; nothing to do here """ """connect to the lxc; nothing to do here"""
super()._connect() super()._connect()
if not HAS_LIBLXC: if not HAS_LIBLXC:
msg = "lxc python bindings are not installed" msg = "lxc python bindings are not installed"
raise errors.AnsibleError(msg) raise errors.AnsibleError(msg)
container_name = self.get_option('remote_addr') container_name = self.get_option("remote_addr")
if self.container and self.container_name == container_name: if self.container and self.container_name == container_name:
return return
@@ -98,7 +99,7 @@ class Connection(ConnectionBase):
continue continue
raise raise
for fd in ready_writes: for fd in ready_writes:
in_data = in_data[os.write(fd, in_data):] in_data = in_data[os.write(fd, in_data) :]
if len(in_data) == 0: if len(in_data) == 0:
write_fds.remove(fd) write_fds.remove(fd)
for fd in ready_reads: for fd in ready_reads:
@@ -117,12 +118,12 @@ class Connection(ConnectionBase):
return fd return fd
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
""" run a command on the chroot """ """run a command on the chroot"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
# python2-lxc needs bytes. python3-lxc needs text. # python2-lxc needs bytes. python3-lxc needs text.
executable = to_native(self.get_option('executable'), errors='surrogate_or_strict') executable = to_native(self.get_option("executable"), errors="surrogate_or_strict")
local_cmd = [executable, '-c', to_native(cmd, errors='surrogate_or_strict')] local_cmd = [executable, "-c", to_native(cmd, errors="surrogate_or_strict")]
read_stdout, write_stdout = None, None read_stdout, write_stdout = None, None
read_stderr, write_stderr = None, None read_stderr, write_stderr = None, None
@@ -133,14 +134,14 @@ class Connection(ConnectionBase):
read_stderr, write_stderr = os.pipe() read_stderr, write_stderr = os.pipe()
kwargs = { kwargs = {
'stdout': self._set_nonblocking(write_stdout), "stdout": self._set_nonblocking(write_stdout),
'stderr': self._set_nonblocking(write_stderr), "stderr": self._set_nonblocking(write_stderr),
'env_policy': _lxc.LXC_ATTACH_CLEAR_ENV "env_policy": _lxc.LXC_ATTACH_CLEAR_ENV,
} }
if in_data: if in_data:
read_stdin, write_stdin = os.pipe() read_stdin, write_stdin = os.pipe()
kwargs['stdin'] = self._set_nonblocking(read_stdin) kwargs["stdin"] = self._set_nonblocking(read_stdin)
self._display.vvv(f"EXEC {local_cmd}", host=self.container_name) self._display.vvv(f"EXEC {local_cmd}", host=self.container_name)
pid = self.container.attach(_lxc.attach_run_command, local_cmd, **kwargs) pid = self.container.attach(_lxc.attach_run_command, local_cmd, **kwargs)
@@ -153,28 +154,19 @@ class Connection(ConnectionBase):
if read_stdin: if read_stdin:
read_stdin = os.close(read_stdin) read_stdin = os.close(read_stdin)
return self._communicate(pid, return self._communicate(pid, in_data, write_stdin, read_stdout, read_stderr)
in_data,
write_stdin,
read_stdout,
read_stderr)
finally: finally:
fds = [read_stdout, fds = [read_stdout, write_stdout, read_stderr, write_stderr, read_stdin, write_stdin]
write_stdout,
read_stderr,
write_stderr,
read_stdin,
write_stdin]
for fd in fds: for fd in fds:
if fd: if fd:
os.close(fd) os.close(fd)
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
''' transfer a file from local to lxc ''' """transfer a file from local to lxc"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
self._display.vvv(f"PUT {in_path} TO {out_path}", host=self.container_name) self._display.vvv(f"PUT {in_path} TO {out_path}", host=self.container_name)
in_path = to_bytes(in_path, errors='surrogate_or_strict') in_path = to_bytes(in_path, errors="surrogate_or_strict")
out_path = to_bytes(out_path, errors='surrogate_or_strict') out_path = to_bytes(out_path, errors="surrogate_or_strict")
if not os.path.exists(in_path): if not os.path.exists(in_path):
msg = f"file or module does not exist: {in_path}" msg = f"file or module does not exist: {in_path}"
@@ -185,9 +177,11 @@ class Connection(ConnectionBase):
traceback.print_exc() traceback.print_exc()
raise errors.AnsibleError(f"failed to open input file to {in_path}") raise errors.AnsibleError(f"failed to open input file to {in_path}")
try: try:
def write_file(args): def write_file(args):
with open(out_path, 'wb+') as dst_file: with open(out_path, "wb+") as dst_file:
shutil.copyfileobj(src_file, dst_file) shutil.copyfileobj(src_file, dst_file)
try: try:
self.container.attach_wait(write_file, None) self.container.attach_wait(write_file, None)
except IOError: except IOError:
@@ -198,11 +192,11 @@ class Connection(ConnectionBase):
src_file.close() src_file.close()
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
''' fetch a file from lxc to local ''' """fetch a file from lxc to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self.container_name) self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self.container_name)
in_path = to_bytes(in_path, errors='surrogate_or_strict') in_path = to_bytes(in_path, errors="surrogate_or_strict")
out_path = to_bytes(out_path, errors='surrogate_or_strict') out_path = to_bytes(out_path, errors="surrogate_or_strict")
try: try:
dst_file = open(out_path, "wb") dst_file = open(out_path, "wb")
@@ -211,14 +205,16 @@ class Connection(ConnectionBase):
msg = f"failed to open output file {out_path}" msg = f"failed to open output file {out_path}"
raise errors.AnsibleError(msg) raise errors.AnsibleError(msg)
try: try:
def write_file(args): def write_file(args):
try: try:
with open(in_path, 'rb') as src_file: with open(in_path, "rb") as src_file:
shutil.copyfileobj(src_file, dst_file) shutil.copyfileobj(src_file, dst_file)
finally: finally:
# this is needed in the lxc child process # this is needed in the lxc child process
# to flush internal python buffers # to flush internal python buffers
dst_file.close() dst_file.close()
try: try:
self.container.attach_wait(write_file, None) self.container.attach_wait(write_file, None)
except IOError: except IOError:
@@ -229,6 +225,6 @@ class Connection(ConnectionBase):
dst_file.close() dst_file.close()
def close(self): def close(self):
''' terminate the connection; nothing to do here ''' """terminate the connection; nothing to do here"""
super().close() super().close()
self._connected = False self._connected = False

View File

@@ -83,9 +83,9 @@ from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" lxd based connections """ """lxd based connections"""
transport = 'community.general.lxd' transport = "community.general.lxd"
has_pipelining = True has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
@@ -97,11 +97,11 @@ class Connection(ConnectionBase):
raise AnsibleError("lxc command not found in PATH") raise AnsibleError("lxc command not found in PATH")
def _host(self): def _host(self):
""" translate remote_addr to lxd (short) hostname """ """translate remote_addr to lxd (short) hostname"""
return self.get_option("remote_addr").split(".", 1)[0] return self.get_option("remote_addr").split(".", 1)[0]
def _connect(self): def _connect(self):
"""connect to lxd (nothing to do here) """ """connect to lxd (nothing to do here)"""
super()._connect() super()._connect()
if not self._connected: if not self._connected:
@@ -124,16 +124,14 @@ class Connection(ConnectionBase):
trying to run 'lxc exec' with become method: {self.get_option('lxd_become_method')}", trying to run 'lxc exec' with become method: {self.get_option('lxd_become_method')}",
host=self._host(), host=self._host(),
) )
exec_cmd.extend( exec_cmd.extend([self.get_option("lxd_become_method"), self.get_option("remote_user"), "-c"])
[self.get_option("lxd_become_method"), self.get_option("remote_user"), "-c"]
)
exec_cmd.extend([self.get_option("executable"), "-c", cmd]) exec_cmd.extend([self.get_option("executable"), "-c", cmd])
return exec_cmd return exec_cmd
def exec_command(self, cmd, in_data=None, sudoable=True): def exec_command(self, cmd, in_data=None, sudoable=True):
""" execute a command on the lxd host """ """execute a command on the lxd host"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
self._display.vvv(f"EXEC {cmd}", host=self._host()) self._display.vvv(f"EXEC {cmd}", host=self._host())
@@ -141,8 +139,8 @@ class Connection(ConnectionBase):
local_cmd = self._build_command(cmd) local_cmd = self._build_command(cmd)
self._display.vvvvv(f"EXEC {local_cmd}", host=self._host()) self._display.vvvvv(f"EXEC {local_cmd}", host=self._host())
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru') in_data = to_bytes(in_data, errors="surrogate_or_strict", nonstring="passthru")
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(in_data) stdout, stderr = process.communicate(in_data)
@@ -165,27 +163,23 @@ class Connection(ConnectionBase):
rc, uid_out, err = self.exec_command("/bin/id -u") rc, uid_out, err = self.exec_command("/bin/id -u")
if rc != 0: if rc != 0:
raise AnsibleError( raise AnsibleError(f"Failed to get remote uid for user {self.get_option('remote_user')}: {err}")
f"Failed to get remote uid for user {self.get_option('remote_user')}: {err}"
)
uid = uid_out.strip() uid = uid_out.strip()
rc, gid_out, err = self.exec_command("/bin/id -g") rc, gid_out, err = self.exec_command("/bin/id -g")
if rc != 0: if rc != 0:
raise AnsibleError( raise AnsibleError(f"Failed to get remote gid for user {self.get_option('remote_user')}: {err}")
f"Failed to get remote gid for user {self.get_option('remote_user')}: {err}"
)
gid = gid_out.strip() gid = gid_out.strip()
return int(uid), int(gid) return int(uid), int(gid)
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" put a file from local to lxd """ """put a file from local to lxd"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
self._display.vvv(f"PUT {in_path} TO {out_path}", host=self._host()) self._display.vvv(f"PUT {in_path} TO {out_path}", host=self._host())
if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')): if not os.path.isfile(to_bytes(in_path, errors="surrogate_or_strict")):
raise AnsibleFileNotFound(f"input path is not a file: {in_path}") raise AnsibleFileNotFound(f"input path is not a file: {in_path}")
local_cmd = [self._lxc_cmd] local_cmd = [self._lxc_cmd]
@@ -218,13 +212,13 @@ class Connection(ConnectionBase):
self._display.vvvvv(f"PUT {local_cmd}", host=self._host()) self._display.vvvvv(f"PUT {local_cmd}", host=self._host())
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
process.communicate() process.communicate()
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from lxd to local """ """fetch a file from lxd to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self._host()) self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self._host())
@@ -232,19 +226,15 @@ class Connection(ConnectionBase):
local_cmd = [self._lxc_cmd] local_cmd = [self._lxc_cmd]
if self.get_option("project"): if self.get_option("project"):
local_cmd.extend(["--project", self.get_option("project")]) local_cmd.extend(["--project", self.get_option("project")])
local_cmd.extend([ local_cmd.extend(["file", "pull", f"{self.get_option('remote')}:{self._host()}/{in_path}", out_path])
"file", "pull",
f"{self.get_option('remote')}:{self._host()}/{in_path}",
out_path
])
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
process.communicate() process.communicate()
def close(self): def close(self):
""" close the connection (nothing to do here) """ """close the connection (nothing to do here)"""
super().close() super().close()
self._connected = False self._connected = False

View File

@@ -53,7 +53,7 @@ class Connection(ConnectionBase):
"""This is a connection plugin for qubes: it uses qubes-run-vm binary to interact with the containers.""" """This is a connection plugin for qubes: it uses qubes-run-vm binary to interact with the containers."""
# String used to identify this Connection class from other classes # String used to identify this Connection class from other classes
transport = 'community.general.qubes' transport = "community.general.qubes"
has_pipelining = True has_pipelining = True
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
@@ -88,16 +88,17 @@ class Connection(ConnectionBase):
local_cmd.append(shell) local_cmd.append(shell)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd] local_cmd = [to_bytes(i, errors="surrogate_or_strict") for i in local_cmd]
display.vvvv("Local cmd: ", local_cmd) display.vvvv("Local cmd: ", local_cmd)
display.vvv(f"RUN {local_cmd}", host=self._remote_vmname) display.vvv(f"RUN {local_cmd}", host=self._remote_vmname)
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE, p = subprocess.Popen(
stdout=subprocess.PIPE, stderr=subprocess.PIPE) local_cmd, shell=False, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
# Here we are writing the actual command to the remote bash # Here we are writing the actual command to the remote bash
p.stdin.write(to_bytes(cmd, errors='surrogate_or_strict')) p.stdin.write(to_bytes(cmd, errors="surrogate_or_strict"))
stdout, stderr = p.communicate(input=in_data) stdout, stderr = p.communicate(input=in_data)
return p.returncode, stdout, stderr return p.returncode, stdout, stderr
@@ -108,7 +109,7 @@ class Connection(ConnectionBase):
@ensure_connect # type: ignore # TODO: for some reason, the type infos for ensure_connect suck... @ensure_connect # type: ignore # TODO: for some reason, the type infos for ensure_connect suck...
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
"""Run specified command in a running QubesVM """ """Run specified command in a running QubesVM"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
display.vvvv(f"CMD IS: {cmd}") display.vvvv(f"CMD IS: {cmd}")
@@ -119,24 +120,24 @@ class Connection(ConnectionBase):
return rc, stdout, stderr return rc, stdout, stderr
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" Place a local file located in 'in_path' inside VM at 'out_path' """ """Place a local file located in 'in_path' inside VM at 'out_path'"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
display.vvv(f"PUT {in_path} TO {out_path}", host=self._remote_vmname) display.vvv(f"PUT {in_path} TO {out_path}", host=self._remote_vmname)
with open(in_path, "rb") as fobj: with open(in_path, "rb") as fobj:
source_data = fobj.read() source_data = fobj.read()
retcode, dummy, dummy = self._qubes(f'cat > "{out_path}\"\n', source_data, "qubes.VMRootShell") retcode, dummy, dummy = self._qubes(f'cat > "{out_path}"\n', source_data, "qubes.VMRootShell")
# if qubes.VMRootShell service not supported, fallback to qubes.VMShell and # if qubes.VMRootShell service not supported, fallback to qubes.VMShell and
# hope it will have appropriate permissions # hope it will have appropriate permissions
if retcode == 127: if retcode == 127:
retcode, dummy, dummy = self._qubes(f'cat > "{out_path}\"\n', source_data) retcode, dummy, dummy = self._qubes(f'cat > "{out_path}"\n', source_data)
if retcode != 0: if retcode != 0:
raise AnsibleConnectionFailure(f'Failed to put_file to {out_path}') raise AnsibleConnectionFailure(f"Failed to put_file to {out_path}")
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
"""Obtain file specified via 'in_path' from the container and place it at 'out_path' """ """Obtain file specified via 'in_path' from the container and place it at 'out_path'"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
display.vvv(f"FETCH {in_path} TO {out_path}", host=self._remote_vmname) display.vvv(f"FETCH {in_path} TO {out_path}", host=self._remote_vmname)
@@ -146,9 +147,9 @@ class Connection(ConnectionBase):
p = subprocess.Popen(cmd_args_list, shell=False, stdout=fobj) p = subprocess.Popen(cmd_args_list, shell=False, stdout=fobj)
p.communicate() p.communicate()
if p.returncode != 0: if p.returncode != 0:
raise AnsibleConnectionFailure(f'Failed to fetch file to {out_path}') raise AnsibleConnectionFailure(f"Failed to fetch file to {out_path}")
def close(self): def close(self):
""" Closing the connection """ """Closing the connection"""
super().close() super().close()
self._connected = False self._connected = False

View File

@@ -25,18 +25,19 @@ from ansible.plugins.connection import ConnectionBase
HAVE_SALTSTACK = False HAVE_SALTSTACK = False
try: try:
import salt.client as sc import salt.client as sc
HAVE_SALTSTACK = True HAVE_SALTSTACK = True
except ImportError: except ImportError:
pass pass
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Salt-based connections """ """Salt-based connections"""
has_pipelining = False has_pipelining = False
# while the name of the product is salt, naming that module salt cause # while the name of the product is salt, naming that module salt cause
# trouble with module import # trouble with module import
transport = 'community.general.saltstack' transport = "community.general.saltstack"
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, *args, **kwargs):
super().__init__(play_context, new_stdin, *args, **kwargs) super().__init__(play_context, new_stdin, *args, **kwargs)
@@ -51,7 +52,7 @@ class Connection(ConnectionBase):
return self return self
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
""" run a command on the remote minion """ """run a command on the remote minion"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
if in_data: if in_data:
@@ -59,12 +60,14 @@ class Connection(ConnectionBase):
self._display.vvv(f"EXEC {cmd}", host=self.host) self._display.vvv(f"EXEC {cmd}", host=self.host)
# need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077 # need to add 'true;' to work around https://github.com/saltstack/salt/issues/28077
res = self.client.cmd(self.host, 'cmd.exec_code_all', ['bash', f"true;{cmd}"]) res = self.client.cmd(self.host, "cmd.exec_code_all", ["bash", f"true;{cmd}"])
if self.host not in res: if self.host not in res:
raise errors.AnsibleError(f"Minion {self.host} didn't answer, check if salt-minion is running and the name is correct") raise errors.AnsibleError(
f"Minion {self.host} didn't answer, check if salt-minion is running and the name is correct"
)
p = res[self.host] p = res[self.host]
return p['retcode'], p['stdout'], p['stderr'] return p["retcode"], p["stdout"], p["stderr"]
@staticmethod @staticmethod
def _normalize_path(path, prefix): def _normalize_path(path, prefix):
@@ -74,27 +77,27 @@ class Connection(ConnectionBase):
return os.path.join(prefix, normpath[1:]) return os.path.join(prefix, normpath[1:])
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" transfer a file from local to remote """ """transfer a file from local to remote"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
out_path = self._normalize_path(out_path, '/') out_path = self._normalize_path(out_path, "/")
self._display.vvv(f"PUT {in_path} TO {out_path}", host=self.host) self._display.vvv(f"PUT {in_path} TO {out_path}", host=self.host)
with open(in_path, 'rb') as in_fh: with open(in_path, "rb") as in_fh:
content = in_fh.read() content = in_fh.read()
self.client.cmd(self.host, 'hashutil.base64_decodefile', [base64.b64encode(content), out_path]) self.client.cmd(self.host, "hashutil.base64_decodefile", [base64.b64encode(content), out_path])
# TODO test it # TODO test it
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from remote to local """ """fetch a file from remote to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
in_path = self._normalize_path(in_path, '/') in_path = self._normalize_path(in_path, "/")
self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self.host) self._display.vvv(f"FETCH {in_path} TO {out_path}", host=self.host)
content = self.client.cmd(self.host, 'cp.get_file_str', [in_path])[self.host] content = self.client.cmd(self.host, "cp.get_file_str", [in_path])[self.host]
open(out_path, 'wb').write(content) open(out_path, "wb").write(content)
def close(self): def close(self):
""" terminate the connection; nothing to do here """ """terminate the connection; nothing to do here"""
pass pass

View File

@@ -336,6 +336,7 @@ PARAMIKO_IMPORT_ERR: str | None
try: try:
import paramiko import paramiko
from paramiko import MissingHostKeyPolicy from paramiko import MissingHostKeyPolicy
PARAMIKO_IMPORT_ERR = None PARAMIKO_IMPORT_ERR = None
except ImportError: except ImportError:
PARAMIKO_IMPORT_ERR = traceback.format_exc() PARAMIKO_IMPORT_ERR = traceback.format_exc()
@@ -369,24 +370,22 @@ class MyAddPolicy(MissingHostKeyPolicy):
self._options = connection._options self._options = connection._options
def missing_host_key(self, client: paramiko.SSHClient, hostname: str, key: paramiko.PKey) -> None: def missing_host_key(self, client: paramiko.SSHClient, hostname: str, key: paramiko.PKey) -> None:
if all((self.connection.get_option("host_key_checking"), not self.connection.get_option("host_key_auto_add"))):
if all((self.connection.get_option('host_key_checking'), not self.connection.get_option('host_key_auto_add'))):
fingerprint = hexlify(key.get_fingerprint()) fingerprint = hexlify(key.get_fingerprint())
ktype = key.get_name() ktype = key.get_name()
if self.connection.get_option('use_persistent_connections') or self.connection.force_persistence: if self.connection.get_option("use_persistent_connections") or self.connection.force_persistence:
# don't print the prompt string since the user cannot respond # don't print the prompt string since the user cannot respond
# to the question anyway # to the question anyway
raise AnsibleError(authenticity_msg(hostname, ktype, fingerprint)[1:92]) raise AnsibleError(authenticity_msg(hostname, ktype, fingerprint)[1:92])
inp = to_text( inp = to_text(
display.prompt_until(authenticity_msg(hostname, ktype, fingerprint), private=False), display.prompt_until(authenticity_msg(hostname, ktype, fingerprint), private=False),
errors='surrogate_or_strict' errors="surrogate_or_strict",
) )
if inp.lower() not in ['yes', 'y', '']: if inp.lower() not in ["yes", "y", ""]:
raise AnsibleError('host connection rejected by user') raise AnsibleError("host connection rejected by user")
key._added_by_ansible_this_time = True # type: ignore key._added_by_ansible_this_time = True # type: ignore
@@ -398,88 +397,96 @@ class MyAddPolicy(MissingHostKeyPolicy):
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" SSH based connections (paramiko) to WSL """ """SSH based connections (paramiko) to WSL"""
transport = 'community.general.wsl' transport = "community.general.wsl"
_log_channel: str | None = None _log_channel: str | None = None
def __init__(self, play_context: PlayContext, new_stdin: io.TextIOWrapper | None = None, *args: t.Any, **kwargs: t.Any): def __init__(
self, play_context: PlayContext, new_stdin: io.TextIOWrapper | None = None, *args: t.Any, **kwargs: t.Any
):
super().__init__(play_context, new_stdin, *args, **kwargs) super().__init__(play_context, new_stdin, *args, **kwargs)
def _set_log_channel(self, name: str) -> None: def _set_log_channel(self, name: str) -> None:
""" Mimic paramiko.SSHClient.set_log_channel """ """Mimic paramiko.SSHClient.set_log_channel"""
self._log_channel = name self._log_channel = name
def _parse_proxy_command(self, port: int = 22) -> dict[str, t.Any]: def _parse_proxy_command(self, port: int = 22) -> dict[str, t.Any]:
proxy_command = self.get_option('proxy_command') or None proxy_command = self.get_option("proxy_command") or None
sock_kwarg = {} sock_kwarg = {}
if proxy_command: if proxy_command:
replacers: t.Dict[str, str] = { replacers: t.Dict[str, str] = {
'%h': self.get_option('remote_addr'), "%h": self.get_option("remote_addr"),
'%p': str(port), "%p": str(port),
'%r': self.get_option('remote_user') "%r": self.get_option("remote_user"),
} }
for find, replace in replacers.items(): for find, replace in replacers.items():
proxy_command = proxy_command.replace(find, replace) proxy_command = proxy_command.replace(find, replace)
try: try:
sock_kwarg = {'sock': paramiko.ProxyCommand(proxy_command)} sock_kwarg = {"sock": paramiko.ProxyCommand(proxy_command)}
display.vvv(f'CONFIGURE PROXY COMMAND FOR CONNECTION: {proxy_command}', host=self.get_option('remote_addr')) display.vvv(
f"CONFIGURE PROXY COMMAND FOR CONNECTION: {proxy_command}", host=self.get_option("remote_addr")
)
except AttributeError: except AttributeError:
display.warning('Paramiko ProxyCommand support unavailable. ' display.warning(
'Please upgrade to Paramiko 1.9.0 or newer. ' "Paramiko ProxyCommand support unavailable. "
'Not using configured ProxyCommand') "Please upgrade to Paramiko 1.9.0 or newer. "
"Not using configured ProxyCommand"
)
return sock_kwarg return sock_kwarg
def _connect(self) -> Connection: def _connect(self) -> Connection:
""" activates the connection object """ """activates the connection object"""
if PARAMIKO_IMPORT_ERR is not None: if PARAMIKO_IMPORT_ERR is not None:
raise AnsibleError(f'paramiko is not installed: {to_native(PARAMIKO_IMPORT_ERR)}') raise AnsibleError(f"paramiko is not installed: {to_native(PARAMIKO_IMPORT_ERR)}")
port = self.get_option('port') port = self.get_option("port")
display.vvv(f'ESTABLISH PARAMIKO SSH CONNECTION FOR USER: {self.get_option("remote_user")} on PORT {to_text(port)} TO {self.get_option("remote_addr")}', display.vvv(
host=self.get_option('remote_addr')) f"ESTABLISH PARAMIKO SSH CONNECTION FOR USER: {self.get_option('remote_user')} on PORT {to_text(port)} TO {self.get_option('remote_addr')}",
host=self.get_option("remote_addr"),
)
ssh = paramiko.SSHClient() ssh = paramiko.SSHClient()
# Set pubkey and hostkey algorithms to disable, the only manipulation allowed currently # Set pubkey and hostkey algorithms to disable, the only manipulation allowed currently
# is keeping or omitting rsa-sha2 algorithms # is keeping or omitting rsa-sha2 algorithms
# default_keys: t.Tuple[str] = () # default_keys: t.Tuple[str] = ()
paramiko_preferred_pubkeys = getattr(paramiko.Transport, '_preferred_pubkeys', ()) paramiko_preferred_pubkeys = getattr(paramiko.Transport, "_preferred_pubkeys", ())
paramiko_preferred_hostkeys = getattr(paramiko.Transport, '_preferred_keys', ()) paramiko_preferred_hostkeys = getattr(paramiko.Transport, "_preferred_keys", ())
use_rsa_sha2_algorithms = self.get_option('use_rsa_sha2_algorithms') use_rsa_sha2_algorithms = self.get_option("use_rsa_sha2_algorithms")
disabled_algorithms: t.Dict[str, t.Iterable[str]] = {} disabled_algorithms: t.Dict[str, t.Iterable[str]] = {}
if not use_rsa_sha2_algorithms: if not use_rsa_sha2_algorithms:
if paramiko_preferred_pubkeys: if paramiko_preferred_pubkeys:
disabled_algorithms['pubkeys'] = tuple(a for a in paramiko_preferred_pubkeys if 'rsa-sha2' in a) disabled_algorithms["pubkeys"] = tuple(a for a in paramiko_preferred_pubkeys if "rsa-sha2" in a)
if paramiko_preferred_hostkeys: if paramiko_preferred_hostkeys:
disabled_algorithms['keys'] = tuple(a for a in paramiko_preferred_hostkeys if 'rsa-sha2' in a) disabled_algorithms["keys"] = tuple(a for a in paramiko_preferred_hostkeys if "rsa-sha2" in a)
# override paramiko's default logger name # override paramiko's default logger name
if self._log_channel is not None: if self._log_channel is not None:
ssh.set_log_channel(self._log_channel) ssh.set_log_channel(self._log_channel)
self.keyfile = os.path.expanduser(self.get_option('user_known_hosts_file')) self.keyfile = os.path.expanduser(self.get_option("user_known_hosts_file"))
if self.get_option('host_key_checking'): if self.get_option("host_key_checking"):
for ssh_known_hosts in ('/etc/ssh/ssh_known_hosts', '/etc/openssh/ssh_known_hosts', self.keyfile): for ssh_known_hosts in ("/etc/ssh/ssh_known_hosts", "/etc/openssh/ssh_known_hosts", self.keyfile):
try: try:
ssh.load_system_host_keys(ssh_known_hosts) ssh.load_system_host_keys(ssh_known_hosts)
break break
except IOError: except IOError:
pass # file was not found, but not required to function pass # file was not found, but not required to function
except paramiko.hostkeys.InvalidHostKey as e: except paramiko.hostkeys.InvalidHostKey as e:
raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}') raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}")
try: try:
ssh.load_system_host_keys() ssh.load_system_host_keys()
except paramiko.hostkeys.InvalidHostKey as e: except paramiko.hostkeys.InvalidHostKey as e:
raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}') raise AnsibleConnectionFailure(f"Invalid host key: {to_text(e.line)}")
ssh_connect_kwargs = self._parse_proxy_command(port) ssh_connect_kwargs = self._parse_proxy_command(port)
ssh.set_missing_host_key_policy(MyAddPolicy(self)) ssh.set_missing_host_key_policy(MyAddPolicy(self))
conn_password = self.get_option('password') conn_password = self.get_option("password")
allow_agent = True allow_agent = True
if conn_password is not None: if conn_password is not None:
@@ -487,42 +494,42 @@ class Connection(ConnectionBase):
try: try:
key_filename = None key_filename = None
if self.get_option('private_key_file'): if self.get_option("private_key_file"):
key_filename = os.path.expanduser(self.get_option('private_key_file')) key_filename = os.path.expanduser(self.get_option("private_key_file"))
# paramiko 2.2 introduced auth_timeout parameter # paramiko 2.2 introduced auth_timeout parameter
if LooseVersion(paramiko.__version__) >= LooseVersion('2.2.0'): if LooseVersion(paramiko.__version__) >= LooseVersion("2.2.0"):
ssh_connect_kwargs['auth_timeout'] = self.get_option('timeout') ssh_connect_kwargs["auth_timeout"] = self.get_option("timeout")
# paramiko 1.15 introduced banner timeout parameter # paramiko 1.15 introduced banner timeout parameter
if LooseVersion(paramiko.__version__) >= LooseVersion('1.15.0'): if LooseVersion(paramiko.__version__) >= LooseVersion("1.15.0"):
ssh_connect_kwargs['banner_timeout'] = self.get_option('banner_timeout') ssh_connect_kwargs["banner_timeout"] = self.get_option("banner_timeout")
ssh.connect( ssh.connect(
self.get_option('remote_addr').lower(), self.get_option("remote_addr").lower(),
username=self.get_option('remote_user'), username=self.get_option("remote_user"),
allow_agent=allow_agent, allow_agent=allow_agent,
look_for_keys=self.get_option('look_for_keys'), look_for_keys=self.get_option("look_for_keys"),
key_filename=key_filename, key_filename=key_filename,
password=conn_password, password=conn_password,
timeout=self.get_option('timeout'), timeout=self.get_option("timeout"),
port=port, port=port,
disabled_algorithms=disabled_algorithms, disabled_algorithms=disabled_algorithms,
**ssh_connect_kwargs, **ssh_connect_kwargs,
) )
except paramiko.ssh_exception.BadHostKeyException as e: except paramiko.ssh_exception.BadHostKeyException as e:
raise AnsibleConnectionFailure(f'host key mismatch for {to_text(e.hostname)}') raise AnsibleConnectionFailure(f"host key mismatch for {to_text(e.hostname)}")
except paramiko.ssh_exception.AuthenticationException as e: except paramiko.ssh_exception.AuthenticationException as e:
msg = f'Failed to authenticate: {e}' msg = f"Failed to authenticate: {e}"
raise AnsibleAuthenticationFailure(msg) raise AnsibleAuthenticationFailure(msg)
except Exception as e: except Exception as e:
msg = to_text(e) msg = to_text(e)
if 'PID check failed' in msg: if "PID check failed" in msg:
raise AnsibleError('paramiko version issue, please upgrade paramiko on the machine running ansible') raise AnsibleError("paramiko version issue, please upgrade paramiko on the machine running ansible")
elif 'Private key file is encrypted' in msg: elif "Private key file is encrypted" in msg:
msg = ( msg = (
f'ssh {self.get_option("remote_user")}@{self.get_options("remote_addr")}:{port} : ' f"ssh {self.get_option('remote_user')}@{self.get_options('remote_addr')}:{port} : "
f'{msg}\nTo connect as a different user, use -u <username>.' f"{msg}\nTo connect as a different user, use -u <username>."
) )
raise AnsibleConnectionFailure(msg) raise AnsibleConnectionFailure(msg)
else: else:
@@ -534,7 +541,7 @@ class Connection(ConnectionBase):
def _any_keys_added(self) -> bool: def _any_keys_added(self) -> bool:
for hostname, keys in self.ssh._host_keys.items(): # type: ignore[attr-defined] # TODO: figure out what _host_keys is! for hostname, keys in self.ssh._host_keys.items(): # type: ignore[attr-defined] # TODO: figure out what _host_keys is!
for keytype, key in keys.items(): for keytype, key in keys.items():
added_this_time = getattr(key, '_added_by_ansible_this_time', False) added_this_time = getattr(key, "_added_by_ansible_this_time", False)
if added_this_time: if added_this_time:
return True return True
return False return False
@@ -548,42 +555,42 @@ class Connection(ConnectionBase):
if not self._any_keys_added(): if not self._any_keys_added():
return return
path = os.path.expanduser('~/.ssh') path = os.path.expanduser("~/.ssh")
makedirs_safe(path) makedirs_safe(path)
with open(filename, 'w') as f: with open(filename, "w") as f:
for hostname, keys in self.ssh._host_keys.items(): # type: ignore[attr-defined] # TODO: figure out what _host_keys is! for hostname, keys in self.ssh._host_keys.items(): # type: ignore[attr-defined] # TODO: figure out what _host_keys is!
for keytype, key in keys.items(): for keytype, key in keys.items():
# was f.write # was f.write
added_this_time = getattr(key, '_added_by_ansible_this_time', False) added_this_time = getattr(key, "_added_by_ansible_this_time", False)
if not added_this_time: if not added_this_time:
f.write(f'{hostname} {keytype} {key.get_base64()}\n') f.write(f"{hostname} {keytype} {key.get_base64()}\n")
for hostname, keys in self.ssh._host_keys.items(): # type: ignore[attr-defined] # TODO: figure out what _host_keys is! for hostname, keys in self.ssh._host_keys.items(): # type: ignore[attr-defined] # TODO: figure out what _host_keys is!
for keytype, key in keys.items(): for keytype, key in keys.items():
added_this_time = getattr(key, '_added_by_ansible_this_time', False) added_this_time = getattr(key, "_added_by_ansible_this_time", False)
if added_this_time: if added_this_time:
f.write(f'{hostname} {keytype} {key.get_base64()}\n') f.write(f"{hostname} {keytype} {key.get_base64()}\n")
def _build_wsl_command(self, cmd: str) -> str: def _build_wsl_command(self, cmd: str) -> str:
wsl_distribution = self.get_option('wsl_distribution') wsl_distribution = self.get_option("wsl_distribution")
become = self.get_option('become') become = self.get_option("become")
become_user = self.get_option('become_user') become_user = self.get_option("become_user")
if become and become_user: if become and become_user:
wsl_user = become_user wsl_user = become_user
else: else:
wsl_user = self.get_option('wsl_user') wsl_user = self.get_option("wsl_user")
args = ['wsl.exe', '--distribution', wsl_distribution] args = ["wsl.exe", "--distribution", wsl_distribution]
if wsl_user: if wsl_user:
args.extend(['--user', wsl_user]) args.extend(["--user", wsl_user])
args.extend(['--']) args.extend(["--"])
args.extend(shlex.split(cmd)) args.extend(shlex.split(cmd))
if os.getenv('_ANSIBLE_TEST_WSL_CONNECTION_PLUGIN_Waeri5tepheeSha2fae8'): if os.getenv("_ANSIBLE_TEST_WSL_CONNECTION_PLUGIN_Waeri5tepheeSha2fae8"):
return shlex.join(args) return shlex.join(args)
return list2cmdline(args) # see https://github.com/python/cpython/blob/3.11/Lib/subprocess.py#L576 return list2cmdline(args) # see https://github.com/python/cpython/blob/3.11/Lib/subprocess.py#L576
def exec_command(self, cmd: str, in_data: bytes | None = None, sudoable: bool = True) -> tuple[int, bytes, bytes]: def exec_command(self, cmd: str, in_data: bytes | None = None, sudoable: bool = True) -> tuple[int, bytes, bytes]:
""" run a command on inside a WSL distribution """ """run a command on inside a WSL distribution"""
cmd = self._build_wsl_command(cmd) cmd = self._build_wsl_command(cmd)
@@ -599,18 +606,18 @@ class Connection(ConnectionBase):
chan = transport.open_session() chan = transport.open_session()
except Exception as e: except Exception as e:
text_e = to_text(e) text_e = to_text(e)
msg = 'Failed to open session' msg = "Failed to open session"
if text_e: if text_e:
msg += f': {text_e}' msg += f": {text_e}"
raise AnsibleConnectionFailure(to_native(msg)) raise AnsibleConnectionFailure(to_native(msg))
display.vvv(f'EXEC {cmd}', host=self.get_option('remote_addr')) display.vvv(f"EXEC {cmd}", host=self.get_option("remote_addr"))
cmd = to_bytes(cmd, errors='surrogate_or_strict') cmd = to_bytes(cmd, errors="surrogate_or_strict")
no_prompt_out = b'' no_prompt_out = b""
no_prompt_err = b'' no_prompt_err = b""
become_output = b'' become_output = b""
try: try:
chan.exec_command(cmd) chan.exec_command(cmd)
@@ -618,14 +625,14 @@ class Connection(ConnectionBase):
password_prompt = False password_prompt = False
become_success = False become_success = False
while not (become_success or password_prompt): while not (become_success or password_prompt):
display.debug('Waiting for Privilege Escalation input') display.debug("Waiting for Privilege Escalation input")
chunk = chan.recv(bufsize) chunk = chan.recv(bufsize)
display.debug(f'chunk is: {to_text(chunk)}') display.debug(f"chunk is: {to_text(chunk)}")
if not chunk: if not chunk:
if b'unknown user' in become_output: if b"unknown user" in become_output:
n_become_user = to_native(self.become.get_option('become_user')) n_become_user = to_native(self.become.get_option("become_user"))
raise AnsibleError(f'user {n_become_user} does not exist') raise AnsibleError(f"user {n_become_user} does not exist")
else: else:
break break
# raise AnsibleError('ssh connection closed waiting for password prompt') # raise AnsibleError('ssh connection closed waiting for password prompt')
@@ -643,80 +650,78 @@ class Connection(ConnectionBase):
if password_prompt: if password_prompt:
if self.become: if self.become:
become_pass = self.become.get_option('become_pass') become_pass = self.become.get_option("become_pass")
chan.sendall(to_bytes(f"{become_pass}\n", errors='surrogate_or_strict')) chan.sendall(to_bytes(f"{become_pass}\n", errors="surrogate_or_strict"))
else: else:
raise AnsibleError('A password is required but none was supplied') raise AnsibleError("A password is required but none was supplied")
else: else:
no_prompt_out += become_output no_prompt_out += become_output
no_prompt_err += become_output no_prompt_err += become_output
if in_data: if in_data:
for i in range(0, len(in_data), bufsize): for i in range(0, len(in_data), bufsize):
chan.send(in_data[i:i + bufsize]) chan.send(in_data[i : i + bufsize])
chan.shutdown_write() chan.shutdown_write()
elif in_data == b'': elif in_data == b"":
chan.shutdown_write() chan.shutdown_write()
except socket.timeout: except socket.timeout:
raise AnsibleError(f'ssh timed out waiting for privilege escalation.\n{to_text(become_output)}') raise AnsibleError(f"ssh timed out waiting for privilege escalation.\n{to_text(become_output)}")
stdout = b''.join(chan.makefile('rb', bufsize)) stdout = b"".join(chan.makefile("rb", bufsize))
stderr = b''.join(chan.makefile_stderr('rb', bufsize)) stderr = b"".join(chan.makefile_stderr("rb", bufsize))
returncode = chan.recv_exit_status() returncode = chan.recv_exit_status()
# NB the full english error message is: # NB the full english error message is:
# 'wsl.exe' is not recognized as an internal or external command, # 'wsl.exe' is not recognized as an internal or external command,
# operable program or batch file. # operable program or batch file.
if "'wsl.exe' is not recognized" in stderr.decode('utf-8'): if "'wsl.exe' is not recognized" in stderr.decode("utf-8"):
raise AnsibleError( raise AnsibleError(f"wsl.exe not found in path of host: {to_text(self.get_option('remote_addr'))}")
f'wsl.exe not found in path of host: {to_text(self.get_option("remote_addr"))}')
return (returncode, no_prompt_out + stdout, no_prompt_out + stderr) return (returncode, no_prompt_out + stdout, no_prompt_out + stderr)
def put_file(self, in_path: str, out_path: str) -> None:
    """Transfer a local file into the WSL distribution by piping it to a remote ``cat``."""
    display.vvv(f"PUT {in_path} TO {out_path}", host=self.get_option("remote_addr"))
    try:
        with open(in_path, "rb") as src:
            payload = src.read()
        # Stream the content over stdin of `cat > out_path` inside the distribution.
        remote_cmd = f"{self._shell.executable} -c {self._shell.quote(f'cat > {out_path}')}"
        returncode, stdout, stderr = self.exec_command(remote_cmd, in_data=payload, sudoable=False)
        if returncode != 0:
            if "cat: not found" in stderr.decode("utf-8"):
                raise AnsibleError(
                    f"cat not found in path of WSL distribution: {to_text(self.get_option('wsl_distribution'))}"
                )
            raise AnsibleError(f"{to_text(stdout)}\n{to_text(stderr)}")
    except Exception as e:
        # Wrap everything (including our own raises above) in a uniform error.
        raise AnsibleError(f"error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}")
def fetch_file(self, in_path: str, out_path: str) -> None:
    """Copy a file out of the WSL distribution via a remote ``cat`` and save it locally."""
    display.vvv(f"FETCH {in_path} TO {out_path}", host=self.get_option("remote_addr"))
    try:
        remote_cmd = f"{self._shell.executable} -c {self._shell.quote(f'cat {in_path}')}"
        returncode, stdout, stderr = self.exec_command(remote_cmd, sudoable=False)
        if returncode != 0:
            if "cat: not found" in stderr.decode("utf-8"):
                raise AnsibleError(
                    f"cat not found in path of WSL distribution: {to_text(self.get_option('wsl_distribution'))}"
                )
            raise AnsibleError(f"{to_text(stdout)}\n{to_text(stderr)}")
        # The file content arrives on stdout; persist it as-is.
        with open(out_path, "wb") as dest:
            dest.write(stdout)
    except Exception as e:
        # Wrap everything (including our own raises above) in a uniform error.
        raise AnsibleError(f"error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}")
def reset(self) -> None: def reset(self) -> None:
""" reset the connection """ """reset the connection"""
if not self._connected: if not self._connected:
return return
@@ -724,9 +729,9 @@ class Connection(ConnectionBase):
self._connect() self._connect()
def close(self) -> None: def close(self) -> None:
""" terminate the connection """ """terminate the connection"""
if self.get_option('host_key_checking') and self.get_option('record_host_keys') and self._any_keys_added(): if self.get_option("host_key_checking") and self.get_option("record_host_keys") and self._any_keys_added():
# add any new SSH host keys -- warning -- this could be slow # add any new SSH host keys -- warning -- this could be slow
# (This doesn't acquire the connection lock because it needs # (This doesn't acquire the connection lock because it needs
# to exclude only other known_hosts writers, not connections # to exclude only other known_hosts writers, not connections
@@ -736,7 +741,7 @@ class Connection(ConnectionBase):
makedirs_safe(dirname) makedirs_safe(dirname)
tmp_keyfile_name = None tmp_keyfile_name = None
try: try:
with FileLock().lock_file(lockfile, dirname, self.get_option('lock_file_timeout')): with FileLock().lock_file(lockfile, dirname, self.get_option("lock_file_timeout")):
# just in case any were added recently # just in case any were added recently
self.ssh.load_system_host_keys() self.ssh.load_system_host_keys()
@@ -769,14 +774,14 @@ class Connection(ConnectionBase):
os.rename(tmp_keyfile_name, self.keyfile) os.rename(tmp_keyfile_name, self.keyfile)
except LockTimeout: except LockTimeout:
raise AnsibleError( raise AnsibleError(
f'writing lock file for {self.keyfile} ran in to the timeout of {self.get_option("lock_file_timeout")}s') f"writing lock file for {self.keyfile} ran in to the timeout of {self.get_option('lock_file_timeout')}s"
)
except paramiko.hostkeys.InvalidHostKey as e: except paramiko.hostkeys.InvalidHostKey as e:
raise AnsibleConnectionFailure(f'Invalid host key: {e.line}') raise AnsibleConnectionFailure(f"Invalid host key: {e.line}")
except Exception as e: except Exception as e:
# unable to save keys, including scenario when key was invalid # unable to save keys, including scenario when key was invalid
# and caught earlier # and caught earlier
raise AnsibleError( raise AnsibleError(f"error occurred while writing SSH host keys!\n{to_text(e)}")
f'error occurred while writing SSH host keys!\n{to_text(e)}')
finally: finally:
if tmp_keyfile_name is not None: if tmp_keyfile_name is not None:
pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True) pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True)

View File

@@ -42,9 +42,9 @@ display = Display()
class Connection(ConnectionBase): class Connection(ConnectionBase):
""" Local zone based connections """ """Local zone based connections"""
transport = 'community.general.zone' transport = "community.general.zone"
has_pipelining = True has_pipelining = True
has_tty = False has_tty = False
@@ -56,8 +56,8 @@ class Connection(ConnectionBase):
if os.geteuid() != 0: if os.geteuid() != 0:
raise AnsibleError("zone connection requires running as root") raise AnsibleError("zone connection requires running as root")
self.zoneadm_cmd = to_bytes(self._search_executable('zoneadm')) self.zoneadm_cmd = to_bytes(self._search_executable("zoneadm"))
self.zlogin_cmd = to_bytes(self._search_executable('zlogin')) self.zlogin_cmd = to_bytes(self._search_executable("zlogin"))
if self.zone not in self.list_zones(): if self.zone not in self.list_zones():
raise AnsibleError(f"incorrect zone name {self.zone}") raise AnsibleError(f"incorrect zone name {self.zone}")
@@ -70,15 +70,15 @@ class Connection(ConnectionBase):
raise AnsibleError(f"{executable} command not found in PATH") raise AnsibleError(f"{executable} command not found in PATH")
def list_zones(self):
    """Return the names of all non-global zones on the host.

    Parses the machine-readable output of ``zoneadm list -ip``, whose lines
    look like ``1:work:running:/zones/work:<uuid>:native:shared``.
    """
    process = subprocess.Popen(
        [self.zoneadm_cmd, "list", "-ip"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    zones = []
    for line in process.stdout.readlines():
        # Popen yields bytes on Python 3; decode before splitting, otherwise
        # bytes.split(":") raises TypeError and s[1] != "global" would always
        # be True when comparing bytes against str.
        fields = line.decode(errors="surrogateescape").split(":")
        if fields[1] != "global":
            zones.append(fields[1])
    return zones
@@ -86,23 +86,26 @@ class Connection(ConnectionBase):
def get_zone_path(self):
    """Return the zone's root filesystem path (zonepath plus ``/root``).

    ``zoneadm -z <zone> list -p`` prints a single line such as
    ``-:cswbuild:installed:/zones/cswbuild:<uuid>:native:shared``;
    field 3 is the zonepath.
    """
    process = subprocess.Popen(
        [self.zoneadm_cmd, "-z", to_bytes(self.zone), "list", "-p"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    # stdout is bytes on Python 3; decode before splitting so that the str
    # split/indexing and the f-string below behave correctly.
    path = process.stdout.readlines()[0].decode(errors="surrogateescape").split(":")[3]
    return f"{path}/root"
def _connect(self):
    """Mark the connection as established; a local zone needs no real handshake."""
    super()._connect()
    if self._connected:
        return
    display.vvv("THIS IS A LOCAL ZONE DIR", host=self.zone)
    self._connected = True
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE): def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
""" run a command on the zone. This is only needed for implementing """run a command on the zone. This is only needed for implementing
put_file() get_file() so that we don't have to read the whole file put_file() get_file() so that we don't have to read the whole file
into memory. into memory.
@@ -116,13 +119,12 @@ class Connection(ConnectionBase):
local_cmd = map(to_bytes, local_cmd) local_cmd = map(to_bytes, local_cmd)
display.vvv(f"EXEC {local_cmd}", host=self.zone) display.vvv(f"EXEC {local_cmd}", host=self.zone)
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, p = subprocess.Popen(local_cmd, shell=False, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p return p
def exec_command(self, cmd, in_data=None, sudoable=False): def exec_command(self, cmd, in_data=None, sudoable=False):
""" run a command on the zone """ """run a command on the zone"""
super().exec_command(cmd, in_data=in_data, sudoable=sudoable) super().exec_command(cmd, in_data=in_data, sudoable=sudoable)
p = self._buffered_exec_command(cmd) p = self._buffered_exec_command(cmd)
@@ -131,33 +133,33 @@ class Connection(ConnectionBase):
return p.returncode, stdout, stderr return p.returncode, stdout, stderr
def _prefix_login_path(self, remote_path): def _prefix_login_path(self, remote_path):
""" Make sure that we put files into a standard path """Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it. If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead. exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default. This also happens to be the former default.
Can revisit using $HOME instead if it is a problem Can revisit using $HOME instead if it is a problem
""" """
if not remote_path.startswith(os.path.sep): if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path) remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path) return os.path.normpath(remote_path)
def put_file(self, in_path, out_path): def put_file(self, in_path, out_path):
""" transfer a file from local to zone """ """transfer a file from local to zone"""
super().put_file(in_path, out_path) super().put_file(in_path, out_path)
display.vvv(f"PUT {in_path} TO {out_path}", host=self.zone) display.vvv(f"PUT {in_path} TO {out_path}", host=self.zone)
out_path = shlex_quote(self._prefix_login_path(out_path)) out_path = shlex_quote(self._prefix_login_path(out_path))
try: try:
with open(in_path, 'rb') as in_file: with open(in_path, "rb") as in_file:
if not os.fstat(in_file.fileno()).st_size: if not os.fstat(in_file.fileno()).st_size:
count = ' count=0' count = " count=0"
else: else:
count = '' count = ""
try: try:
p = self._buffered_exec_command(f'dd of={out_path} bs={BUFSIZE}{count}', stdin=in_file) p = self._buffered_exec_command(f"dd of={out_path} bs={BUFSIZE}{count}", stdin=in_file)
except OSError: except OSError:
raise AnsibleError("jail connection requires dd command in the jail") raise AnsibleError("jail connection requires dd command in the jail")
try: try:
@@ -171,17 +173,17 @@ class Connection(ConnectionBase):
raise AnsibleError(f"file or module does not exist at: {in_path}") raise AnsibleError(f"file or module does not exist at: {in_path}")
def fetch_file(self, in_path, out_path): def fetch_file(self, in_path, out_path):
""" fetch a file from zone to local """ """fetch a file from zone to local"""
super().fetch_file(in_path, out_path) super().fetch_file(in_path, out_path)
display.vvv(f"FETCH {in_path} TO {out_path}", host=self.zone) display.vvv(f"FETCH {in_path} TO {out_path}", host=self.zone)
in_path = shlex_quote(self._prefix_login_path(in_path)) in_path = shlex_quote(self._prefix_login_path(in_path))
try: try:
p = self._buffered_exec_command(f'dd if={in_path} bs={BUFSIZE}') p = self._buffered_exec_command(f"dd if={in_path} bs={BUFSIZE}")
except OSError: except OSError:
raise AnsibleError("zone connection requires dd command in the zone") raise AnsibleError("zone connection requires dd command in the zone")
with open(out_path, 'wb+') as out_file: with open(out_path, "wb+") as out_file:
try: try:
chunk = p.stdout.read(BUFSIZE) chunk = p.stdout.read(BUFSIZE)
while chunk: while chunk:
@@ -195,6 +197,6 @@ class Connection(ConnectionBase):
raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}") raise AnsibleError(f"failed to transfer file {in_path} to {out_path}:\n{stdout}\n{stderr}")
def close(self):
    """Terminate the (purely logical) connection.

    There is nothing to release for a local zone; just run the base-class
    bookkeeping and clear the connected flag.
    """
    super().close()
    self._connected = False

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2017-present Alibaba Group Holding Limited. He Guimin <heguimin36@163.com> # Copyright (c) 2017-present Alibaba Group Holding Limited. He Guimin <heguimin36@163.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Alicloud only documentation fragment # Alicloud only documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) Ansible Project # Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard documentation fragment # Standard documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: {} options: {}
@@ -27,7 +25,7 @@ attributes:
""" """
# Should be used together with the standard fragment # Should be used together with the standard fragment
INFO_MODULE = r''' INFO_MODULE = r"""
options: {} options: {}
attributes: attributes:
check_mode: check_mode:
@@ -38,7 +36,7 @@ attributes:
support: N/A support: N/A
details: details:
- This action does not modify state. - This action does not modify state.
''' """
CONN = r""" CONN = r"""
options: {} options: {}
@@ -59,7 +57,7 @@ attributes:
""" """
# Should be used together with the standard fragment and the FACTS fragment # Should be used together with the standard fragment and the FACTS fragment
FACTS_MODULE = r''' FACTS_MODULE = r"""
options: {} options: {}
attributes: attributes:
check_mode: check_mode:
@@ -72,7 +70,7 @@ attributes:
- This action does not modify state. - This action does not modify state.
facts: facts:
support: full support: full
''' """
FILES = r""" FILES = r"""
options: {} options: {}

View File

@@ -6,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard files documentation fragment # Standard files documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2019, Evgeniy Krysanov <evgeniy.krysanov@gmail.com> # Copyright (c) 2019, Evgeniy Krysanov <evgeniy.krysanov@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard documentation fragment # Standard documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -16,7 +16,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Dimension Data doc fragment # Dimension Data doc fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -16,7 +16,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Dimension Data ("wait-for-completion" parameters) doc fragment # Dimension Data ("wait-for-completion" parameters) doc fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2018, Luca Lorenzetto (@remix_tj) <lorenzetto.luca@gmail.com> # Copyright (c) 2018, Luca Lorenzetto (@remix_tj) <lorenzetto.luca@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,9 +6,8 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Documentation fragment for VNX (emc_vnx) # Documentation fragment for VNX (emc_vnx)
EMC_VNX = r''' EMC_VNX = r"""
options: options:
sp_address: sp_address:
description: description:
@@ -31,4 +29,4 @@ requirements:
- storops (0.5.10 or greater). Install using C(pip install storops). - storops (0.5.10 or greater). Install using C(pip install storops).
notes: notes:
- The modules prefixed with C(emc_vnx) are built to support the EMC VNX storage platform. - The modules prefixed with C(emc_vnx) are built to support the EMC VNX storage platform.
''' """

View File

@@ -6,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard files documentation fragment # Standard files documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
requirements: requirements:

View File

@@ -6,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# HPE 3PAR doc fragment # HPE 3PAR doc fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -6,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# HWC doc fragment. # HWC doc fragment.
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2018, IBM CORPORATION # Copyright (c) 2018, IBM CORPORATION
# Author(s): Tzur Eliyahu <tzure@il.ibm.com> # Author(s): Tzur Eliyahu <tzure@il.ibm.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -8,7 +7,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# ibm_storage documentation fragment # ibm_storage documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2017, Ansible Project # Copyright (c) 2017, Ansible Project
# Copyright (c) 2017, Abhijeet Kasurde (akasurde@redhat.com) # Copyright (c) 2017, Abhijeet Kasurde (akasurde@redhat.com)
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2017-18, Ansible Project # Copyright (c) 2017-18, Ansible Project
# Copyright (c) 2017-18, Abhijeet Kasurde (akasurde@redhat.com) # Copyright (c) 2017-18, Abhijeet Kasurde (akasurde@redhat.com)
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2017, Eike Frost <ei@kefro.st> # Copyright (c) 2017, Eike Frost <ei@kefro.st>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard documentation fragment # Standard documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2016, Peter Sagerson <psagers@ignorare.net> # Copyright (c) 2016, Peter Sagerson <psagers@ignorare.net>
# Copyright (c) 2016, Jiri Tyr <jiri.tyr@gmail.com> # Copyright (c) 2016, Jiri Tyr <jiri.tyr@gmail.com>
# Copyright (c) 2017-2018 Keller Fuchs (@KellerFuchs) <kellerfuchs@hashbang.sh> # Copyright (c) 2017-2018 Keller Fuchs (@KellerFuchs) <kellerfuchs@hashbang.sh>

View File

@@ -1,4 +1,3 @@
# Copyright (C) 2017 Lenovo, Inc. # Copyright (C) 2017 Lenovo, Inc.
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause) # Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
# SPDX-License-Identifier: BSD-2-Clause # SPDX-License-Identifier: BSD-2-Clause

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2017, Daniel Korn <korndaniel1@gmail.com> # Copyright (c) 2017, Daniel Korn <korndaniel1@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard ManageIQ documentation fragment # Standard ManageIQ documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2020 FERREIRA Christophe <christophe.ferreira@cnaf.fr> # Copyright (c) 2020 FERREIRA Christophe <christophe.ferreira@cnaf.fr>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard files documentation fragment # Standard files documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2023, Ansible Project # Copyright (c) 2023, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later

View File

@@ -7,7 +7,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# OneView doc fragment # OneView doc fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -6,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard documentation fragment # Standard documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2018, www.privaz.io Valletech AB # Copyright (c) 2018, www.privaz.io Valletech AB
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2015, Peter Sprygada <psprygada@ansible.com> # Copyright (c) 2015, Peter Sprygada <psprygada@ansible.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard files documentation fragment # Standard files documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com> # Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2021, Florian Dambrine <android.florian@gmail.com> # Copyright (c) 2021, Florian Dambrine <android.florian@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:
pritunl_url: pritunl_url:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2025 Ansible community # Copyright (c) 2025 Ansible community
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Use together with the community.general.redfish module utils' REDFISH_COMMON_ARGUMENT_SPEC # Use together with the community.general.redfish module utils' REDFISH_COMMON_ARGUMENT_SPEC
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2021, Andreas Botzner <andreas at botzner dot com> # Copyright (c) 2021, Andreas Botzner <andreas at botzner dot com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2021, Phillipe Smith <phsmithcc@gmail.com> # Copyright (c) 2021, Phillipe Smith <phsmithcc@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard files documentation fragment # Standard files documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2018, Yanis Guenane <yanis+ansible@guenane.org> # Copyright (c) 2018, Yanis Guenane <yanis+ansible@guenane.org>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard documentation fragment # Standard documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2022, Guillaume MARTINEZ <lunik@tiwabbit.fr> # Copyright (c) 2022, Guillaume MARTINEZ <lunik@tiwabbit.fr>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -7,7 +6,6 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Standard documentation fragment # Standard documentation fragment
DOCUMENTATION = r""" DOCUMENTATION = r"""
options: options:

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com> # Copyright (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later

View File

@@ -7,9 +7,8 @@ from __future__ import annotations
class ModuleDocFragment: class ModuleDocFragment:
# Documentation fragment for Vexata VX100 series # Documentation fragment for Vexata VX100 series
VX100 = r''' VX100 = r"""
options: options:
array: array:
description: description:
@@ -41,4 +40,4 @@ requirements:
- vexatapi >= 0.0.1 - vexatapi >= 0.0.1
- E(VEXATA_USER) and E(VEXATA_PASSWORD) environment variables must be set if - E(VEXATA_USER) and E(VEXATA_PASSWORD) environment variables must be set if
user and password arguments are not passed to the module directly. user and password arguments are not passed to the module directly.
''' """

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2018, Bojan Vitnik <bvitnik@mainstream.rs> # Copyright (c) 2018, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later

View File

@@ -49,14 +49,13 @@ from ansible.errors import AnsibleFilterError
def list_accumulate(sequence): def list_accumulate(sequence):
if not isinstance(sequence, Sequence): if not isinstance(sequence, Sequence):
raise AnsibleFilterError(f'Invalid value type ({type(sequence)}) for accumulate ({sequence!r})') raise AnsibleFilterError(f"Invalid value type ({type(sequence)}) for accumulate ({sequence!r})")
return accumulate(sequence) return accumulate(sequence)
class FilterModule: class FilterModule:
def filters(self): def filters(self):
return { return {
'accumulate': list_accumulate, "accumulate": list_accumulate,
} }

View File

@@ -40,9 +40,11 @@ from collections import Counter
def counter(sequence): def counter(sequence):
''' Count elements in a sequence. Returns dict with count result. ''' """Count elements in a sequence. Returns dict with count result."""
if not isinstance(sequence, Sequence): if not isinstance(sequence, Sequence):
raise AnsibleFilterError(f'Argument for community.general.counter must be a sequence (string or list). {sequence} is {type(sequence)}') raise AnsibleFilterError(
f"Argument for community.general.counter must be a sequence (string or list). {sequence} is {type(sequence)}"
)
try: try:
result = dict(Counter(sequence)) result = dict(Counter(sequence))
@@ -54,11 +56,11 @@ def counter(sequence):
class FilterModule: class FilterModule:
''' Ansible counter jinja2 filters ''' """Ansible counter jinja2 filters"""
def filters(self): def filters(self):
filters = { filters = {
'counter': counter, "counter": counter,
} }
return filters return filters

View File

@@ -9,6 +9,7 @@ from ansible.module_utils.common.collections import is_string
try: try:
from zlib import crc32 from zlib import crc32
HAS_ZLIB = True HAS_ZLIB = True
except ImportError: except ImportError:
HAS_ZLIB = False HAS_ZLIB = False
@@ -45,17 +46,17 @@ _value:
def crc32s(value): def crc32s(value):
if not is_string(value): if not is_string(value):
raise AnsibleFilterError(f'Invalid value type ({type(value)}) for crc32 ({value!r})') raise AnsibleFilterError(f"Invalid value type ({type(value)}) for crc32 ({value!r})")
if not HAS_ZLIB: if not HAS_ZLIB:
raise AnsibleFilterError('Failed to import zlib module') raise AnsibleFilterError("Failed to import zlib module")
data = to_bytes(value, errors='surrogate_or_strict') data = to_bytes(value, errors="surrogate_or_strict")
return f"{crc32(data) & 0xffffffff:x}" return f"{crc32(data) & 0xFFFFFFFF:x}"
class FilterModule: class FilterModule:
def filters(self): def filters(self):
return { return {
'crc32': crc32s, "crc32": crc32s,
} }

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2021, Felix Fontein <felix@fontein.de> # Copyright (c) 2021, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -61,17 +60,17 @@ _value:
def dict_filter(sequence): def dict_filter(sequence):
'''Convert a list of tuples to a dictionary. """Convert a list of tuples to a dictionary.
Example: ``[[1, 2], ['a', 'b']] | community.general.dict`` results in ``{1: 2, 'a': 'b'}`` Example: ``[[1, 2], ['a', 'b']] | community.general.dict`` results in ``{1: 2, 'a': 'b'}``
''' """
return dict(sequence) return dict(sequence)
class FilterModule: class FilterModule:
'''Ansible jinja2 filters''' """Ansible jinja2 filters"""
def filters(self): def filters(self):
return { return {
'dict': dict_filter, "dict": dict_filter,
} }

View File

@@ -38,7 +38,7 @@ _value:
def dict_kv(value, key): def dict_kv(value, key):
'''Return a dictionary with a single key-value pair """Return a dictionary with a single key-value pair
Example: Example:
@@ -89,14 +89,12 @@ def dict_kv(value, key):
} }
] ]
} }
''' """
return {key: value} return {key: value}
class FilterModule: class FilterModule:
''' Query filter ''' """Query filter"""
def filters(self): def filters(self):
return { return {"dict_kv": dict_kv}
'dict_kv': dict_kv
}

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com> # Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
# Copyright (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com> # Copyright (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -80,13 +79,16 @@ _value:
from ansible.errors import AnsibleFilterError from ansible.errors import AnsibleFilterError
from ansible_collections.community.general.plugins.module_utils.csv import (initialize_dialect, read_csv, CSVError, from ansible_collections.community.general.plugins.module_utils.csv import (
DialectNotAvailableError, initialize_dialect,
CustomDialectFailureError) read_csv,
CSVError,
DialectNotAvailableError,
CustomDialectFailureError,
)
def from_csv(data, dialect='excel', fieldnames=None, delimiter=None, skipinitialspace=None, strict=None): def from_csv(data, dialect="excel", fieldnames=None, delimiter=None, skipinitialspace=None, strict=None):
dialect_params = { dialect_params = {
"delimiter": delimiter, "delimiter": delimiter,
"skipinitialspace": skipinitialspace, "skipinitialspace": skipinitialspace,
@@ -112,8 +114,5 @@ def from_csv(data, dialect='excel', fieldnames=None, delimiter=None, skipinitial
class FilterModule: class FilterModule:
def filters(self): def filters(self):
return { return {"from_csv": from_csv}
'from_csv': from_csv
}

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2023, Steffen Scheib <steffen@scheib.me> # Copyright (c) 2023, Steffen Scheib <steffen@scheib.me>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -51,7 +50,7 @@ from ansible.errors import AnsibleFilterError
class IniParser(ConfigParser): class IniParser(ConfigParser):
''' Implements a configparser which is able to return a dict ''' """Implements a configparser which is able to return a dict"""
def __init__(self): def __init__(self):
super().__init__(interpolation=None) super().__init__(interpolation=None)
@@ -61,35 +60,32 @@ class IniParser(ConfigParser):
d = dict(self._sections) d = dict(self._sections)
for k in d: for k in d:
d[k] = dict(self._defaults, **d[k]) d[k] = dict(self._defaults, **d[k])
d[k].pop('__name__', None) d[k].pop("__name__", None)
if self._defaults: if self._defaults:
d['DEFAULT'] = dict(self._defaults) d["DEFAULT"] = dict(self._defaults)
return d return d
def from_ini(obj): def from_ini(obj):
''' Read the given string as INI file and return a dict ''' """Read the given string as INI file and return a dict"""
if not isinstance(obj, str): if not isinstance(obj, str):
raise AnsibleFilterError(f'from_ini requires a str, got {type(obj)}') raise AnsibleFilterError(f"from_ini requires a str, got {type(obj)}")
parser = IniParser() parser = IniParser()
try: try:
parser.read_file(StringIO(obj)) parser.read_file(StringIO(obj))
except Exception as ex: except Exception as ex:
raise AnsibleFilterError(f'from_ini failed to parse given string: {ex}', orig_exc=ex) raise AnsibleFilterError(f"from_ini failed to parse given string: {ex}", orig_exc=ex)
return parser.as_dict() return parser.as_dict()
class FilterModule: class FilterModule:
''' Query filter ''' """Query filter"""
def filters(self): def filters(self):
return {"from_ini": from_ini}
return {
'from_ini': from_ini
}

View File

@@ -57,33 +57,33 @@ from collections.abc import Mapping, Sequence
def groupby_as_dict(sequence, attribute): def groupby_as_dict(sequence, attribute):
''' """
Given a sequence of dictionaries and an attribute name, returns a dictionary mapping Given a sequence of dictionaries and an attribute name, returns a dictionary mapping
the value of this attribute to the dictionary. the value of this attribute to the dictionary.
If multiple dictionaries in the sequence have the same value for this attribute, If multiple dictionaries in the sequence have the same value for this attribute,
the filter will fail. the filter will fail.
''' """
if not isinstance(sequence, Sequence): if not isinstance(sequence, Sequence):
raise AnsibleFilterError('Input is not a sequence') raise AnsibleFilterError("Input is not a sequence")
result = dict() result = dict()
for list_index, element in enumerate(sequence): for list_index, element in enumerate(sequence):
if not isinstance(element, Mapping): if not isinstance(element, Mapping):
raise AnsibleFilterError(f'Sequence element #{list_index} is not a mapping') raise AnsibleFilterError(f"Sequence element #{list_index} is not a mapping")
if attribute not in element: if attribute not in element:
raise AnsibleFilterError(f'Attribute not contained in element #{list_index} of sequence') raise AnsibleFilterError(f"Attribute not contained in element #{list_index} of sequence")
result_index = element[attribute] result_index = element[attribute]
if result_index in result: if result_index in result:
raise AnsibleFilterError(f'Multiple sequence entries have attribute value {result_index!r}') raise AnsibleFilterError(f"Multiple sequence entries have attribute value {result_index!r}")
result[result_index] = element result[result_index] = element
return result return result
class FilterModule: class FilterModule:
''' Ansible list filters ''' """Ansible list filters"""
def filters(self): def filters(self):
return { return {
'groupby_as_dict': groupby_as_dict, "groupby_as_dict": groupby_as_dict,
} }

View File

@@ -1,4 +1,3 @@
# Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com> # Copyright (c) 2021, Andrew Pantuso (@ajpantuso) <ajpantuso@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later # SPDX-License-Identifier: GPL-3.0-or-later
@@ -20,6 +19,7 @@ except ImportError:
try: try:
from hashids import Hashids from hashids import Hashids
HAS_HASHIDS = True HAS_HASHIDS = True
except ImportError: except ImportError:
HAS_HASHIDS = False HAS_HASHIDS = False
@@ -35,27 +35,21 @@ def initialize_hashids(**kwargs):
return Hashids(**params) return Hashids(**params)
except TypeError as e: except TypeError as e:
raise AnsibleFilterError( raise AnsibleFilterError(
"The provided parameters %s are invalid: %s" % ( "The provided parameters %s are invalid: %s"
', '.join(["%s=%s" % (k, v) for k, v in params.items()]), % (", ".join(["%s=%s" % (k, v) for k, v in params.items()]), to_native(e))
to_native(e)
)
) )
def hashids_encode(nums, salt=None, alphabet=None, min_length=None): def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
"""Generates a YouTube-like hash from a sequence of ints """Generates a YouTube-like hash from a sequence of ints
:nums: Sequence of one or more ints to hash :nums: Sequence of one or more ints to hash
:salt: String to use as salt when hashing :salt: String to use as salt when hashing
:alphabet: String of 16 or more unique characters to produce a hash :alphabet: String of 16 or more unique characters to produce a hash
:min_length: Minimum length of hash produced :min_length: Minimum length of hash produced
""" """
hashids = initialize_hashids( hashids = initialize_hashids(salt=salt, alphabet=alphabet, min_length=min_length)
salt=salt,
alphabet=alphabet,
min_length=min_length
)
# Handles the case where a single int is not encapsulated in a list or tuple. # Handles the case where a single int is not encapsulated in a list or tuple.
# User convenience seems preferable to strict typing in this case # User convenience seems preferable to strict typing in this case
@@ -74,25 +68,20 @@ def hashids_encode(nums, salt=None, alphabet=None, min_length=None):
def hashids_decode(hashid, salt=None, alphabet=None, min_length=None): def hashids_decode(hashid, salt=None, alphabet=None, min_length=None):
"""Decodes a YouTube-like hash to a sequence of ints """Decodes a YouTube-like hash to a sequence of ints
:hashid: Hash string to decode :hashid: Hash string to decode
:salt: String to use as salt when hashing :salt: String to use as salt when hashing
:alphabet: String of 16 or more unique characters to produce a hash :alphabet: String of 16 or more unique characters to produce a hash
:min_length: Minimum length of hash produced :min_length: Minimum length of hash produced
""" """
hashids = initialize_hashids( hashids = initialize_hashids(salt=salt, alphabet=alphabet, min_length=min_length)
salt=salt,
alphabet=alphabet,
min_length=min_length
)
nums = hashids.decode(hashid) nums = hashids.decode(hashid)
return list(nums) return list(nums)
class FilterModule: class FilterModule:
def filters(self): def filters(self):
return { return {
'hashids_encode': hashids_encode, "hashids_encode": hashids_encode,
'hashids_decode': hashids_decode, "hashids_decode": hashids_decode,
} }

View File

@@ -79,6 +79,7 @@ import importlib
try: try:
import jc import jc
HAS_LIB = True HAS_LIB = True
except ImportError: except ImportError:
HAS_LIB = False HAS_LIB = False
@@ -133,26 +134,28 @@ def jc_filter(data, parser, quiet=True, raw=False):
""" """
if not HAS_LIB: if not HAS_LIB:
raise AnsibleError('You need to install "jc" as a Python library on the Ansible controller prior to running jc filter') raise AnsibleError(
'You need to install "jc" as a Python library on the Ansible controller prior to running jc filter'
)
try: try:
# new API (jc v1.18.0 and higher) allows use of plugin parsers # new API (jc v1.18.0 and higher) allows use of plugin parsers
if hasattr(jc, 'parse'): if hasattr(jc, "parse"):
return jc.parse(parser, data, quiet=quiet, raw=raw) return jc.parse(parser, data, quiet=quiet, raw=raw)
# old API (jc v1.17.7 and lower) # old API (jc v1.17.7 and lower)
else: else:
jc_parser = importlib.import_module(f'jc.parsers.{parser}') jc_parser = importlib.import_module(f"jc.parsers.{parser}")
return jc_parser.parse(data, quiet=quiet, raw=raw) return jc_parser.parse(data, quiet=quiet, raw=raw)
except Exception as e: except Exception as e:
raise AnsibleFilterError(f'Error in jc filter plugin: {e}') raise AnsibleFilterError(f"Error in jc filter plugin: {e}")
class FilterModule: class FilterModule:
''' Query filter ''' """Query filter"""
def filters(self): def filters(self):
return { return {
'jc': jc_filter, "jc": jc_filter,
} }

View File

@@ -35,39 +35,28 @@ class FilterModule:
try: try:
return loads(inp) return loads(inp)
except Exception as e: except Exception as e:
raise AnsibleFilterError( raise AnsibleFilterError(f"{filter_name}: could not decode JSON from {object_name}: {e}") from e
f"{filter_name}: could not decode JSON from {object_name}: {e}"
) from e
if not isinstance(inp, (list, dict)): if not isinstance(inp, (list, dict)):
raise AnsibleFilterError( raise AnsibleFilterError(f"{filter_name}: {object_name} is not dictionary, list or string")
f"{filter_name}: {object_name} is not dictionary, list or string"
)
return inp return inp
def check_patch_arguments(self, filter_name: str, args: dict): def check_patch_arguments(self, filter_name: str, args: dict):
if "op" not in args or not isinstance(args["op"], str): if "op" not in args or not isinstance(args["op"], str):
raise AnsibleFilterError(f"{filter_name}: 'op' argument is not a string") raise AnsibleFilterError(f"{filter_name}: 'op' argument is not a string")
if args["op"] not in OPERATIONS_AVAILABLE: if args["op"] not in OPERATIONS_AVAILABLE:
raise AnsibleFilterError( raise AnsibleFilterError(f"{filter_name}: unsupported 'op' argument: {args['op']}")
f"{filter_name}: unsupported 'op' argument: {args['op']}"
)
if "path" not in args or not isinstance(args["path"], str): if "path" not in args or not isinstance(args["path"], str):
raise AnsibleFilterError(f"{filter_name}: 'path' argument is not a string") raise AnsibleFilterError(f"{filter_name}: 'path' argument is not a string")
if args["op"] in OPERATIONS_NEEDING_FROM: if args["op"] in OPERATIONS_NEEDING_FROM:
if "from" not in args: if "from" not in args:
raise AnsibleFilterError( raise AnsibleFilterError(f"{filter_name}: 'from' argument missing for '{args['op']}' operation")
f"{filter_name}: 'from' argument missing for '{args['op']}' operation"
)
if not isinstance(args["from"], str): if not isinstance(args["from"], str):
raise AnsibleFilterError( raise AnsibleFilterError(f"{filter_name}: 'from' argument is not a string")
f"{filter_name}: 'from' argument is not a string"
)
def json_patch( def json_patch(
self, self,
@@ -77,7 +66,6 @@ class FilterModule:
value: Any = None, value: Any = None,
**kwargs: dict, **kwargs: dict,
) -> Any: ) -> Any:
if not HAS_LIB: if not HAS_LIB:
raise AnsibleFilterError( raise AnsibleFilterError(
"You need to install 'jsonpatch' package prior to running 'json_patch' filter" "You need to install 'jsonpatch' package prior to running 'json_patch' filter"
@@ -88,9 +76,7 @@ class FilterModule:
fail_test = kwargs.pop("fail_test", False) fail_test = kwargs.pop("fail_test", False)
if kwargs: if kwargs:
raise AnsibleFilterError( raise AnsibleFilterError(f"json_patch: unexpected keywords arguments: {', '.join(sorted(kwargs))}")
f"json_patch: unexpected keywords arguments: {', '.join(sorted(kwargs))}"
)
if not isinstance(fail_test, bool): if not isinstance(fail_test, bool):
raise AnsibleFilterError("json_patch: 'fail_test' argument is not a bool") raise AnsibleFilterError("json_patch: 'fail_test' argument is not a bool")
@@ -109,9 +95,7 @@ class FilterModule:
result = jsonpatch.apply_patch(inp, [args]) result = jsonpatch.apply_patch(inp, [args])
except jsonpatch.JsonPatchTestFailed as e: except jsonpatch.JsonPatchTestFailed as e:
if fail_test: if fail_test:
raise AnsibleFilterError( raise AnsibleFilterError(f"json_patch: test operation failed: {e}") from e
f"json_patch: test operation failed: {e}"
) from e
else: else:
pass pass
except Exception as e: except Exception as e:
@@ -126,16 +110,13 @@ class FilterModule:
/, /,
fail_test: bool = False, fail_test: bool = False,
) -> Any: ) -> Any:
if not HAS_LIB: if not HAS_LIB:
raise AnsibleFilterError( raise AnsibleFilterError(
"You need to install 'jsonpatch' package prior to running 'json_patch_recipe' filter" "You need to install 'jsonpatch' package prior to running 'json_patch_recipe' filter"
) from JSONPATCH_IMPORT_ERROR ) from JSONPATCH_IMPORT_ERROR
if not isinstance(operations, list): if not isinstance(operations, list):
raise AnsibleFilterError( raise AnsibleFilterError("json_patch_recipe: 'operations' needs to be a list")
"json_patch_recipe: 'operations' needs to be a list"
)
if not isinstance(fail_test, bool): if not isinstance(fail_test, bool):
raise AnsibleFilterError("json_patch: 'fail_test' argument is not a bool") raise AnsibleFilterError("json_patch: 'fail_test' argument is not a bool")
@@ -150,9 +131,7 @@ class FilterModule:
result = jsonpatch.apply_patch(inp, operations) result = jsonpatch.apply_patch(inp, operations)
except jsonpatch.JsonPatchTestFailed as e: except jsonpatch.JsonPatchTestFailed as e:
if fail_test: if fail_test:
raise AnsibleFilterError( raise AnsibleFilterError(f"json_patch_recipe: test operation failed: {e}") from e
f"json_patch_recipe: test operation failed: {e}"
) from e
else: else:
pass pass
except Exception as e: except Exception as e:
@@ -165,7 +144,6 @@ class FilterModule:
inp: Union[str, list, dict, bytes, bytearray], inp: Union[str, list, dict, bytes, bytearray],
target: Union[str, list, dict, bytes, bytearray], target: Union[str, list, dict, bytes, bytearray],
) -> list: ) -> list:
if not HAS_LIB: if not HAS_LIB:
raise AnsibleFilterError( raise AnsibleFilterError(
"You need to install 'jsonpatch' package prior to running 'json_diff' filter" "You need to install 'jsonpatch' package prior to running 'json_diff' filter"

View File

@@ -109,44 +109,46 @@ from ansible.errors import AnsibleError, AnsibleFilterError
try: try:
import jmespath import jmespath
HAS_LIB = True HAS_LIB = True
except ImportError: except ImportError:
HAS_LIB = False HAS_LIB = False
def json_query(data, expr): def json_query(data, expr):
'''Query data using jmespath query language ( http://jmespath.org ). Example: """Query data using jmespath query language ( http://jmespath.org ). Example:
- ansible.builtin.debug: msg="{{ instance | json_query(tagged_instances[*].block_device_mapping.*.volume_id') }}" - ansible.builtin.debug: msg="{{ instance | json_query(tagged_instances[*].block_device_mapping.*.volume_id') }}"
''' """
if not HAS_LIB: if not HAS_LIB:
raise AnsibleError('You need to install "jmespath" prior to running ' raise AnsibleError('You need to install "jmespath" prior to running json_query filter')
'json_query filter')
# Hack to handle Ansible Unsafe text, AnsibleMapping and AnsibleSequence # Hack to handle Ansible Unsafe text, AnsibleMapping and AnsibleSequence
# See issues https://github.com/ansible-collections/community.general/issues/320 # See issues https://github.com/ansible-collections/community.general/issues/320
# and https://github.com/ansible/ansible/issues/85600. # and https://github.com/ansible/ansible/issues/85600.
jmespath.functions.REVERSE_TYPES_MAP['string'] = jmespath.functions.REVERSE_TYPES_MAP['string'] + ( jmespath.functions.REVERSE_TYPES_MAP["string"] = jmespath.functions.REVERSE_TYPES_MAP["string"] + (
'AnsibleUnicode', 'AnsibleUnsafeText', '_AnsibleTaggedStr', "AnsibleUnicode",
"AnsibleUnsafeText",
"_AnsibleTaggedStr",
) )
jmespath.functions.REVERSE_TYPES_MAP['array'] = jmespath.functions.REVERSE_TYPES_MAP['array'] + ( jmespath.functions.REVERSE_TYPES_MAP["array"] = jmespath.functions.REVERSE_TYPES_MAP["array"] + (
'AnsibleSequence', '_AnsibleLazyTemplateList', "AnsibleSequence",
"_AnsibleLazyTemplateList",
) )
jmespath.functions.REVERSE_TYPES_MAP['object'] = jmespath.functions.REVERSE_TYPES_MAP['object'] + ( jmespath.functions.REVERSE_TYPES_MAP["object"] = jmespath.functions.REVERSE_TYPES_MAP["object"] + (
'AnsibleMapping', '_AnsibleLazyTemplateDict', "AnsibleMapping",
"_AnsibleLazyTemplateDict",
) )
try: try:
return jmespath.search(expr, data) return jmespath.search(expr, data)
except jmespath.exceptions.JMESPathError as e: except jmespath.exceptions.JMESPathError as e:
raise AnsibleFilterError(f'JMESPathError in json_query filter plugin:\n{e}') raise AnsibleFilterError(f"JMESPathError in json_query filter plugin:\n{e}")
except Exception as e: except Exception as e:
# For older jmespath, we can get ValueError and TypeError without much info. # For older jmespath, we can get ValueError and TypeError without much info.
raise AnsibleFilterError(f'Error in jmespath.search in json_query filter plugin:\n{e}') raise AnsibleFilterError(f"Error in jmespath.search in json_query filter plugin:\n{e}")
class FilterModule: class FilterModule:
''' Query filter ''' """Query filter"""
def filters(self): def filters(self):
return { return {"json_query": json_query}
'json_query': json_query
}

View File

@@ -101,10 +101,11 @@ _value:
from ansible_collections.community.general.plugins.plugin_utils.keys_filter import ( from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
_keys_filter_params, _keys_filter_params,
_keys_filter_target_str) _keys_filter_target_str,
)
def keep_keys(data, target=None, matching_parameter='equal'): def keep_keys(data, target=None, matching_parameter="equal"):
"""keep specific keys from dictionaries in a list""" """keep specific keys from dictionaries in a list"""
# test parameters # test parameters
@@ -112,16 +113,20 @@ def keep_keys(data, target=None, matching_parameter='equal'):
# test and transform target # test and transform target
tt = _keys_filter_target_str(target, matching_parameter) tt = _keys_filter_target_str(target, matching_parameter)
if matching_parameter == 'equal': if matching_parameter == "equal":
def keep_key(key): def keep_key(key):
return key in tt return key in tt
elif matching_parameter == 'starts_with': elif matching_parameter == "starts_with":
def keep_key(key): def keep_key(key):
return key.startswith(tt) return key.startswith(tt)
elif matching_parameter == 'ends_with': elif matching_parameter == "ends_with":
def keep_key(key): def keep_key(key):
return key.endswith(tt) return key.endswith(tt)
elif matching_parameter == 'regex': elif matching_parameter == "regex":
def keep_key(key): def keep_key(key):
return tt.match(key) is not None return tt.match(key) is not None
@@ -129,8 +134,7 @@ def keep_keys(data, target=None, matching_parameter='equal'):
class FilterModule: class FilterModule:
def filters(self): def filters(self):
return { return {
'keep_keys': keep_keys, "keep_keys": keep_keys,
} }

Some files were not shown because too many files have changed in this diff Show More