modules p*: use f-strings (#10974)

* modules p*: use f-strings

* add changelog frag
This commit is contained in:
Alexei Znamensky
2025-10-26 19:48:51 +13:00
committed by GitHub
parent d51e4c188b
commit 8120e9347e
42 changed files with 299 additions and 300 deletions

View File

@@ -0,0 +1,42 @@
minor_changes:
- pacemaker_cluster - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pacemaker_info - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pacemaker_resource - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pacemaker_stonith - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- packet_device - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- packet_ip_subnet - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- packet_project - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- packet_sshkey - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- packet_volume - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- packet_volume_attachment - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pacman - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pacman_key - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pagerduty - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pagerduty_alert - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pagerduty_change - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pagerduty_user - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pam_limits - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pamd - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- parted - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pear - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pids - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pip_package_info - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pipx - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pkg5 - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pkg5_publisher - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pkgin - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pkgng - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pmem - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pnpm - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- portage - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- portinstall - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pritunl_org - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pritunl_org_info - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pritunl_user - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pritunl_user_info - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pubnub_blocks - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pulp_repo - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- puppet - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pushbullet - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- pushover - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).
- python_requirements_info - use f-strings for string templating (https://github.com/ansible-collections/community.general/pull/10974).

View File

@@ -111,7 +111,7 @@ class PacemakerCluster(StateModuleHelper):
def _process_command_output(self, fail_on_err, ignore_err_msg=""):
def process(rc, out, err):
if fail_on_err and rc != 0 and err and ignore_err_msg not in err:
self.do_raise('pcs failed with error (rc={0}): {1}'.format(rc, err))
self.do_raise(f'pcs failed with error (rc={rc}): {err}')
out = out.rstrip()
return None if out == "" else out
return process

View File

@@ -85,7 +85,7 @@ class PacemakerInfo(ModuleHelper):
def _process_command_output(self, cli_action=""):
def process(rc, out, err):
if rc != 0:
self.do_raise('pcs {0} config failed with error (rc={1}): {2}'.format(cli_action, rc, err))
self.do_raise(f'pcs {cli_action} config failed with error (rc={rc}): {err}')
out = json.loads(out)
return None if out == "" else out
return process

View File

@@ -195,7 +195,7 @@ class PacemakerResource(StateModuleHelper):
def _process_command_output(self, fail_on_err, ignore_err_msg=""):
def process(rc, out, err):
if fail_on_err and rc != 0 and err and ignore_err_msg not in err:
self.do_raise('pcs failed with error (rc={0}): {1}'.format(rc, err))
self.do_raise(f'pcs failed with error (rc={rc}): {err}')
out = out.rstrip()
return None if out == "" else out
return process

View File

@@ -162,7 +162,7 @@ class PacemakerStonith(StateModuleHelper):
def _process_command_output(self, fail_on_err, ignore_err_msg=""):
def process(rc, out, err):
if fail_on_err and rc != 0 and err and ignore_err_msg not in err:
self.do_raise('pcs failed with error (rc={0}): {1}'.format(rc, err))
self.do_raise(f'pcs failed with error (rc={rc}): {err}')
out = out.rstrip()
return None if out == "" else out
return process

View File

@@ -279,7 +279,6 @@ import uuid
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_native
HAS_PACKET_SDK = True
try:
@@ -289,7 +288,7 @@ except ImportError:
NAME_RE = r'({0}|{0}{1}*{0})'.format(r'[a-zA-Z0-9]', r'[a-zA-Z0-9\-]')
HOSTNAME_RE = r'({0}\.)*{0}$'.format(NAME_RE)
HOSTNAME_RE = rf'({NAME_RE}\.)*{NAME_RE}$'
MAX_DEVICES = 100
PACKET_DEVICE_STATES = (
@@ -407,7 +406,7 @@ def get_hostname_list(module):
if isinstance(hostnames, str):
hostnames = listify_string_name_or_id(hostnames)
if not isinstance(hostnames, list):
raise Exception("name %s is not convertible to list" % hostnames)
raise Exception(f"name {hostnames} is not convertible to list")
# at this point, hostnames is a list
hostnames = [h.strip() for h in hostnames]
@@ -428,11 +427,10 @@ def get_hostname_list(module):
for hn in hostnames:
if not is_valid_hostname(hn):
raise Exception("Hostname '%s' does not seem to be valid" % hn)
raise Exception(f"Hostname '{hn}' does not seem to be valid")
if len(hostnames) > MAX_DEVICES:
raise Exception("You specified too many hostnames, max is %d" %
MAX_DEVICES)
raise Exception(f"You specified too many hostnames, max is {MAX_DEVICES}")
return hostnames
@@ -446,11 +444,10 @@ def get_device_id_list(module):
for di in device_ids:
if not is_valid_uuid(di):
raise Exception("Device ID '%s' does not seem to be valid" % di)
raise Exception(f"Device ID '{di}' does not seem to be valid")
if len(device_ids) > MAX_DEVICES:
raise Exception("You specified too many devices, max is %d" %
MAX_DEVICES)
raise Exception(f"You specified too many devices, max is {MAX_DEVICES}")
return device_ids
@@ -458,8 +455,7 @@ def create_single_device(module, packet_conn, hostname):
for param in ('hostnames', 'operating_system', 'plan'):
if not module.params.get(param):
raise Exception("%s parameter is required for new device."
% param)
raise Exception(f"{param} parameter is required for new device.")
project_id = module.params.get('project_id')
plan = module.params.get('plan')
tags = module.params.get('tags')
@@ -472,7 +468,7 @@ def create_single_device(module, packet_conn, hostname):
if operating_system != 'custom_ipxe':
for param in ('ipxe_script_url', 'always_pxe'):
if module.params.get(param):
raise Exception('%s parameter is not valid for non custom_ipxe operating_system.' % param)
raise Exception(f'{param} parameter is not valid for non custom_ipxe operating_system.')
device = packet_conn.create_device(
project_id=project_id,
@@ -525,8 +521,7 @@ def wait_for_public_IPv(module, packet_conn, created_devices):
return refreshed
time.sleep(5)
raise Exception("Waiting for IPv%d address timed out. Hostnames: %s"
% (address_family, [d.hostname for d in created_devices]))
raise Exception(f"Waiting for IPv{address_family} address timed out. Hostnames: {[d.hostname for d in created_devices]}")
def get_existing_devices(module, packet_conn):
@@ -588,9 +583,7 @@ def act_on_devices(module, packet_conn, target_state):
changed = True
else:
_msg = (
"I don't know how to process existing device %s from state %s "
"to state %s" %
(d.hostname, d.state, target_state))
f"I don't know how to process existing device {d.hostname} from state {d.state} to state {target_state}")
raise Exception(_msg)
# At last create missing devices
@@ -649,9 +642,7 @@ def main():
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable %s, "
"the auth_token parameter is required" %
PACKET_API_TOKEN_ENV_VAR)
_fail_msg = f"if Packet API token is not in environment variable {PACKET_API_TOKEN_ENV_VAR}, the auth_token parameter is required"
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
@@ -663,8 +654,7 @@ def main():
try:
module.exit_json(**act_on_devices(module, packet_conn, state))
except Exception as e:
module.fail_json(msg='failed to set device state %s, error: %s' %
(state, to_native(e)), exception=traceback.format_exc())
module.fail_json(msg=f'failed to set device state {state}, error: {e}', exception=traceback.format_exc())
if __name__ == '__main__':

View File

@@ -152,7 +152,6 @@ import uuid
import re
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.common.text.converters import to_native
HAS_PACKET_SDK = True
@@ -163,7 +162,7 @@ except ImportError:
NAME_RE = r'({0}|{0}{1}*{0})'.format(r'[a-zA-Z0-9]', r'[a-zA-Z0-9\-]')
HOSTNAME_RE = r'({0}\.)*{0}$'.format(NAME_RE)
HOSTNAME_RE = rf'({NAME_RE}\.)*{NAME_RE}$'
PROJECT_MAX_DEVICES = 100
@@ -188,7 +187,7 @@ def is_valid_uuid(myuuid):
def get_existing_devices(module, packet_conn):
project_id = module.params.get('project_id')
if not is_valid_uuid(project_id):
raise Exception("Project ID {0} does not seem to be valid".format(project_id))
raise Exception(f"Project ID {project_id} does not seem to be valid")
per_page = module.params.get('device_count')
return packet_conn.list_devices(
@@ -199,12 +198,12 @@ def get_specified_device_identifiers(module):
if module.params.get('device_id'):
_d_id = module.params.get('device_id')
if not is_valid_uuid(_d_id):
raise Exception("Device ID '{0}' does not seem to be valid".format(_d_id))
raise Exception(f"Device ID '{_d_id}' does not seem to be valid")
return {'device_id': _d_id, 'hostname': None}
elif module.params.get('hostname'):
_hn = module.params.get('hostname')
if not is_valid_hostname(_hn):
raise Exception("Hostname '{0}' does not seem to be valid".format(_hn))
raise Exception(f"Hostname '{_hn}' does not seem to be valid")
return {'hostname': _hn, 'device_id': None}
else:
return {'hostname': None, 'device_id': None}
@@ -217,7 +216,7 @@ def parse_subnet_cidr(cidr):
try:
prefixlen = int(prefixlen)
except ValueError:
raise Exception("Wrong prefix length in CIDR expression {0}".format(cidr))
raise Exception(f"Wrong prefix length in CIDR expression {cidr}")
return addr, prefixlen
@@ -253,17 +252,16 @@ def act_on_assignment(target_state, module, packet_conn):
hn = specified_identifier['hostname']
matching_devices = [d for d in all_devices if d.hostname == hn]
if len(matching_devices) > 1:
raise Exception("There are more than one devices matching given hostname {0}".format(hn))
raise Exception(f"There are more than one devices matching given hostname {hn}")
if len(matching_devices) == 0:
raise Exception("There is no device matching given hostname {0}".format(hn))
raise Exception(f"There is no device matching given hostname {hn}")
device = matching_devices[0]
return_dict['device_id'] = device.id
assignment_dicts = [i for i in device.ip_addresses
if i['address'] == address and i['cidr'] == prefixlen]
if len(assignment_dicts) > 1:
raise Exception("IP address {0} is assigned more than once for device {1}".format(
specified_cidr, device.hostname))
raise Exception(f"IP address {specified_cidr} is assigned more than once for device {device.hostname}")
if target_state == "absent":
if len(assignment_dicts) == 1:
@@ -273,7 +271,7 @@ def act_on_assignment(target_state, module, packet_conn):
elif target_state == "present":
if len(assignment_dicts) == 0:
new_assignment = packet_conn.call_api(
"devices/{0}/ips".format(device.id), "POST", {"address": "{0}".format(specified_cidr)})
f"devices/{device.id}/ips", "POST", {"address": f"{specified_cidr}"})
return_dict['changed'] = True
return_dict['subnet'] = new_assignment
return return_dict
@@ -306,8 +304,7 @@ def main():
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable {0}, "
"the auth_token parameter is required".format(PACKET_API_TOKEN_ENV_VAR))
_fail_msg = f"if Packet API token is not in environment variable {PACKET_API_TOKEN_ENV_VAR}, the auth_token parameter is required"
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
@@ -320,7 +317,7 @@ def main():
module.exit_json(**act_on_assignment(state, module, packet_conn))
except Exception as e:
module.fail_json(
msg="failed to set IP subnet to state {0}, error: {1}".format(state, to_native(e)))
msg=f"failed to set IP subnet to state {state}, error: {e}")
if __name__ == '__main__':

View File

@@ -172,8 +172,7 @@ def act_on_project(target_state, module, packet_conn):
result_dict['id'] = matching_projects[0].id
else:
if len(matching_projects) > 1:
_msg = ("More than projects matched for module call with state = absent: "
"{0}".format(to_native(matching_projects)))
_msg = f"More than projects matched for module call with state = absent: {to_native(matching_projects)}"
module.fail_json(msg=_msg)
if len(matching_projects) == 1:
@@ -184,8 +183,7 @@ def act_on_project(target_state, module, packet_conn):
try:
p.delete()
except Exception as e:
_msg = ("while trying to remove project {0}, id {1}, got error: {2}".format(
p.name, p.id, to_native(e)))
_msg = f"while trying to remove project {p.name}, id {p.id}, got error: {e}"
module.fail_json(msg=_msg)
return result_dict
@@ -215,8 +213,7 @@ def main():
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable {0}, "
"the auth_token parameter is required".format(PACKET_API_TOKEN_ENV_VAR))
_fail_msg = f"if Packet API token is not in environment variable {PACKET_API_TOKEN_ENV_VAR}, the auth_token parameter is required"
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
@@ -233,9 +230,9 @@ def main():
module.exit_json(**act_on_project(state, module, packet_conn))
except Exception as e:
module.fail_json(
msg="failed to set project state {0}: {1}".format(state, to_native(e)))
msg=f"failed to set project state {state}: {e}")
else:
module.fail_json(msg="{0} is not a valid state for this module".format(state))
module.fail_json(msg=f"{state} is not a valid state for this module")
if __name__ == '__main__':

View File

@@ -138,7 +138,7 @@ def load_key_string(key_str):
if len(cut_key) == 3:
ret_dict['label'] = cut_key[2]
else:
raise Exception("Public key %s is in wrong format" % key_str)
raise Exception(f"Public key {key_str} is in wrong format")
return ret_dict
@@ -146,7 +146,7 @@ def get_sshkey_selector(module):
key_id = module.params.get('id')
if key_id:
if not is_valid_uuid(key_id):
raise Exception("sshkey ID %s is not valid UUID" % key_id)
raise Exception(f"sshkey ID {key_id} is not valid UUID")
selecting_fields = ['label', 'fingerprint', 'id', 'key']
select_dict = {}
for f in selecting_fields:
@@ -192,8 +192,7 @@ def act_on_sshkeys(target_state, module, packet_conn):
if param not in newkey:
_msg = ("If you want to ensure a key is present, you must "
"supply both a label and a key string, either in "
"module params, or in a key file. %s is missing"
% param)
f"module params, or in a key file. {param} is missing")
raise Exception(_msg)
matching_sshkeys = []
new_key_response = packet_conn.create_ssh_key(
@@ -208,9 +207,7 @@ def act_on_sshkeys(target_state, module, packet_conn):
k.delete()
changed = True
except Exception as e:
_msg = ("while trying to remove sshkey %s, id %s %s, "
"got error: %s" %
(k.label, k.id, target_state, e))
_msg = f"while trying to remove sshkey {k.label}, id {k.id} {target_state}, got error: {e}"
raise Exception(_msg)
return {
@@ -245,9 +242,7 @@ def main():
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable %s, "
"the auth_token parameter is required" %
PACKET_API_TOKEN_ENV_VAR)
_fail_msg = f"if Packet API token is not in environment variable {PACKET_API_TOKEN_ENV_VAR}, the auth_token parameter is required"
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
@@ -260,9 +255,9 @@ def main():
try:
module.exit_json(**act_on_sshkeys(state, module, packet_conn))
except Exception as e:
module.fail_json(msg='failed to set sshkey state: %s' % str(e))
module.fail_json(msg=f'failed to set sshkey state: {e}')
else:
module.fail_json(msg='%s is not a valid state for this module' % state)
module.fail_json(msg=f'{state} is not a valid state for this module')
if __name__ == '__main__':

View File

@@ -201,7 +201,7 @@ def get_volume_selector(module):
if module.params.get('id'):
i = module.params.get('id')
if not is_valid_uuid(i):
raise Exception("Volume ID '{0}' is not a valid UUID".format(i))
raise Exception(f"Volume ID '{i}' is not a valid UUID")
return lambda v: v['id'] == i
elif module.params.get('name'):
n = module.params.get('name')
@@ -214,7 +214,7 @@ def get_volume_selector(module):
def get_or_fail(params, key):
    """Return ``params[key]``, raising when it is missing or ``None``.

    Enforces parameters that are mandatory when creating a new volume.

    :param params: mapping of module parameters
    :param key: parameter name to look up
    :raises Exception: when the key is absent or its value is ``None``
    """
    item = params.get(key)
    if item is None:
        raise Exception(f"{key} must be specified for new volume")
    return item
@@ -222,7 +222,7 @@ def act_on_volume(target_state, module, packet_conn):
return_dict = {'changed': False}
s = get_volume_selector(module)
project_id = module.params.get("project_id")
api_method = "projects/{0}/storage".format(project_id)
api_method = f"projects/{project_id}/storage"
all_volumes = packet_conn.call_api(api_method, "GET")['volumes']
matching_volumes = [v for v in all_volumes if s(v)]
@@ -249,13 +249,12 @@ def act_on_volume(target_state, module, packet_conn):
else:
if len(matching_volumes) > 1:
_msg = ("More than one volume matches in module call for absent state: {0}".format(
to_native(matching_volumes)))
_msg = f"More than one volume matches in module call for absent state: {to_native(matching_volumes)}"
module.fail_json(msg=_msg)
if len(matching_volumes) == 1:
volume = matching_volumes[0]
packet_conn.call_api("storage/{0}".format(volume['id']), "DELETE")
packet_conn.call_api(f"storage/{volume['id']}", "DELETE")
return_dict['changed'] = True
for k in ['id', 'name', 'description']:
return_dict[k] = volume[k]
@@ -296,8 +295,7 @@ def main():
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable {0}, "
"the auth_token parameter is required".format(PACKET_API_TOKEN_ENV_VAR))
_fail_msg = f"if Packet API token is not in environment variable {PACKET_API_TOKEN_ENV_VAR}, the auth_token parameter is required"
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
@@ -314,10 +312,9 @@ def main():
module.exit_json(**act_on_volume(state, module, packet_conn))
except Exception as e:
module.fail_json(
msg="failed to set volume state {0}: {1}".format(
state, to_native(e)))
msg=f"failed to set volume state {state}: {to_native(e)}")
else:
module.fail_json(msg="{0} is not a valid state for this module".format(state))
module.fail_json(msg=f"{state} is not a valid state for this module")
if __name__ == '__main__':

View File

@@ -133,7 +133,6 @@ device_id:
import uuid
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.common.text.converters import to_native
HAS_PACKET_SDK = True
@@ -172,7 +171,7 @@ def get_device_selector(spec):
def do_attach(packet_conn, vol_id, dev_id):
api_method = "storage/{0}/attachments".format(vol_id)
api_method = f"storage/{vol_id}/attachments"
packet_conn.call_api(
api_method,
params={"device_id": dev_id},
@@ -189,11 +188,10 @@ def do_detach(packet_conn, vol, dev_id=None):
def validate_selected(l, resource_type, spec):
    """Ensure exactly one resource matched the user's specification.

    :param l: list of matching resources
    :param resource_type: human-readable resource kind for the error text
    :param spec: the specification the user supplied
    :raises Exception: when ``l`` is empty or contains more than one entry
    """
    if len(l) > 1:
        _msg = f"more than one {resource_type} matches specification {spec}: {l}"
        raise Exception(_msg)
    if len(l) == 0:
        _msg = f"no {resource_type} matches specification: {spec}"
        raise Exception(_msg)
@@ -211,7 +209,7 @@ def act_on_volume_attachment(target_state, module, packet_conn):
if devspec is None and target_state == 'present':
raise Exception("If you want to attach a volume, you must specify a device.")
project_id = module.params.get("project_id")
volumes_api_method = "projects/{0}/storage".format(project_id)
volumes_api_method = f"projects/{project_id}/storage"
volumes = packet_conn.call_api(volumes_api_method,
params={'include': 'facility,attachments.device'})['volumes']
v_match = get_volume_selector(volspec)
@@ -222,7 +220,7 @@ def act_on_volume_attachment(target_state, module, packet_conn):
device = None
if devspec is not None:
devices_api_method = "projects/{0}/devices".format(project_id)
devices_api_method = f"projects/{project_id}/devices"
devices = packet_conn.call_api(devices_api_method)['devices']
d_match = get_device_selector(devspec)
matching_devices = [d for d in devices if d_match(d)]
@@ -239,8 +237,7 @@ def act_on_volume_attachment(target_state, module, packet_conn):
elif device['id'] not in attached_device_ids:
# Don't reattach volume which is attached to a different device.
# Rather fail than force remove a device on state == 'present'.
raise Exception("volume {0} is already attached to device {1}".format(
volume, attached_device_ids))
raise Exception(f"volume {volume} is already attached to device {attached_device_ids}")
else:
if device is None:
if len(attached_device_ids) > 0:
@@ -273,8 +270,7 @@ def main():
module.fail_json(msg='packet required for this module')
if not module.params.get('auth_token'):
_fail_msg = ("if Packet API token is not in environment variable {0}, "
"the auth_token parameter is required".format(PACKET_API_TOKEN_ENV_VAR))
_fail_msg = f"if Packet API token is not in environment variable {PACKET_API_TOKEN_ENV_VAR}, the auth_token parameter is required"
module.fail_json(msg=_fail_msg)
auth_token = module.params.get('auth_token')
@@ -292,9 +288,9 @@ def main():
**act_on_volume_attachment(state, module, packet_conn))
except Exception as e:
module.fail_json(
msg="failed to set volume_attachment state {0}: {1}".format(state, to_native(e)))
msg=f"failed to set volume_attachment state {state}: {e}")
else:
module.fail_json(msg="{0} is not a valid state for this module".format(state))
module.fail_json(msg=f"{state} is not a valid state for this module")
if __name__ == '__main__':

View File

@@ -261,7 +261,7 @@ class Package(object):
return self.name < o.name
def __repr__(self):
return 'Package("%s", "%s", %s)' % (self.name, self.source, self.source_is_URL)
return f'Package("{self.name}", "{self.source}", {self.source_is_URL})'
VersionTuple = namedtuple("VersionTuple", ["current", "latest"])
@@ -403,13 +403,13 @@ class Pacman(object):
continue
name, version = p.split()
if name in self.inventory["installed_pkgs"]:
before.append("%s-%s-%s" % (name, self.inventory["installed_pkgs"][name], self.inventory["pkg_reasons"][name]))
before.append(f"{name}-{self.inventory['installed_pkgs'][name]}-{self.inventory['pkg_reasons'][name]}")
if name in pkgs_to_set_reason:
after.append("%s-%s-%s" % (name, version, self.m.params["reason"]))
after.append(f"{name}-{version}-{self.m.params['reason']}")
elif name in self.inventory["pkg_reasons"]:
after.append("%s-%s-%s" % (name, version, self.inventory["pkg_reasons"][name]))
after.append(f"{name}-{version}-{self.inventory['pkg_reasons'][name]}")
else:
after.append("%s-%s" % (name, version))
after.append(f"{name}-{version}")
to_be_installed.append(name)
return (to_be_installed, before, after)
@@ -437,15 +437,17 @@ class Pacman(object):
self.changed = True
_before_joined = '\n'.join(sorted(before))
_after_joined = '\n'.join(sorted(after))
self.exit_params["diff"] = {
"before": "\n".join(sorted(before)) + "\n" if before else "",
"after": "\n".join(sorted(after)) + "\n" if after else "",
"before": f"{_before_joined}\n" if before else "",
"after": f"{_after_joined}\n" if after else "",
}
changed_reason_pkgs = [p for p in pkgs_to_set_reason if p not in installed_pkgs]
if self.m.check_mode:
self.add_exit_infos("Would have installed %d packages" % (len(installed_pkgs) + len(changed_reason_pkgs)))
self.add_exit_infos(f"Would have installed {len(installed_pkgs) + len(changed_reason_pkgs)} packages")
self.exit_params["packages"] = sorted(installed_pkgs + changed_reason_pkgs)
return
@@ -478,7 +480,7 @@ class Pacman(object):
self.add_exit_infos(stdout=stdout, stderr=stderr)
self.exit_params["packages"] = sorted(installed_pkgs + changed_reason_pkgs)
self.add_exit_infos("Installed %d package(s)" % (len(installed_pkgs) + len(changed_reason_pkgs)))
self.add_exit_infos(f"Installed {len(installed_pkgs) + len(changed_reason_pkgs)} package(s)")
def remove_packages(self, pkgs):
# filter out pkgs that are already absent
@@ -509,14 +511,15 @@ class Pacman(object):
removed_pkgs = stdout.split()
self.exit_params["packages"] = removed_pkgs
_remove_pkgs_joined = '\n'.join(removed_pkgs)
self.exit_params["diff"] = {
"before": "\n".join(removed_pkgs) + "\n", # trailing \n to avoid diff complaints
"before": f"{_remove_pkgs_joined}\n", # trailing \n to avoid diff complaints
"after": "",
}
if self.m.check_mode:
self.exit_params["packages"] = removed_pkgs
self.add_exit_infos("Would have removed %d packages" % len(removed_pkgs))
self.add_exit_infos(f"Would have removed {len(removed_pkgs)} packages")
return
nosave_args = ["--nosave"] if self.m.params["remove_nosave"] else []
@@ -527,7 +530,7 @@ class Pacman(object):
self.fail("failed to remove package(s)", cmd=cmd, stdout=stdout, stderr=stderr)
self._invalidate_database()
self.exit_params["packages"] = removed_pkgs
self.add_exit_infos("Removed %d package(s)" % len(removed_pkgs), stdout=stdout, stderr=stderr)
self.add_exit_infos(f"Removed {len(removed_pkgs)} package(s)", stdout=stdout, stderr=stderr)
def upgrade(self):
"""Runs pacman --sync --sysupgrade if there are upgradable packages"""
@@ -541,14 +544,14 @@ class Pacman(object):
# Build diff based on inventory first.
diff = {"before": "", "after": ""}
for pkg, versions in self.inventory["upgradable_pkgs"].items():
diff["before"] += "%s-%s\n" % (pkg, versions.current)
diff["after"] += "%s-%s\n" % (pkg, versions.latest)
diff["before"] += f"{pkg}-{versions.current}\n"
diff["after"] += f"{pkg}-{versions.latest}\n"
self.exit_params["diff"] = diff
self.exit_params["packages"] = self.inventory["upgradable_pkgs"].keys()
if self.m.check_mode:
self.add_exit_infos(
"%d packages would have been upgraded" % (len(self.inventory["upgradable_pkgs"]))
f"{len(self.inventory['upgradable_pkgs'])} packages would have been upgraded"
)
else:
cmd = [
@@ -654,7 +657,7 @@ class Pacman(object):
continue # Don't bark for unavailable packages when trying to remove them
else:
self.fail(
msg="Failed to list package %s" % (pkg),
msg=f"Failed to list package {pkg}",
cmd=cmd,
stdout=stdout,
stderr=stderr,
@@ -764,7 +767,7 @@ class Pacman(object):
continue
s = l.split()
if len(s) != 4:
self.fail(msg="Invalid line: %s" % l)
self.fail(msg=f"Invalid line: {l}")
pkg = s[0]
current = s[1]

View File

@@ -245,7 +245,7 @@ class PacmanKey(object):
def gpg(self, args, keyring=None, **kwargs):
    """Run the gpg binary with pacman-key's standard option set.

    :param args: extra command-line arguments appended after the options
    :param keyring: when given, passed to gpg as ``--homedir``
    :param kwargs: forwarded to ``module.run_command``
    :return: the result of ``module.run_command`` for the assembled command
    """
    cmd = [self.gpg_binary]
    if keyring:
        cmd.append(f'--homedir={keyring}')
    cmd.extend(['--no-permission-warning', '--with-colons', '--quiet', '--batch', '--no-tty'])
    return self.module.run_command(cmd + args, **kwargs)
@@ -273,16 +273,16 @@ class PacmanKey(object):
"""
sanitised_keyid = keyid.strip().upper().replace(' ', '').replace('0X', '')
if len(sanitised_keyid) != self.keylength:
self.module.fail_json(msg="key ID is not full-length: %s" % sanitised_keyid)
self.module.fail_json(msg=f"key ID is not full-length: {sanitised_keyid}")
if not self.is_hexadecimal(sanitised_keyid):
self.module.fail_json(msg="key ID is not hexadecimal: %s" % sanitised_keyid)
self.module.fail_json(msg=f"key ID is not hexadecimal: {sanitised_keyid}")
return sanitised_keyid
def fetch_key(self, url):
    """Download a key from *url* and return its text content.

    Fails the module when the HTTP response status is not 200.
    """
    response, info = fetch_url(self.module, url)
    if info['status'] != 200:
        self.module.fail_json(msg=f"failed to fetch key at {url}, error was {info['msg']}")
    return to_native(response.read())
def recv_key(self, keyring, keyid, keyserver):
@@ -326,7 +326,7 @@ class PacmanKey(object):
extracted_keyid = gpg_get_first_attr_of_kind(stdout.splitlines(), 'fpr', 'user_id')
if extracted_keyid != keyid:
self.module.fail_json(msg="key ID does not match. expected %s, got %s" % (keyid, extracted_keyid))
self.module.fail_json(msg=f"key ID does not match. expected {keyid}, got {extracted_keyid}")
def key_validity(self, keyring, keyid):
"Check if the key ID is in pacman's keyring and not expired"
@@ -335,7 +335,7 @@ class PacmanKey(object):
if stderr.find("No public key") >= 0:
return []
else:
self.module.fail_json(msg="gpg returned an error: %s" % stderr)
self.module.fail_json(msg=f"gpg returned an error: {stderr}")
return gpg_get_all_attrs_of_kind(stdout.splitlines(), 'uid', 'is_fully_valid')
def key_is_trusted(self, keyring, keyid):

View File

@@ -171,7 +171,7 @@ class PagerDutyRequest(object):
response, info = http_call(self.module, url, headers=headers)
if info['status'] != 200:
self.module.fail_json(msg="failed to lookup the ongoing window: %s" % info['msg'])
self.module.fail_json(msg=f"failed to lookup the ongoing window: {info['msg']}")
json_out = self._read_response(response)
@@ -194,7 +194,7 @@ class PagerDutyRequest(object):
data = json.dumps(request_data)
response, info = http_call(self.module, url, data=data, headers=headers, method='POST')
if info['status'] != 201:
self.module.fail_json(msg="failed to create the window: %s" % info['msg'])
self.module.fail_json(msg=f"failed to create the window: {info['msg']}")
json_out = self._read_response(response)
@@ -214,19 +214,19 @@ class PagerDutyRequest(object):
return start, end
def absent(self, window_id, http_call=fetch_url):
url = "https://api.pagerduty.com/maintenance_windows/" + window_id
url = f"https://api.pagerduty.com/maintenance_windows/{window_id}"
headers = dict(self.headers)
response, info = http_call(self.module, url, headers=headers, method='DELETE')
if info['status'] != 204:
self.module.fail_json(msg="failed to delete the window: %s" % info['msg'])
self.module.fail_json(msg=f"failed to delete the window: {info['msg']}")
json_out = self._read_response(response)
return False, json_out, True
def _auth_header(self):
return "Token token=%s" % self.token
return f"Token token={self.token}"
def _read_response(self, response):
try:

View File

@@ -226,7 +226,7 @@ def check(module, name, state, service_id, integration_key, api_key, incident_ke
url = 'https://api.pagerduty.com/incidents'
headers = {
"Content-type": "application/json",
"Authorization": "Token token=%s" % api_key,
"Authorization": f"Token token={api_key}",
'Accept': 'application/vnd.pagerduty+json;version=2'
}
@@ -246,8 +246,7 @@ def check(module, name, state, service_id, integration_key, api_key, incident_ke
response, info = http_call(module, url, method='get', headers=headers)
if info['status'] != 200:
module.fail_json(msg="failed to check current incident status."
"Reason: %s" % info['msg'])
module.fail_json(msg=f"failed to check current incident status.Reason: {info['msg']}")
incidents = json.loads(response.read())["incidents"]
msg = "No corresponding incident"
@@ -281,8 +280,7 @@ def send_event_v1(module, service_key, event_type, desc,
response, info = fetch_url(module, url, method='post',
headers=headers, data=json.dumps(data))
if info['status'] != 200:
module.fail_json(msg="failed to %s. Reason: %s" %
(event_type, info['msg']))
module.fail_json(msg=f"failed to {event_type}. Reason: {info['msg']}")
json_out = json.loads(response.read())
return json_out
@@ -309,8 +307,7 @@ def send_event_v2(module, service_key, event_type, payload, link,
response, info = fetch_url(module, url, method="post",
headers=headers, data=json.dumps(data))
if info["status"] != 202:
module.fail_json(msg="failed to %s. Reason: %s" %
(event_type, info['msg']))
module.fail_json(msg=f"failed to {event_type}. Reason: {info['msg']}")
json_out = json.loads(response.read())
return json_out, True

View File

@@ -145,7 +145,7 @@ def main():
module.exit_json(changed=True)
else:
module.fail_json(
msg='Checking the PagerDuty change event API returned an unexpected response: %d' % (info['status']))
msg=f"Checking the PagerDuty change event API returned an unexpected response: {info['status']}")
custom_details = {}
@@ -192,7 +192,7 @@ def main():
module.exit_json(changed=True)
else:
module.fail_json(
msg='Creating PagerDuty change event failed with %d' % (info['status']))
msg=f"Creating PagerDuty change event failed with {info['status']}")
if __name__ == '__main__':

View File

@@ -115,18 +115,18 @@ class PagerDutyUser(object):
except PDClientError as e:
if e.response.status_code == 400:
self._module.fail_json(
msg="Failed to add %s due to invalid argument" % (pd_name))
msg=f"Failed to add {pd_name} due to invalid argument")
if e.response.status_code == 401:
self._module.fail_json(msg="Failed to add %s due to invalid API key" % (pd_name))
self._module.fail_json(msg=f"Failed to add {pd_name} due to invalid API key")
if e.response.status_code == 402:
self._module.fail_json(
msg="Failed to add %s due to inability to perform the action within the API token" % (pd_name))
msg=f"Failed to add {pd_name} due to inability to perform the action within the API token")
if e.response.status_code == 403:
self._module.fail_json(
msg="Failed to add %s due to inability to review the requested resource within the API token" % (pd_name))
msg=f"Failed to add {pd_name} due to inability to review the requested resource within the API token")
if e.response.status_code == 429:
self._module.fail_json(
msg="Failed to add %s due to reaching the limit of making requests" % (pd_name))
msg=f"Failed to add {pd_name} due to reaching the limit of making requests")
# delete a user account from PD
def delete_user(self, pd_user_id, pd_name):
@@ -137,17 +137,17 @@ class PagerDutyUser(object):
except PDClientError as e:
if e.response.status_code == 404:
self._module.fail_json(
msg="Failed to remove %s as user was not found" % (pd_name))
msg=f"Failed to remove {pd_name} as user was not found")
if e.response.status_code == 403:
self._module.fail_json(
msg="Failed to remove %s due to inability to review the requested resource within the API token" % (pd_name))
msg=f"Failed to remove {pd_name} due to inability to review the requested resource within the API token")
if e.response.status_code == 401:
# print out the list of incidents
pd_incidents = self.get_incidents_assigned_to_user(pd_user_id)
self._module.fail_json(msg="Failed to remove %s as user has assigned incidents %s" % (pd_name, pd_incidents))
self._module.fail_json(msg=f"Failed to remove {pd_name} as user has assigned incidents {pd_incidents}")
if e.response.status_code == 429:
self._module.fail_json(
msg="Failed to remove %s due to reaching the limit of making requests" % (pd_name))
msg=f"Failed to remove {pd_name} due to reaching the limit of making requests")
# get incidents assigned to a user
def get_incidents_assigned_to_user(self, pd_user_id):
@@ -169,7 +169,7 @@ class PagerDutyUser(object):
team_info = self._apisession.find('teams', team, attribute='name')
if team_info is not None:
try:
updated_team = self._apisession.rput('/teams/' + team_info['id'] + '/users/' + pd_user_id, json={
updated_team = self._apisession.rput(f"/teams/{team_info['id']}/users/{pd_user_id}", json={
'role': pd_role
})
except PDClientError:
@@ -216,7 +216,7 @@ def main():
try:
session = APISession(access_token)
except PDClientError as e:
module.fail_json(msg="Failed to authenticate with PagerDuty: %s" % e)
module.fail_json(msg=f"Failed to authenticate with PagerDuty: {e}")
user = PagerDutyUser(module, session)
@@ -227,14 +227,14 @@ def main():
# remove user
if not module.check_mode:
user.delete_user(user_exists, pd_user)
module.exit_json(changed=True, result="Successfully deleted user %s" % pd_user)
module.exit_json(changed=True, result=f"Successfully deleted user {pd_user}")
else:
module.exit_json(changed=False, result="User %s already exists." % pd_user)
module.exit_json(changed=False, result=f"User {pd_user} already exists.")
# in case that the user does not exist
else:
if state == "absent":
module.exit_json(changed=False, result="User %s was not found." % pd_user)
module.exit_json(changed=False, result=f"User {pd_user} was not found.")
else:
# add user, adds user with the default notification rule and contact info (email)
@@ -244,7 +244,7 @@ def main():
pd_user_id = user.does_user_exist(pd_email)
# add a user to the team/s
user.add_user_to_teams(pd_user_id, pd_teams, pd_role)
module.exit_json(changed=True, result="Successfully created & added user %s to team %s" % (pd_user, pd_teams))
module.exit_json(changed=True, result=f"Successfully created & added user {pd_user} to team {pd_teams}")
if __name__ == "__main__":

View File

@@ -155,10 +155,10 @@ def _assert_is_valid_value(module, item, value, prefix=''):
except ValueError:
valid = False
if not valid:
module.fail_json(msg="%s Value of %r for item %r is invalid. Value must be a number in the range -20 to 19 inclusive. "
"Refer to the limits.conf(5) manual pages for more details." % (prefix, value, item))
module.fail_json(msg=f"{prefix} Value of {value!r} for item {item!r} is invalid. Value must be a number in the range -20 to 19 inclusive. "
"Refer to the limits.conf(5) manual pages for more details.")
elif not (value in ['unlimited', 'infinity', '-1'] or value.isdigit()):
module.fail_json(msg="%s Value of %r for item %r is invalid. Value must either be 'unlimited', 'infinity' or -1, all of "
module.fail_json(msg=f"{prefix} Value of {value!r} for item {item!r} is invalid. Value must either be 'unlimited', 'infinity' or -1, all of "
"which indicate no limit, or a limit of 0 or larger. Refer to the limits.conf(5) manual pages for "
"more details." % (prefix, value, item))
@@ -201,14 +201,14 @@ def main():
if os.path.isfile(limits_conf):
if not os.access(limits_conf, os.W_OK):
module.fail_json(msg="%s is not writable. Use sudo" % limits_conf)
module.fail_json(msg=f"{limits_conf} is not writable. Use sudo")
else:
limits_conf_dir = os.path.dirname(limits_conf)
if os.path.isdir(limits_conf_dir) and os.access(limits_conf_dir, os.W_OK):
does_not_exist = True
changed = True
else:
module.fail_json(msg="directory %s is not writable (check presence, access rights, use sudo)" % limits_conf_dir)
module.fail_json(msg=f"directory {limits_conf_dir} is not writable (check presence, access rights, use sudo)")
if use_max and use_min:
module.fail_json(msg="Cannot use use_min and use_max at the same time.")
@@ -269,7 +269,7 @@ def main():
actual_value = line_fields[3]
_assert_is_valid_value(module, line_item, actual_value,
prefix="Invalid configuration found in '%s'." % limits_conf)
prefix=f"Invalid configuration found in '{limits_conf}'.")
# Found the line
if line_domain == domain and line_type == limit_type and line_item == limit_item:
@@ -307,8 +307,8 @@ def main():
if new_value != actual_value:
changed = True
if new_comment:
new_comment = "\t#" + new_comment
new_limit = domain + "\t" + limit_type + "\t" + limit_item + "\t" + new_value + new_comment + "\n"
new_comment = f"\t#{new_comment}"
new_limit = f"{domain}\t{limit_type}\t{limit_item}\t{new_value}{new_comment}\n"
message = new_limit
nf.write(new_limit)
else:
@@ -320,8 +320,8 @@ def main():
if not found:
changed = True
if new_comment:
new_comment = "\t#" + new_comment
new_limit = domain + "\t" + limit_type + "\t" + limit_item + "\t" + new_value + new_comment + "\n"
new_comment = f"\t#{new_comment}"
new_limit = f"{domain}\t{limit_type}\t{limit_item}\t{new_value}{new_comment}\n"
message = new_limit
nf.write(new_limit)

View File

@@ -259,8 +259,8 @@ class PamdLine(object):
def validate(self):
if not self.is_valid:
return False, "Rule is not valid " + self.line
return True, "Rule is valid " + self.line
return False, f"Rule is not valid {self.line}"
return True, f"Rule is valid {self.line}"
# Method to check if a rule matches the type, control and path.
def matches(self, rule_type, rule_control, rule_path, rule_args=None):
@@ -334,13 +334,13 @@ class PamdRule(PamdLine):
def __str__(self):
if self.rule_args:
return '{0: <11}{1} {2} {3}'.format(self.rule_type, self.rule_control, self.rule_path, ' '.join(self.rule_args))
return '{0: <11}{1} {2}'.format(self.rule_type, self.rule_control, self.rule_path)
return f"{self.rule_type: <11}{self.rule_control} {self.rule_path} {' '.join(self.rule_args)}"
return f'{self.rule_type: <11}{self.rule_control} {self.rule_path}'
@property
def rule_control(self):
if isinstance(self._control, list):
return '[' + ' '.join(self._control) + ']'
return f"[{' '.join(self._control)}]"
return self._control
@rule_control.setter
@@ -384,21 +384,21 @@ class PamdRule(PamdLine):
def validate(self):
# Validate the rule type
if self.rule_type not in VALID_TYPES:
return False, "Rule type, " + self.rule_type + ", is not valid in rule " + self.line
return False, f"Rule type, {self.rule_type}, is not valid in rule {self.line}"
# Validate the rule control
if isinstance(self._control, str) and self.rule_control not in PamdRule.valid_simple_controls:
return False, "Rule control, " + self.rule_control + ", is not valid in rule " + self.line
return False, f"Rule control, {self.rule_control}, is not valid in rule {self.line}"
elif isinstance(self._control, list):
for control in self._control:
value, action = control.split("=")
if value not in PamdRule.valid_control_values:
return False, "Rule control value, " + value + ", is not valid in rule " + self.line
return False, f"Rule control value, {value}, is not valid in rule {self.line}"
if action not in PamdRule.valid_control_actions and not PamdRule.is_action_unsigned_int(action):
return False, "Rule control action, " + action + ", is not valid in rule " + self.line
return False, f"Rule control action, {action}, is not valid in rule {self.line}"
# TODO: Validate path
return True, "Rule is valid " + self.line
return True, f"Rule is valid {self.line}"
# PamdService encapsulates an entire service and contains one or more rules. It seems the best way is to do this
@@ -638,7 +638,7 @@ class PamdService(object):
# Handle new key value arguments
if key_value_new_args_set.difference(key_value_current_args_set):
for key in key_value_new_args_set.difference(key_value_current_args_set):
new_args_to_add.append(key + '=' + key_value_new_args[key])
new_args_to_add.append(f"{key}={key_value_new_args[key]}")
if new_args_to_add:
current_rule.rule_args += new_args_to_add
@@ -648,8 +648,8 @@ class PamdService(object):
if key_value_new_args_set.intersection(key_value_current_args_set):
for key in key_value_new_args_set.intersection(key_value_current_args_set):
if key_value_current_args[key] != key_value_new_args[key]:
arg_index = current_rule.rule_args.index(key + '=' + key_value_current_args[key])
current_rule.rule_args[arg_index] = str(key + '=' + key_value_new_args[key])
arg_index = current_rule.rule_args.index(f"{key}={key_value_current_args[key]}")
current_rule.rule_args[arg_index] = str(f"{key}={key_value_new_args[key]}")
rule_changed = True
if rule_changed:
@@ -696,7 +696,7 @@ class PamdService(object):
lines = []
current_line = self._head
mark = "# Updated by Ansible - %s" % datetime.now().isoformat()
mark = f"# Updated by Ansible - {datetime.now().isoformat()}"
while current_line is not None:
lines.append(str(current_line))
current_line = current_line.next
@@ -710,7 +710,8 @@ class PamdService(object):
else:
lines.insert(1, mark)
return '\n'.join(lines) + '\n'
lines_joined = '\n'.join(lines)
return f"{lines_joined}\n"
def parse_module_arguments(module_arguments, return_none=False):
@@ -770,7 +771,7 @@ def main():
content = service_file_obj.read()
except IOError as e:
# If unable to read the file, fail out
module.fail_json(msg='Unable to open/read PAM module file %s with error %s.' % (fname, str(e)))
module.fail_json(msg=f'Unable to open/read PAM module file {fname} with error {e}.')
# Assuming we didn't fail, create the service
service = PamdService(content)
@@ -827,7 +828,7 @@ def main():
fd.write(str(service))
except IOError:
module.fail_json(msg='Unable to create temporary file %s' % temp_file)
module.fail_json(msg=f'Unable to create temporary file {temp_file}')
module.atomic_move(temp_file.name, os.path.realpath(fname))

View File

@@ -245,7 +245,7 @@ def parse_unit(size_str, unit=''):
matches = re.search(r'^(\d+),(\d+),(\d+)$', size_str)
if matches is None:
module.fail_json(
msg="Error interpreting parted size output: '%s'" % size_str
msg=f"Error interpreting parted size output: '{size_str}'"
)
size = {
@@ -427,13 +427,13 @@ def get_unlabeled_device_info(device, unit):
label.
"""
device_name = os.path.basename(device)
base = "/sys/block/%s" % device_name
base = f"/sys/block/{device_name}"
vendor = read_record(base + "/device/vendor", "Unknown")
model = read_record(base + "/device/model", "model")
logic_block = int(read_record(base + "/queue/logical_block_size", 0))
phys_block = int(read_record(base + "/queue/physical_block_size", 0))
size_bytes = int(read_record(base + "/size", 0)) * logic_block
vendor = read_record(f"{base}/device/vendor", "Unknown")
model = read_record(f"{base}/device/model", "model")
logic_block = int(read_record(f"{base}/queue/logical_block_size", 0))
phys_block = int(read_record(f"{base}/queue/physical_block_size", 0))
size_bytes = int(read_record(f"{base}/size", 0)) * logic_block
size, unit = format_disk_size(size_bytes, unit)
@@ -445,7 +445,7 @@ def get_unlabeled_device_info(device, unit):
'unit': unit,
'logical_block': logic_block,
'physical_block': phys_block,
'model': "%s %s" % (vendor, model),
'model': f"{vendor} {model}",
},
'partitions': []
}
@@ -469,8 +469,7 @@ def get_device_info(device, unit):
rc, out, err = module.run_command(command)
if rc != 0 and 'unrecognised disk label' not in err:
module.fail_json(msg=(
"Error while getting device information with parted "
"script: '%s'" % " ".join(command)),
f"Error while getting device information with parted script: '{' '.join(command)}'"),
rc=rc, out=out, err=err
)
@@ -570,7 +569,7 @@ def parted(script, device, align):
if rc != 0:
module.fail_json(
msg="Error while running parted script: %s" % " ".join(command).strip(),
msg=f"Error while running parted script: {' '.join(command).strip()}",
rc=rc, out=out, err=err
)
@@ -739,7 +738,7 @@ def main():
# Assign name to the partition
if name is not None and partition.get('name', None) != name:
# The double quotes need to be included in the arg passed to parted
script += ['name', str(number), '"%s"' % name]
script += ['name', str(number), f'"{name}"']
# Manage flags
if flags:

View File

@@ -190,13 +190,13 @@ def remove_packages(module, packages):
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="failed to remove %s: %s" % (package, to_text(stdout + stderr)))
module.fail_json(msg=f"failed to remove {package}: {to_text(stdout + stderr)}")
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=True, msg=f"removed {remove_c} package(s)")
module.exit_json(changed=False, msg="package(s) already absent")
@@ -213,11 +213,13 @@ def install_packages(module, state, packages, prompts):
if nb_prompts > 0 and (nb_prompts != nb_packages):
if nb_prompts > nb_packages:
diff = nb_prompts - nb_packages
msg = "%s packages to install but %s prompts to expect. %s prompts will be ignored" % (to_text(nb_packages), to_text(nb_prompts), to_text(diff))
msg = f"{to_text(nb_packages)} packages to install but {to_text(nb_prompts)} prompts to expect. {to_text(diff)} prompts will be ignored"
else:
diff = nb_packages - nb_prompts
msg = "%s packages to install but only %s prompts to expect. %s packages won't be expected to have a prompt" \
% (to_text(nb_packages), to_text(nb_prompts), to_text(diff))
msg = (
f"{to_text(nb_packages)} packages to install but only {to_text(nb_prompts)} prompts to expect. "
f"{to_text(diff)} packages won't be expected to have a prompt"
)
module.warn(msg)
# Preparing prompts answer according to item type
@@ -227,7 +229,7 @@ def install_packages(module, state, packages, prompts):
# We also expect here that the dict only has ONE key and the first key will be taken
if isinstance(_item, dict):
key = list(_item.keys())[0]
answer = _item[key] + "\n"
answer = f"{_item[key]}\n"
tmp_prompts.append((key, answer))
elif not _item:
@@ -258,12 +260,12 @@ def install_packages(module, state, packages, prompts):
cmd = [_get_pear_path(module), command, package]
rc, stdout, stderr = module.run_command(cmd, check_rc=False, prompt_regex=prompt_regex, data=data, binary_data=True)
if rc != 0:
module.fail_json(msg="failed to install %s: %s" % (package, to_text(stdout + stderr)))
module.fail_json(msg=f"failed to install {package}: {to_text(stdout + stderr)}")
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="installed %s package(s)" % (install_c))
module.exit_json(changed=True, msg=f"installed {install_c} package(s)")
module.exit_json(changed=False, msg="package(s) already installed")
@@ -279,10 +281,9 @@ def check_packages(module, packages, state):
if would_be_changed:
if state == "absent":
state = "removed"
module.exit_json(changed=True, msg="%s package(s) would be %s" % (
len(would_be_changed), state))
module.exit_json(changed=True, msg=f"{len(would_be_changed)} package(s) would be {state}")
else:
module.exit_json(change=False, msg="package(s) already %s" % state)
module.exit_json(change=False, msg=f"package(s) already {state}")
def main():

View File

@@ -123,7 +123,7 @@ class PSAdapter(object, metaclass=abc.ABCMeta):
try:
regex = re.compile(pattern, flags)
except re.error as e:
raise PSAdapterError("'%s' is not a valid regular expression: %s" % (pattern, to_native(e)))
raise PSAdapterError(f"'{pattern}' is not a valid regular expression: {e}")
return [p.pid for p in self._process_iter(*self.PATTERN_ATTRS) if self._matches_regex(p, regex)]

View File

@@ -96,7 +96,6 @@ packages:
import json
import os
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.facts.packages import CLIMgr
@@ -111,7 +110,7 @@ class PIP(CLIMgr):
def list_installed(self):
rc, out, err = self.module.run_command([self._cli, 'list', '-l', '--format=json'])
if rc != 0:
raise Exception("Unable to list packages rc=%s : %s" % (rc, err))
raise Exception(f"Unable to list packages rc={rc} : {err}")
return json.loads(out)
def get_package_details(self, package):
@@ -135,7 +134,7 @@ def main():
for pip in clients:
if not os.path.basename(pip).startswith('pip'):
module.warn('Skipping invalid pip client: %s' % (pip))
module.warn(f'Skipping invalid pip client: {pip}')
continue
try:
pip_mgr = PIP(pip, module)
@@ -143,11 +142,11 @@ def main():
found += 1
packages[pip] = pip_mgr.get_packages()
except Exception as e:
module.warn('Failed to retrieve packages with %s: %s' % (pip, to_text(e)))
module.warn(f'Failed to retrieve packages with {pip}: {e}')
continue
if found == 0:
module.fail_json(msg='Unable to use any of the supplied pip clients: %s' % clients)
module.fail_json(msg=f'Unable to use any of the supplied pip clients: {clients}')
# return info
results['packages'] = packages

View File

@@ -221,7 +221,7 @@ from ansible.module_utils.facts.compat import ansible_facts
def _make_name(name, suffix):
return name if suffix is None else "{0}{1}".format(name, suffix)
return name if suffix is None else f"{name}{suffix}"
class PipX(StateModuleHelper):
@@ -344,7 +344,7 @@ class PipX(StateModuleHelper):
def state_upgrade(self):
name = _make_name(self.vars.name, self.vars.suffix)
if not self.vars.application:
self.do_raise("Trying to upgrade a non-existent application: {0}".format(name))
self.do_raise(f"Trying to upgrade a non-existent application: {name}")
if self.vars.force:
self.changed = True
@@ -364,7 +364,7 @@ class PipX(StateModuleHelper):
def state_reinstall(self):
name = _make_name(self.vars.name, self.vars.suffix)
if not self.vars.application:
self.do_raise("Trying to reinstall a non-existent application: {0}".format(name))
self.do_raise(f"Trying to reinstall a non-existent application: {name}")
self.changed = True
with self.runner('state global name python', check_mode_skip=True) as ctx:
ctx.run(name=name)
@@ -373,7 +373,7 @@ class PipX(StateModuleHelper):
def state_inject(self):
name = _make_name(self.vars.name, self.vars.suffix)
if not self.vars.application:
self.do_raise("Trying to inject packages into a non-existent application: {0}".format(name))
self.do_raise(f"Trying to inject packages into a non-existent application: {name}")
if self.vars.force:
self.changed = True
with self.runner('state global index_url install_apps install_deps force editable pip_args name inject_packages', check_mode_skip=True) as ctx:
@@ -383,7 +383,7 @@ class PipX(StateModuleHelper):
def state_uninject(self):
name = _make_name(self.vars.name, self.vars.suffix)
if not self.vars.application:
self.do_raise("Trying to uninject packages into a non-existent application: {0}".format(name))
self.do_raise(f"Trying to uninject packages into a non-existent application: {name}")
with self.runner('state global name inject_packages', check_mode_skip=True) as ctx:
ctx.run(name=name)
self._capture_results(ctx)

View File

@@ -106,7 +106,7 @@ def main():
# Try to spot where this has happened and fix it.
for fragment in params['name']:
if re.search(r'^\d+(?:\.\d+)*', fragment) and packages and re.search(r'@[^,]*$', packages[-1]):
packages[-1] += ',' + fragment
packages[-1] += f",{fragment}"
else:
packages.append(fragment)
@@ -151,7 +151,7 @@ def ensure(module, state, packages, params):
accept_licenses = []
if params['be_name']:
beadm = ['--be-name=' + module.params['be_name']]
beadm = [f"--be-name={module.params['be_name']}"]
else:
beadm = []

View File

@@ -117,10 +117,10 @@ def set_publisher(module, params):
if params['origin'] is not None:
args.append('--remove-origin=*')
args.extend(['--add-origin=' + u for u in params['origin']])
args.extend([f"--add-origin={u}" for u in params['origin']])
if params['mirror'] is not None:
args.append('--remove-mirror=*')
args.extend(['--add-mirror=' + u for u in params['mirror']])
args.extend([f"--add-mirror={u}" for u in params['mirror']])
if params['sticky'] is not None and params['sticky']:
args.append('--sticky')

View File

@@ -151,7 +151,7 @@ def query_package(module, name):
# Use "pkgin search" to find the package. The regular expression will
# only match on the complete name.
rc, out, err = module.run_command([PKGIN_PATH] + pflag + ["search", "^%s$" % name])
rc, out, err = module.run_command([PKGIN_PATH] + pflag + ["search", f"^{name}$"])
# rc will not be 0 unless the search was a success
if rc == 0:
@@ -214,14 +214,14 @@ def format_action_message(module, action, count):
"count": count}
if module.check_mode:
message = "would have %(actioned)s %(count)d package" % vars
message = f"would have {vars['actioned']} {vars['count']} package"
else:
message = "%(actioned)s %(count)d package" % vars
message = f"{vars['actioned']} {vars['count']} package"
if count == 1:
return message
else:
return message + "s"
return f"{message}s"
def format_pkgin_command(module, command, package=None):
@@ -258,7 +258,7 @@ def remove_packages(module, packages):
format_pkgin_command(module, "remove", package))
if not module.check_mode and query_package(module, package) in [PackageState.PRESENT, PackageState.OUTDATED]:
module.fail_json(msg="failed to remove %s: %s" % (package, out), stdout=out, stderr=err)
module.fail_json(msg=f"failed to remove {package}: {out}", stdout=out, stderr=err)
remove_c += 1
@@ -277,13 +277,13 @@ def install_packages(module, packages):
if query_result in [PackageState.PRESENT, PackageState.OUTDATED]:
continue
elif query_result is PackageState.NOT_FOUND:
module.fail_json(msg="failed to find package %s for installation" % package)
module.fail_json(msg=f"failed to find package {package} for installation")
rc, out, err = module.run_command(
format_pkgin_command(module, "install", package))
if not module.check_mode and not query_package(module, package) in [PackageState.PRESENT, PackageState.OUTDATED]:
module.fail_json(msg="failed to install %s: %s" % (package, out), stdout=out, stderr=err)
module.fail_json(msg=f"failed to install {package}: {out}", stdout=out, stderr=err)
install_c += 1
@@ -319,7 +319,7 @@ def do_upgrade_packages(module, full=False):
if re.search('^(.*\n|)nothing to do.\n$', out):
module.exit_json(changed=False, msg="nothing left to upgrade")
else:
module.fail_json(msg="could not %s packages" % cmd, stdout=out, stderr=err)
module.fail_json(msg=f"could not {cmd} packages", stdout=out, stderr=err)
def upgrade_packages(module):

View File

@@ -196,7 +196,7 @@ def upgrade_packages(module, run_pkgng):
upgraded_c += int(match)
if upgraded_c > 0:
return (True, "updated %s package(s)" % upgraded_c, out, err)
return (True, f"updated {upgraded_c} package(s)", out, err)
return (False, "no packages need upgrades", out, err)
@@ -216,12 +216,12 @@ def remove_packages(module, run_pkgng, packages):
stderr += err
if not module.check_mode and query_package(module, run_pkgng, package):
module.fail_json(msg="failed to remove %s: %s" % (package, out), stdout=stdout, stderr=stderr)
module.fail_json(msg=f"failed to remove {package}: {out}", stdout=stdout, stderr=stderr)
remove_c += 1
if remove_c > 0:
return (True, "removed %s package(s)" % remove_c, stdout, stderr)
return (True, f"removed {remove_c} package(s)", stdout, stderr)
return (False, "package(s) already absent", stdout, stderr)
@@ -237,7 +237,7 @@ def install_packages(module, run_pkgng, packages, cached, state):
stdout += out
stderr += err
if rc != 0:
module.fail_json(msg="Could not update catalogue [%d]: %s %s" % (rc, out, err), stdout=stdout, stderr=stderr)
module.fail_json(msg=f"Could not update catalogue [{rc}]: {out} {err}", stdout=stdout, stderr=stderr)
for package in packages:
already_installed = query_package(module, run_pkgng, package)
@@ -280,22 +280,22 @@ def install_packages(module, run_pkgng, packages, cached, state):
if verified:
action_count[action] += 1
else:
module.fail_json(msg="failed to %s %s" % (action, package), stdout=stdout, stderr=stderr)
module.fail_json(msg=f"failed to {action} {package}", stdout=stdout, stderr=stderr)
if sum(action_count.values()) > 0:
past_tense = {'install': 'installed', 'upgrade': 'upgraded'}
messages = []
for (action, count) in action_count.items():
messages.append("%s %s package%s" % (past_tense.get(action, action), count, "s" if count != 1 else ""))
messages.append(f"{past_tense.get(action, action)} {count} package{'s' if count != 1 else ''}")
return (True, '; '.join(messages), stdout, stderr)
return (False, "package(s) already %s" % (state), stdout, stderr)
return (False, f"package(s) already {state}", stdout, stderr)
def annotation_query(module, run_pkgng, package, tag):
rc, out, err = run_pkgng('info', '-A', package)
match = re.search(r'^\s*(?P<tag>%s)\s*:\s*(?P<value>\w+)' % tag, out, flags=re.MULTILINE)
match = re.search(rf'^\s*(?P<tag>{tag})\s*:\s*(?P<value>\w+)', out, flags=re.MULTILINE)
if match:
return match.group('value')
return False
@@ -308,14 +308,12 @@ def annotation_add(module, run_pkgng, package, tag, value):
if not module.check_mode:
rc, out, err = run_pkgng('annotate', '-y', '-A', package, tag, data=value, binary_data=True)
if rc != 0:
module.fail_json(msg="could not annotate %s: %s"
% (package, out), stderr=err)
module.fail_json(msg=f"could not annotate {package}: {out}", stderr=err)
return True
elif _value != value:
# Annotation exists, but value differs
module.fail_json(
msg="failed to annotate %s, because %s is already set to %s, but should be set to %s"
% (package, tag, _value, value))
msg=f"failed to annotate {package}, because {tag} is already set to {_value}, but should be set to {value}")
return False
else:
# Annotation exists, nothing to do
@@ -328,8 +326,7 @@ def annotation_delete(module, run_pkgng, package, tag, value):
if not module.check_mode:
rc, out, err = run_pkgng('annotate', '-y', '-D', package, tag)
if rc != 0:
module.fail_json(msg="could not delete annotation to %s: %s"
% (package, out), stderr=err)
module.fail_json(msg=f"could not delete annotation to {package}: {out}", stderr=err)
return True
return False
@@ -338,8 +335,7 @@ def annotation_modify(module, run_pkgng, package, tag, value):
_value = annotation_query(module, run_pkgng, package, tag)
if not _value:
# No such tag
module.fail_json(msg="could not change annotation to %s: tag %s does not exist"
% (package, tag))
module.fail_json(msg=f"could not change annotation to {package}: tag {tag} does not exist")
elif _value == value:
# No change in value
return False
@@ -351,10 +347,9 @@ def annotation_modify(module, run_pkgng, package, tag, value):
# Check the output for a success message
if (
rc != 0
and re.search(r'^%s-[^:]+: Modified annotation tagged: %s' % (package, tag), out, flags=re.MULTILINE) is None
and re.search(rf'^{package}-[^:]+: Modified annotation tagged: {tag}', out, flags=re.MULTILINE) is None
):
module.fail_json(msg="failed to annotate %s, could not change annotation %s to %s: %s"
% (package, tag, value, out), stderr=err)
module.fail_json(msg=f"failed to annotate {package}, could not change annotation {tag} to {value}: {out}", stderr=err)
return True
@@ -382,8 +377,7 @@ def annotate_packages(module, run_pkgng, packages, annotations):
if annotation is None:
module.fail_json(
msg="failed to annotate %s, invalid annotate string: %s"
% (package, annotation_string)
msg=f"failed to annotate {package}, invalid annotate string: {annotation_string}"
)
annotation = annotation.groupdict()
@@ -391,7 +385,7 @@ def annotate_packages(module, run_pkgng, packages, annotations):
annotate_c += 1
if annotate_c > 0:
return (True, "added %s annotations." % annotate_c)
return (True, f"added {annotate_c} annotations.")
return (False, "changed no annotations")
@@ -414,7 +408,7 @@ def autoremove_packages(module, run_pkgng):
stdout += out
stderr += err
return (True, "autoremoved %d package(s)" % (autoremove_c), stdout, stderr)
return (True, f"autoremoved {autoremove_c} package(s)", stdout, stderr)
def main():
@@ -452,7 +446,7 @@ def main():
if rootdir_not_supported:
module.fail_json(msg="To use option 'rootdir' pkg version must be 1.5 or greater")
else:
dir_arg = "--rootdir=%s" % (p["rootdir"])
dir_arg = f"--rootdir={p['rootdir']}"
if p["ignore_osver"]:
ignore_osver_not_supported = pkgng_older_than(module, pkgng_path, [1, 11, 0])
@@ -460,10 +454,10 @@ def main():
module.fail_json(msg="To use option 'ignore_osver' pkg version must be 1.11 or greater")
if p["chroot"] is not None:
dir_arg = '--chroot=%s' % (p["chroot"])
dir_arg = f"--chroot={p['chroot']}"
if p["jail"] is not None:
dir_arg = '--jail=%s' % (p["jail"])
dir_arg = f"--jail={p['jail']}"
# as of pkg-1.1.4, PACKAGESITE is deprecated in favor of repository definitions
# in /usr/local/etc/pkg/repos
@@ -484,7 +478,7 @@ def main():
if repo_flag_not_supported:
pkgng_env['PACKAGESITE'] = p['pkgsite']
else:
cmd.append('--repository=%s' % (p['pkgsite'],))
cmd.append(f"--repository={p['pkgsite']}")
# If environ_update is specified to be "passed through"
# to module.run_command, then merge its values into pkgng_env

View File

@@ -289,15 +289,14 @@ class PersistentMemory(object):
# in case command[] has number
cmd = [str(part) for part in command]
self.module.log(msg='pmem_run_command: execute: %s' % cmd)
self.module.log(msg=f'pmem_run_command: execute: {cmd}')
rc, out, err = self.module.run_command(cmd)
self.module.log(msg='pmem_run_command: result: %s' % out)
self.module.log(msg=f'pmem_run_command: result: {out}')
if returnCheck and rc != 0:
self.module.fail_json(msg='Error while running: %s' %
cmd, rc=rc, out=out, err=err)
self.module.fail_json(msg=f'Error while running: {cmd}', rc=rc, out=out, err=err)
return out
@@ -363,7 +362,7 @@ class PersistentMemory(object):
return 'The format of size: NNN TB|GB|MB|KB|T|G|M|K|B'
if size_byte % aligns[0] != 0:
return 'size: %s should be align with %d' % (ns['size'], aligns[0])
return f"size: {ns['size']} should be align with {aligns[0]}"
is_space_enough = False
for i, avail in enumerate(available_size):
@@ -373,7 +372,7 @@ class PersistentMemory(object):
break
if is_space_enough is False:
return 'There is not available region for size: %s' % ns['size']
return f"There is not available region for size: {ns['size']}"
ns['size_byte'] = size_byte
@@ -381,7 +380,7 @@ class PersistentMemory(object):
return 'size option is required to configure multiple namespaces'
if ns['type'] not in types:
return 'type %s is not supported in this system. Supported type: %s' % (ns['type'], types)
return f"type {ns['type']} is not supported in this system. Supported type: {types}"
return None
@@ -410,7 +409,7 @@ class PersistentMemory(object):
for skt in self.socket:
if skt['id'] not in socket_ids:
return 'Invalid socket number: %d' % skt['id']
return f"Invalid socket number: {skt['id']}"
return None
@@ -494,9 +493,9 @@ class PersistentMemory(object):
if reserved is None:
res = 100 - memmode - appdirect
ipmctl_opts += ['memorymode=%d' % memmode, 'reserved=%d' % res]
ipmctl_opts += [f'memorymode={memmode}', f'reserved={res}']
else:
ipmctl_opts += ['memorymode=%d' % memmode, 'reserved=%d' % reserved]
ipmctl_opts += [f'memorymode={memmode}', f'reserved={reserved}']
if self.interleaved:
ipmctl_opts += ['PersistentMemoryType=AppDirect']
@@ -515,12 +514,12 @@ class PersistentMemory(object):
rc = True
for line in ipmctl_out.splitlines():
if warning.match(line):
errmsg = '%s (command: %s)' % (line, command)
errmsg = f'{line} (command: {command})'
rc = False
break
elif error.match(line):
if not ignore_error:
errmsg = '%s (command: %s)' % (line, command)
errmsg = f'{line} (command: {command})'
rc = False
break

View File

@@ -157,7 +157,6 @@ import json
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_native
class Pnpm(object):
@@ -178,14 +177,14 @@ class Pnpm(object):
self.alias_name_ver = None
if self.alias is not None:
self.alias_name_ver = self.alias + "@npm:"
self.alias_name_ver = f"{self.alias}@npm:"
if self.name is not None:
self.alias_name_ver = (self.alias_name_ver or "") + self.name
if self.version is not None:
self.alias_name_ver = self.alias_name_ver + "@" + str(self.version)
self.alias_name_ver = f"{self.alias_name_ver}@{self.version!s}"
else:
self.alias_name_ver = self.alias_name_ver + "@latest"
self.alias_name_ver = f"{self.alias_name_ver}@latest"
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
@@ -216,7 +215,7 @@ class Pnpm(object):
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="Path %s is not a directory" % self.path)
self.module.fail_json(msg=f"Path {self.path} is not a directory")
if not self.alias_name_ver and not os.path.isfile(
os.path.join(self.path, "package.json")
@@ -249,7 +248,7 @@ class Pnpm(object):
data = json.loads(out)
except Exception as e:
self.module.fail_json(
msg="Failed to parse pnpm output with error %s" % to_native(e)
msg=f"Failed to parse pnpm output with error {e}"
)
if "error" in data:
@@ -326,7 +325,7 @@ class Pnpm(object):
data = json.loads(out)
except Exception as e:
self.module.fail_json(
msg="Failed to parse pnpm output with error %s" % to_native(e)
msg=f"Failed to parse pnpm output with error {e}"
)
return data.keys()

View File

@@ -294,7 +294,7 @@ def query_set(module, set_, action):
if set_ in system_sets:
if action == 'unmerge':
module.fail_json(msg='set %s cannot be removed' % set_)
module.fail_json(msg=f'set {set_} cannot be removed')
return False
world_sets_path = '/var/lib/portage/world_sets'
@@ -440,7 +440,7 @@ def unmerge_packages(module, packages):
for flag in ['quiet', 'verbose']:
if p[flag]:
args.append('--%s' % flag)
args.append(f'--{flag}')
cmd, (rc, out, err) = run_emerge(module, packages, *args)
@@ -470,7 +470,7 @@ def cleanup_packages(module, packages):
for flag in ['quiet', 'verbose']:
if p[flag]:
args.append('--%s' % flag)
args.append(f'--{flag}')
cmd, (rc, out, err) = run_emerge(module, packages, *args)
if rc != 0:
@@ -572,8 +572,7 @@ def main():
if p['depclean']:
if packages and p['state'] not in portage_absent_states:
module.fail_json(
msg='Depclean can only be used with package when the state is '
'one of: %s' % portage_absent_states,
msg=f'Depclean can only be used with package when the state is one of: {portage_absent_states}',
)
cleanup_packages(module, packages)

View File

@@ -77,7 +77,7 @@ def query_package(module, name):
pkgng = False
pkg_glob_path = module.get_bin_path('pkg_glob', True)
# TODO: convert run_comand() argument to list!
rc, out, err = module.run_command("%s -e `pkg_glob %s`" % (pkg_info_path, shlex_quote(name)), use_unsafe_shell=True)
rc, out, err = module.run_command(f"{pkg_info_path} -e `pkg_glob {shlex_quote(name)}`", use_unsafe_shell=True)
pkg_info_path = [pkg_info_path]
else:
pkgng = True
@@ -122,7 +122,7 @@ def remove_packages(module, packages):
pkg_delete_path = module.get_bin_path('pkg_delete', False)
if not pkg_delete_path:
pkg_delete_path = module.get_bin_path('pkg', True)
pkg_delete_path = pkg_delete_path + " delete -y"
pkg_delete_path = f"{pkg_delete_path} delete -y"
# Using a for loop in case of error, we can report the package that failed
for package in packages:
@@ -131,22 +131,21 @@ def remove_packages(module, packages):
continue
# TODO: convert run_comand() argument to list!
rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, shlex_quote(package)), use_unsafe_shell=True)
rc, out, err = module.run_command(f"{pkg_delete_path} `{pkg_glob_path} {shlex_quote(package)}`", use_unsafe_shell=True)
if query_package(module, package):
name_without_digits = re.sub('[0-9]', '', package)
# TODO: convert run_comand() argument to list!
rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path,
shlex_quote(name_without_digits)),
rc, out, err = module.run_command(f"{pkg_delete_path} `{pkg_glob_path} {shlex_quote(name_without_digits)}`",
use_unsafe_shell=True)
if query_package(module, package):
module.fail_json(msg="failed to remove %s: %s" % (package, out))
module.fail_json(msg=f"failed to remove {package}: {out}")
remove_c += 1
if remove_c > 0:
module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
module.exit_json(changed=True, msg=f"removed {remove_c} package(s)")
module.exit_json(changed=False, msg="package(s) already absent")
@@ -177,16 +176,16 @@ def install_packages(module, packages, use_packages):
if matches == 1:
rc, out, err = module.run_command([portinstall_path, "--batch"] + portinstall_params + [package])
if not query_package(module, package):
module.fail_json(msg="failed to install %s: %s" % (package, out))
module.fail_json(msg=f"failed to install {package}: {out}")
elif matches == 0:
module.fail_json(msg="no matches for package %s" % (package))
module.fail_json(msg=f"no matches for package {package}")
else:
module.fail_json(msg="%s matches found for package name %s" % (matches, package))
module.fail_json(msg=f"{matches} matches found for package name {package}")
install_c += 1
if install_c > 0:
module.exit_json(changed=True, msg="present %s package(s)" % (install_c))
module.exit_json(changed=True, msg=f"present {install_c} package(s)")
module.exit_json(changed=False, msg="package(s) already present")

View File

@@ -158,11 +158,10 @@ def remove_pritunl_organization(module):
else:
module.fail_json(
msg=(
"Can not remove organization '%s' with %d attached users. "
f"Can not remove organization '{org_name}' with {org['user_count']} attached users. "
"Either set 'force' option to true or remove active users "
"from the organization"
)
% (org_name, org["user_count"])
)
module.exit_json(**result)

View File

@@ -97,7 +97,7 @@ def get_pritunl_organizations(module):
if org_name and len(organizations) == 0:
# When an org_name is provided but no organization match return an error
module.fail_json(msg="Organization '%s' does not exist" % org_name)
module.fail_json(msg=f"Organization '{org_name}' does not exist")
result = {}
result["changed"] = False

View File

@@ -185,7 +185,7 @@ def add_or_update_pritunl_user(module):
if len(org_obj_list) == 0:
module.fail_json(
msg="Can not add user to organization '%s' which does not exist" % org_name
msg=f"Can not add user to organization '{org_name}' which does not exist"
)
org_id = org_obj_list[0]["id"]
@@ -273,8 +273,7 @@ def remove_pritunl_user(module):
if len(org_obj_list) == 0:
module.fail_json(
msg="Can not remove user '%s' from a non existing organization '%s'"
% (user_name, org_name)
msg=f"Can not remove user '{user_name}' from a non existing organization '{org_name}'"
)
org_id = org_obj_list[0]["id"]

View File

@@ -116,8 +116,7 @@ def get_pritunl_user(module):
if len(org_obj_list) == 0:
module.fail_json(
msg="Can not list users from the organization '%s' which does not exist"
% org_name
msg=f"Can not list users from the organization '{org_name}' which does not exist"
)
org_id = org_obj_list[0]["id"]

View File

@@ -396,8 +396,7 @@ def pubnub_block(module, account, keyset):
# requested to start/stop.
if block is None and params['state'] in ['started', 'stopped']:
block_name = params.get('name')
module.fail_json(msg="'{0}' block doesn't exists.".format(block_name),
changed=account.changed, module_cache=dict(account))
module.fail_json(msg=f"'{block_name}' block doesn't exists.", changed=account.changed, module_cache=dict(account))
if block is None and params['state'] == 'present':
block = Block(name=params.get('name'),

View File

@@ -268,7 +268,7 @@ class pulp_server(object):
ssl_client_key=None,
add_export_distributor=False
):
url = "%s/pulp/api/v2/repositories/" % self.host
url = f"{self.host}/pulp/api/v2/repositories/"
data = dict()
data['id'] = repo_id
data['distributors'] = []
@@ -346,7 +346,7 @@ class pulp_server(object):
return True
def delete_repo(self, repo_id):
url = "%s/pulp/api/v2/repositories/%s/" % (self.host, repo_id)
url = f"{self.host}/pulp/api/v2/repositories/{repo_id}/"
response, info = fetch_url(self.module, url, data='', method='DELETE')
if info['status'] != 202:
@@ -369,7 +369,7 @@ class pulp_server(object):
return self.repo_cache[repo_id]
def publish_repo(self, repo_id, publish_distributor):
url = "%s/pulp/api/v2/repositories/%s/actions/publish/" % (self.host, repo_id)
url = f"{self.host}/pulp/api/v2/repositories/{repo_id}/actions/publish/"
# If there's no distributor specified, we will publish them all
if publish_distributor is None:
@@ -414,7 +414,7 @@ class pulp_server(object):
return True
def sync_repo(self, repo_id):
url = "%s/pulp/api/v2/repositories/%s/actions/sync/" % (self.host, repo_id)
url = f"{self.host}/pulp/api/v2/repositories/{repo_id}/actions/sync/"
response, info = fetch_url(self.module, url, data='', method='POST')
if info['status'] != 202:
@@ -430,11 +430,11 @@ class pulp_server(object):
return True
def update_repo_distributor_config(self, repo_id, **kwargs):
url = "%s/pulp/api/v2/repositories/%s/distributors/" % (self.host, repo_id)
url = f"{self.host}/pulp/api/v2/repositories/{repo_id}/distributors/"
repo_config = self.get_repo_config_by_id(repo_id)
for distributor in repo_config['distributors']:
distributor_url = "%s%s/" % (url, distributor['id'])
distributor_url = f"{url}{distributor['id']}/"
data = dict()
data['distributor_config'] = dict()
@@ -455,7 +455,7 @@ class pulp_server(object):
url=url)
def update_repo_importer_config(self, repo_id, **kwargs):
url = "%s/pulp/api/v2/repositories/%s/importers/" % (self.host, repo_id)
url = f"{self.host}/pulp/api/v2/repositories/{repo_id}/importers/"
data = dict()
importer_config = dict()
@@ -483,7 +483,7 @@ class pulp_server(object):
url=url)
def set_repo_list(self):
url = "%s/pulp/api/v2/repositories/?details=true" % self.host
url = f"{self.host}/pulp/api/v2/repositories/?details=true"
response, info = fetch_url(self.module, url, method='GET')
if info['status'] != 200:
@@ -497,7 +497,7 @@ class pulp_server(object):
def verify_tasks_completed(self, response_dict):
for task in response_dict['spawned_tasks']:
task_url = "%s%s" % (self.host, task['_href'])
task_url = f"{self.host}{task['_href']}"
while True:
response, info = fetch_url(

View File

@@ -196,7 +196,7 @@ from ansible.module_utils.basic import AnsibleModule
def _write_structured_data(basedir, basename, data):
if not os.path.exists(basedir):
os.makedirs(basedir)
file_path = os.path.join(basedir, "{0}.json".format(basename))
file_path = os.path.join(basedir, f"{basename}.json")
# This is more complex than you might normally expect because we want to
# open the file with only u+rw set. Also, we use the stat constants
# because ansible still supports python 2.4 and the octal syntax changed
@@ -246,8 +246,7 @@ def main():
if p['manifest']:
if not os.path.exists(p['manifest']):
module.fail_json(
msg="Manifest file %(manifest)s not found." % dict(
manifest=p['manifest']))
msg=f"Manifest file {dict(manifest=p['manifest'])['manifest']} not found.")
# Check if puppet is disabled here
if not p['manifest']:
@@ -290,11 +289,11 @@ def main():
elif rc == 124:
# timeout
module.exit_json(
rc=rc, msg="%s timed out" % ctx.cmd, stdout=stdout, stderr=stderr)
rc=rc, msg=f"{ctx.cmd} timed out", stdout=stdout, stderr=stderr)
else:
# failure
module.fail_json(
rc=rc, msg="%s failed with return code: %d" % (ctx.cmd, rc),
rc=rc, msg=f"{ctx.cmd} failed with return code: {rc}",
stdout=stdout, stderr=stderr)

View File

@@ -181,7 +181,7 @@ def main():
target.push_note(title, body)
module.exit_json(changed=False, msg="OK")
except PushError as e:
module.fail_json(msg="An error occurred, Pushbullet's response: %s" % str(e))
module.fail_json(msg=f"An error occurred, Pushbullet's response: {e}")
module.fail_json(msg="An unknown error has occurred")

View File

@@ -104,7 +104,7 @@ class Pushover(object):
def run(self, priority, msg, title, device):
''' Do, whatever it is, we do. '''
url = '%s/1/messages.json' % (self.base_uri)
url = f'{self.base_uri}/1/messages.json'
# parse config
options = dict(user=self.user,
@@ -149,7 +149,7 @@ def main():
except Exception:
module.fail_json(msg='Unable to send msg via pushover')
module.exit_json(msg='message sent successfully: %s' % response, changed=False)
module.exit_json(msg=f'message sent successfully: {response}', changed=False)
if __name__ == '__main__':

View File

@@ -174,10 +174,10 @@ def main():
for dep in module.params['dependencies']:
match = pkg_dep_re.match(dep)
if not match:
module.fail_json(msg="Failed to parse version requirement '{0}'. Must be formatted like 'ansible>2.6'".format(dep))
module.fail_json(msg=f"Failed to parse version requirement '{dep}'. Must be formatted like 'ansible>2.6'")
pkg, op, version = match.groups()
if op is not None and op not in operations:
module.fail_json(msg="Failed to parse version requirement '{0}'. Operator must be one of >, <, <=, >=, or ==".format(dep))
module.fail_json(msg=f"Failed to parse version requirement '{dep}'. Operator must be one of >, <, <=, >=, or ==")
try:
existing = pkg_resources.get_distribution(pkg).version
except pkg_resources.DistributionNotFound: