batch 2 - update Python idioms to 3.7 using pyupgrade (#11342)

* batch 2 - update Python idioms to 3.7 using pyupgrade

* Apply suggestions from code review
This commit is contained in:
Alexei Znamensky
2025-12-30 22:50:16 +13:00
committed by GitHub
parent 9e363c9f94
commit 266d9d3fb0
19 changed files with 64 additions and 70 deletions

View File

@@ -31,7 +31,7 @@ class ActionModule(ActionBase):
if task_vars is None: if task_vars is None:
task_vars = dict() task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars) result = super().run(tmp, task_vars)
del tmp # tmp no longer has any effect del tmp # tmp no longer has any effect
if "that" not in self._task.args: if "that" not in self._task.args:

View File

@@ -128,4 +128,4 @@ STATIC_URL = "/static/"
STATIC_ROOT = "/tmp/django-static" STATIC_ROOT = "/tmp/django-static"
if "DJANGO_ANSIBLE_RAISE" in os.environ: if "DJANGO_ANSIBLE_RAISE" in os.environ:
raise ValueError("DJANGO_ANSIBLE_RAISE={0}".format(os.environ["DJANGO_ANSIBLE_RAISE"])) raise ValueError("DJANGO_ANSIBLE_RAISE={}".format(os.environ["DJANGO_ANSIBLE_RAISE"]))

View File

@@ -128,4 +128,4 @@ STATIC_URL = "/static/"
STATIC_ROOT = "/tmp/django-static" STATIC_ROOT = "/tmp/django-static"
if "DJANGO_ANSIBLE_RAISE" in os.environ: if "DJANGO_ANSIBLE_RAISE" in os.environ:
raise ValueError("DJANGO_ANSIBLE_RAISE={0}".format(os.environ["DJANGO_ANSIBLE_RAISE"])) raise ValueError("DJANGO_ANSIBLE_RAISE={}".format(os.environ["DJANGO_ANSIBLE_RAISE"]))

View File

@@ -20,7 +20,7 @@ except ImportError:
# Argument parsing # Argument parsing
if len(sys.argv) != 4: if len(sys.argv) != 4:
print("Syntax: {0} <bind> <port> <path>".format(sys.argv[0])) print(f"Syntax: {sys.argv[0]} <bind> <port> <path>")
sys.exit(-1) sys.exit(-1)
HOST, PORT, PATH = sys.argv[1:4] HOST, PORT, PATH = sys.argv[1:4]

View File

@@ -8,8 +8,8 @@ import lmdb
map_size = 1024 * 100 map_size = 1024 * 100
env = lmdb.open("./jp.mdb", map_size=map_size) env = lmdb.open("./jp.mdb", map_size=map_size)
with env.begin(write=True) as txn: with env.begin(write=True) as txn:
txn.put("fr".encode(), "France".encode()) txn.put(b"fr", b"France")
txn.put("nl".encode(), "Netherlands".encode()) txn.put(b"nl", b"Netherlands")
txn.put("es".encode(), "Spain".encode()) txn.put(b"es", b"Spain")
txn.put("be".encode(), "Belgium".encode()) txn.put(b"be", b"Belgium")
txn.put("lu".encode(), "Luxembourg".encode()) txn.put(b"lu", b"Luxembourg")

View File

@@ -46,7 +46,7 @@ if len(sys.argv) > 3:
ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if HAS_TLS and ssl_ctx is not None: if HAS_TLS and ssl_ctx is not None:
print("Using %s and %s" % (certfile, keyfile)) print(f"Using {certfile} and {keyfile}")
ssl_ctx.load_cert_chain(certfile=certfile, keyfile=keyfile) ssl_ctx.load_cert_chain(certfile=certfile, keyfile=keyfile)
print("Start SMTP server on port", port1) print("Start SMTP server on port", port1)

View File

@@ -32,7 +32,7 @@ class EchoServer(BaseHTTPRequestHandler):
def run_webserver(): def run_webserver():
webServer = HTTPServer((hostname, server_port), EchoServer) webServer = HTTPServer((hostname, server_port), EchoServer)
print("Server started http://%s:%s" % (hostname, server_port)) print(f"Server started http://{hostname}:{server_port}")
try: try:
webServer.serve_forever() webServer.serve_forever()

View File

@@ -17,7 +17,7 @@ username = sys.argv[3]
password = sys.argv[4] password = sys.argv[4]
if username: if username:
url = "http://%s:%s@127.0.0.1:9001/RPC2" % (quote(username, safe=""), quote(password, safe="")) url = "http://{}:{}@127.0.0.1:9001/RPC2".format(quote(username, safe=""), quote(password, safe=""))
else: else:
url = "http://127.0.0.1:9001/RPC2" url = "http://127.0.0.1:9001/RPC2"

View File

@@ -17,13 +17,13 @@ def main():
with open(".azure-pipelines/azure-pipelines.yml", "rb") as f: with open(".azure-pipelines/azure-pipelines.yml", "rb") as f:
azp = yaml.safe_load(f) azp = yaml.safe_load(f)
allowed_targets = set(["azp/generic/1"]) allowed_targets = {"azp/generic/1"}
for stage in azp["stages"]: for stage in azp["stages"]:
if stage["stage"].startswith(("Sanity", "Unit", "Generic", "Summary")): if stage["stage"].startswith(("Sanity", "Unit", "Generic", "Summary")):
continue continue
for job in stage["jobs"]: for job in stage["jobs"]:
for group in job["parameters"]["groups"]: for group in job["parameters"]["groups"]:
allowed_targets.add("azp/posix/{0}".format(group)) allowed_targets.add(f"azp/posix/{group}")
paths = glob.glob("tests/integration/targets/*/aliases") paths = glob.glob("tests/integration/targets/*/aliases")
@@ -31,7 +31,7 @@ def main():
for path in paths: for path in paths:
targets = [] targets = []
skip = False skip = False
with open(path, "r") as f: with open(path) as f:
for line in f: for line in f:
if "#" in line: if "#" in line:
line = line[: line.find("#")] line = line[: line.find("#")]
@@ -56,11 +56,11 @@ def main():
if not targets: if not targets:
if "targets/setup_" in path: if "targets/setup_" in path:
continue continue
print("%s: %s" % (path, "found no targets")) print(f"{path}: found no targets")
has_errors = True has_errors = True
for target in targets: for target in targets:
if target not in allowed_targets: if target not in allowed_targets:
print("%s: %s" % (path, 'found invalid target "{0}"'.format(target))) print(f'{path}: found invalid target "{target}"')
has_errors = True has_errors = True
return 1 if has_errors else 0 return 1 if has_errors else 0

View File

@@ -70,7 +70,7 @@ class BotmetaCheck:
try: try:
documentation = [] documentation = []
in_docs = False in_docs = False
with open(filename, "r", encoding="utf-8") as f: with open(filename, encoding="utf-8") as f:
for line in f: for line in f:
if line.startswith("DOCUMENTATION ="): if line.startswith("DOCUMENTATION ="):
in_docs = True in_docs = True

View File

@@ -34,13 +34,13 @@ def inventory():
def load_txt_data(path): def load_txt_data(path):
with open(path, "r") as f: with open(path) as f:
s = f.read() s = f.read()
return s return s
def load_yml_data(path): def load_yml_data(path):
with open(path, "r") as f: with open(path) as f:
d = yaml.safe_load(f) d = yaml.safe_load(f)
return d return d

View File

@@ -80,7 +80,7 @@ def test_verify_file_bad_config(inventory):
def get_vm_pool_json(): def get_vm_pool_json():
with open("tests/unit/plugins/inventory/fixtures/opennebula_inventory.json", "r") as json_file: with open("tests/unit/plugins/inventory/fixtures/opennebula_inventory.json") as json_file:
jsondata = json.load(json_file) jsondata = json.load(json_file)
data = type("pyone.bindings.VM_POOLSub", (object,), {"VM": []})() data = type("pyone.bindings.VM_POOLSub", (object,), {"VM": []})()
@@ -349,15 +349,13 @@ keyed_groups:
# note the vm_pool (and json data file) has four hosts, # note the vm_pool (and json data file) has four hosts,
# but the options above asks ansible to filter one out # but the options above asks ansible to filter one out
assert len(get_vm_pool_json().VM) == 4 assert len(get_vm_pool_json().VM) == 4
assert set(vm.NAME for vm in get_vm_pool_json().VM) == set( assert {vm.NAME for vm in get_vm_pool_json().VM} == {
[ "terraform_demo_00",
"terraform_demo_00", "terraform_demo_01",
"terraform_demo_01", "terraform_demo_srv_00",
"terraform_demo_srv_00", "bs-windows",
"bs-windows", }
] assert set(im._inventory.hosts) == {"terraform_demo_00", "terraform_demo_01", "terraform_demo_srv_00"}
)
assert set(im._inventory.hosts) == set(["terraform_demo_00", "terraform_demo_01", "terraform_demo_srv_00"])
host_demo00 = im._inventory.get_host("terraform_demo_00") host_demo00 = im._inventory.get_host("terraform_demo_00")
host_demo01 = im._inventory.get_host("terraform_demo_01") host_demo01 = im._inventory.get_host("terraform_demo_01")

View File

@@ -14,7 +14,7 @@ from ansible_collections.community.general.plugins.lookup.onepassword import (
def load_file(file): def load_file(file):
with open((os.path.join(os.path.dirname(__file__), "onepassword_fixtures", file)), "r") as f: with open(os.path.join(os.path.dirname(__file__), "onepassword_fixtures", file)) as f:
return json.loads(f.read()) return json.loads(f.read())

View File

@@ -205,7 +205,7 @@ class TestLookupModule(unittest.TestCase):
def test_bitwarden_plugin_duplicates(self): def test_bitwarden_plugin_duplicates(self):
# There are two records with name dupe_name; we need to be order-insensitive with # There are two records with name dupe_name; we need to be order-insensitive with
# checking what was retrieved. # checking what was retrieved.
self.assertEqual(set(["b", "d"]), set(self.lookup.run(["dupe_name"], field="password")[0])) self.assertEqual({"b", "d"}, set(self.lookup.run(["dupe_name"], field="password")[0]))
@patch("ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden", new=MockBitwarden()) @patch("ansible_collections.community.general.plugins.lookup.bitwarden._bitwarden", new=MockBitwarden())
def test_bitwarden_plugin_full_item(self): def test_bitwarden_plugin_full_item(self):

View File

@@ -82,7 +82,7 @@ class TestInterfacesFileModule(unittest.TestCase):
with open(testfilepath, "wb") as f: with open(testfilepath, "wb") as f:
f.write(string.encode()) f.write(string.encode())
else: else:
with open(testfilepath, "r") as goldenfile: with open(testfilepath) as goldenfile:
goldenData = json.load(goldenfile) goldenData = json.load(goldenfile)
self.assertEqual(goldenData, ifaces) self.assertEqual(goldenData, ifaces)
@@ -96,7 +96,7 @@ class TestInterfacesFileModule(unittest.TestCase):
f.write(string.encode()) f.write(string.encode())
f.close() f.close()
else: else:
with open(testfilepath, "r") as goldenfile: with open(testfilepath) as goldenfile:
goldenstring = goldenfile.read() goldenstring = goldenfile.read()
goldenfile.close() goldenfile.close()
self.assertEqual(goldenstring, string) self.assertEqual(goldenstring, string)

View File

@@ -245,9 +245,7 @@ class TestPermanentParams(ModuleTestCase):
with patch("ansible_collections.community.general.plugins.modules.modprobe.Modprobe.modprobe_files"): with patch("ansible_collections.community.general.plugins.modules.modprobe.Modprobe.modprobe_files"):
modprobe.modprobe_files = ["/etc/modprobe.d/dummy1.conf", "/etc/modprobe.d/dummy2.conf"] modprobe.modprobe_files = ["/etc/modprobe.d/dummy1.conf", "/etc/modprobe.d/dummy2.conf"]
assert modprobe.permanent_params == set( assert modprobe.permanent_params == {"numdummies=4", "dummy_parameter1=6", "dummy_parameter2=5"}
["numdummies=4", "dummy_parameter1=6", "dummy_parameter2=5"]
)
def test_module_permanent_params_empty(self): def test_module_permanent_params_empty(self):
files_content = ["", ""] files_content = ["", ""]

View File

@@ -44,7 +44,7 @@ valid_inventory = {
"sed": "4.8-1", "sed": "4.8-1",
"sqlite": "3.36.0-1", "sqlite": "3.36.0-1",
}, },
"installed_groups": {"base-devel": set(["gawk", "grep", "file", "findutils", "pacman", "sed", "gzip", "gettext"])}, "installed_groups": {"base-devel": {"gawk", "grep", "file", "findutils", "pacman", "sed", "gzip", "gettext"}},
"available_pkgs": { "available_pkgs": {
"acl": "2.3.1-1", "acl": "2.3.1-1",
"amd-ucode": "20211027.1d00989-1", "amd-ucode": "20211027.1d00989-1",
@@ -61,35 +61,33 @@ valid_inventory = {
"sudo": "1.9.8.p2-3", "sudo": "1.9.8.p2-3",
}, },
"available_groups": { "available_groups": {
"base-devel": set( "base-devel": {
[ "libtool",
"libtool", "gawk",
"gawk", "which",
"which", "texinfo",
"texinfo", "fakeroot",
"fakeroot", "grep",
"grep", "findutils",
"findutils", "autoconf",
"autoconf", "gzip",
"gzip", "pkgconf",
"pkgconf", "flex",
"flex", "patch",
"patch", "groff",
"groff", "m4",
"m4", "bison",
"bison", "gcc",
"gcc", "gettext",
"gettext", "make",
"make", "file",
"file", "pacman",
"pacman", "sed",
"sed", "automake",
"automake", "sudo",
"sudo", "binutils",
"binutils", },
] "some-group": {"libtool", "sudo", "binutils"},
),
"some-group": set(["libtool", "sudo", "binutils"]),
}, },
"upgradable_pkgs": { "upgradable_pkgs": {
"sqlite": VersionTuple(current="3.36.0-1", latest="3.37.0-1"), "sqlite": VersionTuple(current="3.36.0-1", latest="3.37.0-1"),

View File

@@ -34,7 +34,7 @@ class UTHelper:
for ext in extensions: for ext in extensions:
test_spec_filename = test_module.__file__.replace(".py", ext) test_spec_filename = test_module.__file__.replace(".py", ext)
if os.path.exists(test_spec_filename): if os.path.exists(test_spec_filename):
with open(test_spec_filename, "r") as test_spec_filehandle: with open(test_spec_filename) as test_spec_filehandle:
return UTHelper.from_file(ansible_module, test_module, test_spec_filehandle, mocks=mocks) return UTHelper.from_file(ansible_module, test_module, test_spec_filehandle, mocks=mocks)
raise Exception( raise Exception(

View File

@@ -139,7 +139,7 @@ def test_make_unsafe_dict_key():
def test_make_unsafe_set(): def test_make_unsafe_set():
value = set([_make_trusted("test")]) value = {_make_trusted("test")}
if not SUPPORTS_DATA_TAGGING: if not SUPPORTS_DATA_TAGGING:
value.add(_make_trusted(b"test")) value.add(_make_trusted(b"test"))
unsafe_value = make_unsafe(value) unsafe_value = make_unsafe(value)
@@ -147,7 +147,7 @@ def test_make_unsafe_set():
for obj in unsafe_value: for obj in unsafe_value:
assert _is_trusted(obj) assert _is_trusted(obj)
value = set([_make_trusted("{{test}}")]) value = {_make_trusted("{{test}}")}
if not SUPPORTS_DATA_TAGGING: if not SUPPORTS_DATA_TAGGING:
value.add(_make_trusted(b"{{test}}")) value.add(_make_trusted(b"{{test}}"))
unsafe_value = make_unsafe(value) unsafe_value = make_unsafe(value)