Run pyupgrade against the files modified in the merge-forward

Pedro Algarvio 2023-11-26 16:24:07 +00:00
parent a420d94431
commit 250704b18c
No known key found for this signature in database
GPG key ID: BB36BF6584A298FF
26 changed files with 218 additions and 264 deletions
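For context, pyupgrade is a tool that rewrites older Python idioms into their modern equivalents, for example turning "{}".format(x) into f"{x}", which is exactly the pattern visible in the hunks below. The following is a minimal sketch of how the files touched in a merge-forward could be collected and fed to it; the branch names, the --py37-plus flag, and the helper name are illustrative assumptions and are not taken from this commit.

# Sketch only (not part of this commit): run pyupgrade on the Python files
# changed in a merge-forward. Assumes pyupgrade is installed; the base branch
# and the --py37-plus flag are placeholders chosen for illustration.
import subprocess

def pyupgrade_merge_forward(base: str = "origin/master", head: str = "HEAD") -> None:
    # List the files changed between the merge base and HEAD.
    diff = subprocess.run(
        ["git", "diff", "--name-only", f"{base}...{head}"],
        capture_output=True,
        text=True,
        check=True,
    )
    py_files = [path for path in diff.stdout.splitlines() if path.endswith(".py")]
    if not py_files:
        return
    # pyupgrade rewrites files in place and exits non-zero when it changed
    # something, so a non-zero return code is not treated as an error here.
    subprocess.run(["pyupgrade", "--py37-plus", *py_files], check=False)

if __name__ == "__main__":
    pyupgrade_merge_forward()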

View file

@ -44,7 +44,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
auto_reconnect=True,
)
except SaltClientError as exc:
self.exit(2, "{}\n".format(exc))
self.exit(2, f"{exc}\n")
return
if self.options.batch or self.options.static:
@ -146,9 +146,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
if self.config["async"]:
jid = self.local_client.cmd_async(**kwargs)
salt.utils.stringutils.print_cli(
"Executed command with job ID: {}".format(jid)
)
salt.utils.stringutils.print_cli(f"Executed command with job ID: {jid}")
return
# local will be None when there was an error
@ -224,8 +222,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
EauthAuthenticationError,
SaltClientError,
) as exc:
ret = str(exc)
self._output_ret(ret, "", retcode=1)
self._output_ret(str(exc), "", retcode=1)
finally:
self.local_client.destroy()
@ -337,16 +334,14 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
salt.utils.stringutils.print_cli("Summary")
salt.utils.stringutils.print_cli("-------------------------------------------")
salt.utils.stringutils.print_cli(
"# of minions targeted: {}".format(return_counter + not_return_counter)
f"# of minions targeted: {return_counter + not_return_counter}"
)
salt.utils.stringutils.print_cli(f"# of minions returned: {return_counter}")
salt.utils.stringutils.print_cli(
f"# of minions that did not return: {not_return_counter}"
)
salt.utils.stringutils.print_cli(
"# of minions returned: {}".format(return_counter)
)
salt.utils.stringutils.print_cli(
"# of minions that did not return: {}".format(not_return_counter)
)
salt.utils.stringutils.print_cli(
"# of minions with errors: {}".format(len(failed_minions))
f"# of minions with errors: {len(failed_minions)}"
)
if self.options.verbose:
if not_connected_minions:
@ -449,7 +444,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
if not ret:
self.exit(2, "No minions found to gather docs from\n")
if isinstance(ret, str):
self.exit(2, "{}\n".format(ret))
self.exit(2, f"{ret}\n")
for host in ret:
if isinstance(ret[host], str) and (
ret[host].startswith("Minion did not return")
@ -464,6 +459,6 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
salt.output.display_output({fun: docs[fun]}, "nested", self.config)
else:
for fun in sorted(docs):
salt.utils.stringutils.print_cli("{}:".format(fun))
salt.utils.stringutils.print_cli(f"{fun}:")
salt.utils.stringutils.print_cli(docs[fun])
salt.utils.stringutils.print_cli("")

View file

@ -239,7 +239,7 @@ def init():
per_remote_defaults = {}
for param in PER_REMOTE_OVERRIDES:
per_remote_defaults[param] = str(__opts__["hgfs_{}".format(param)])
per_remote_defaults[param] = str(__opts__[f"hgfs_{param}"])
for remote in __opts__["hgfs_remotes"]:
repo_conf = copy.deepcopy(per_remote_defaults)
@ -355,7 +355,7 @@ def init():
with salt.utils.files.fopen(hgconfpath, "w+") as hgconfig:
hgconfig.write("[paths]\n")
hgconfig.write(
salt.utils.stringutils.to_str("default = {}\n".format(repo_url))
salt.utils.stringutils.to_str(f"default = {repo_url}\n")
)
repo_conf.update(
@ -365,7 +365,7 @@ def init():
"hash": repo_hash,
"cachedir": rp_,
"lockfile": os.path.join(
__opts__["cachedir"], "hgfs", "{}.update.lk".format(repo_hash)
__opts__["cachedir"], "hgfs", f"{repo_hash}.update.lk"
),
}
)
@ -379,7 +379,7 @@ def init():
try:
with salt.utils.files.fopen(remote_map, "w+") as fp_:
timestamp = datetime.now().strftime("%d %b %Y %H:%M:%S.%f")
fp_.write("# hgfs_remote map as of {}\n".format(timestamp))
fp_.write(f"# hgfs_remote map as of {timestamp}\n")
for repo in repos:
fp_.write(
salt.utils.stringutils.to_str(
@ -444,7 +444,7 @@ def clear_cache():
try:
shutil.rmtree(rdir)
except OSError as exc:
errors.append("Unable to delete {}: {}".format(rdir, exc))
errors.append(f"Unable to delete {rdir}: {exc}")
return errors
@ -694,14 +694,12 @@ def find_file(path, tgt_env="base", **kwargs): # pylint: disable=W0613
dest = os.path.join(__opts__["cachedir"], "hgfs/refs", tgt_env, path)
hashes_glob = os.path.join(
__opts__["cachedir"], "hgfs/hash", tgt_env, "{}.hash.*".format(path)
__opts__["cachedir"], "hgfs/hash", tgt_env, f"{path}.hash.*"
)
blobshadest = os.path.join(
__opts__["cachedir"], "hgfs/hash", tgt_env, "{}.hash.blob_sha1".format(path)
)
lk_fn = os.path.join(
__opts__["cachedir"], "hgfs/hash", tgt_env, "{}.lk".format(path)
__opts__["cachedir"], "hgfs/hash", tgt_env, f"{path}.hash.blob_sha1"
)
lk_fn = os.path.join(__opts__["cachedir"], "hgfs/hash", tgt_env, f"{path}.lk")
destdir = os.path.dirname(dest)
hashdir = os.path.dirname(blobshadest)
if not os.path.isdir(destdir):
@ -746,7 +744,7 @@ def find_file(path, tgt_env="base", **kwargs): # pylint: disable=W0613
return fnd
try:
repo["repo"].cat(
[salt.utils.stringutils.to_bytes("path:{}".format(repo_path))],
[salt.utils.stringutils.to_bytes(f"path:{repo_path}")],
rev=ref[2],
output=dest,
)

View file

@ -137,7 +137,7 @@ def init():
per_remote_defaults = {}
for param in PER_REMOTE_OVERRIDES:
per_remote_defaults[param] = str(__opts__["svnfs_{}".format(param)])
per_remote_defaults[param] = str(__opts__[f"svnfs_{param}"])
for remote in __opts__["svnfs_remotes"]:
repo_conf = copy.deepcopy(per_remote_defaults)
@ -240,7 +240,7 @@ def init():
try:
with salt.utils.files.fopen(remote_map, "w+") as fp_:
timestamp = datetime.now().strftime("%d %b %Y %H:%M:%S.%f")
fp_.write("# svnfs_remote map as of {}\n".format(timestamp))
fp_.write(f"# svnfs_remote map as of {timestamp}\n")
for repo_conf in repos:
fp_.write(
salt.utils.stringutils.to_str(
@ -307,7 +307,7 @@ def clear_cache():
try:
shutil.rmtree(rdir)
except OSError as exc:
errors.append("Unable to delete {}: {}".format(rdir, exc))
errors.append(f"Unable to delete {rdir}: {exc}")
return errors

View file

@ -67,7 +67,7 @@ def mount(location, access="rw", root=None):
log.info("Path already existing: %s", root)
else:
break
cmd = "guestmount -i -a {} --{} {}".format(location, access, root)
cmd = f"guestmount -i -a {location} --{access} {root}"
__salt__["cmd.run"](cmd, python_shell=False)
return root
@ -82,7 +82,7 @@ def umount(name, disk=None):
salt '*' guestfs.umount /mountpoint disk=/srv/images/fedora.qcow
"""
cmd = "guestunmount -q {}".format(name)
cmd = f"guestunmount -q {name}"
__salt__["cmd.run"](cmd)
# Wait at most 5s that the disk is no longuer used
@ -90,7 +90,7 @@ def umount(name, disk=None):
while (
disk is not None
and loops < 5
and len(__salt__["cmd.run"]("lsof {}".format(disk)).splitlines()) != 0
and len(__salt__["cmd.run"](f"lsof {disk}").splitlines()) != 0
):
loops = loops + 1
time.sleep(1)

View file

@ -255,7 +255,7 @@ def facts_refresh():
try:
conn.facts_refresh()
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Execution failed due to "{}"'.format(exception)
ret["message"] = f'Execution failed due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -286,7 +286,7 @@ def facts():
ret["facts"] = __proxy__["junos.get_serialized_facts"]()
ret["out"] = True
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not display facts due to "{}"'.format(exception)
ret["message"] = f'Could not display facts due to "{exception}"'
ret["out"] = False
_restart_connection()
@ -362,7 +362,7 @@ def rpc(cmd=None, dest=None, **kwargs):
try:
filter_reply = etree.XML(op["filter"])
except etree.XMLSyntaxError as ex:
ret["message"] = "Invalid filter: {}".format(str(ex))
ret["message"] = f"Invalid filter: {ex}"
ret["out"] = False
return ret
@ -372,7 +372,7 @@ def rpc(cmd=None, dest=None, **kwargs):
try:
reply = getattr(conn.rpc, cmd.replace("-", "_"))(filter_reply, options=op)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'RPC execution failed due to "{}"'.format(exception)
ret["message"] = f'RPC execution failed due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -386,7 +386,7 @@ def rpc(cmd=None, dest=None, **kwargs):
try:
reply = getattr(conn.rpc, cmd.replace("-", "_"))({"format": format_}, **op)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'RPC execution failed due to "{}"'.format(exception)
ret["message"] = f'RPC execution failed due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -453,7 +453,7 @@ def set_hostname(hostname=None, **kwargs):
# Added to recent versions of JunOs
# Use text format instead
set_string = "set system host-name {}".format(hostname)
set_string = f"set system host-name {hostname}"
try:
conn.cu.load(set_string, format="set")
except Exception as exception: # pylint: disable=broad-except
@ -467,7 +467,7 @@ def set_hostname(hostname=None, **kwargs):
try:
commit_ok = conn.cu.commit_check()
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not commit check due to error "{}"'.format(exception)
ret["message"] = f'Could not commit check due to error "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -560,7 +560,7 @@ def commit(**kwargs):
try:
commit_ok = conn.cu.commit_check()
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not perform commit check due to "{}"'.format(exception)
ret["message"] = f'Could not perform commit check due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -672,7 +672,7 @@ def rollback(**kwargs):
try:
ret["out"] = conn.cu.rollback(id_)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Rollback failed due to "{}"'.format(exception)
ret["message"] = f'Rollback failed due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -697,7 +697,7 @@ def rollback(**kwargs):
try:
commit_ok = conn.cu.commit_check()
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not commit check due to "{}"'.format(exception)
ret["message"] = f'Could not commit check due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -770,7 +770,7 @@ def diff(**kwargs):
try:
ret["message"] = conn.cu.diff(rb_id=id_)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not get diff with error "{}"'.format(exception)
ret["message"] = f'Could not get diff with error "{exception}"'
ret["out"] = False
_restart_connection()
@ -835,7 +835,7 @@ def ping(dest_ip=None, **kwargs):
try:
ret["message"] = jxmlease.parse(etree.tostring(conn.rpc.ping(**op)))
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Execution failed due to "{}"'.format(exception)
ret["message"] = f'Execution failed due to "{exception}"'
ret["out"] = False
_restart_connection()
@ -892,7 +892,7 @@ def cli(command=None, **kwargs):
try:
result = conn.cli(command, format_, warning=False)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Execution failed due to "{}"'.format(exception)
ret["message"] = f'Execution failed due to "{exception}"'
ret["out"] = False
_restart_connection()
return ret
@ -985,7 +985,7 @@ def shutdown(**kwargs):
ret["message"] = "Successfully powered off/rebooted."
ret["out"] = True
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not poweroff/reboot because "{}"'.format(exception)
ret["message"] = f'Could not poweroff/reboot because "{exception}"'
ret["out"] = False
_restart_connection()
@ -1155,7 +1155,7 @@ def install_config(path=None, **kwargs):
except Exception as exception: # pylint: disable=broad-except
ret[
"message"
] = 'Could not load configuration due to : "{}"'.format(exception)
] = f'Could not load configuration due to : "{exception}"'
ret["format"] = op["format"]
ret["out"] = False
_restart_connection()
@ -1250,11 +1250,11 @@ def install_config(path=None, **kwargs):
except Exception as exception: # pylint: disable=broad-except
ret[
"message"
] = "Could not write into diffs_file due to: '{}'".format(exception)
] = f"Could not write into diffs_file due to: '{exception}'"
ret["out"] = False
except ValueError as ex:
message = "install_config failed due to: {}".format(str(ex))
message = f"install_config failed due to: {ex}"
log.error(message)
ret["message"] = message
ret["out"] = False
@ -1263,12 +1263,12 @@ def install_config(path=None, **kwargs):
ret["message"] = ex.message
ret["out"] = False
except RpcTimeoutError as ex:
message = "install_config failed due to timeout error : {}".format(str(ex))
message = f"install_config failed due to timeout error : {ex}"
log.error(message)
ret["message"] = message
ret["out"] = False
except Exception as exc: # pylint: disable=broad-except
ret["message"] = "install_config failed due to exception: '{}'".format(exc)
ret["message"] = f"install_config failed due to exception: '{exc}'"
ret["out"] = False
return ret
@ -1299,7 +1299,7 @@ def zeroize():
conn.cli("request system zeroize")
ret["message"] = "Completed zeroize and rebooted"
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not zeroize due to : "{}"'.format(exception)
ret["message"] = f'Could not zeroize due to : "{exception}"'
ret["out"] = False
_restart_connection()
@ -1429,7 +1429,7 @@ def install_os(path=None, **kwargs):
image_path, progress=True, timeout=timeout, **op
)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Installation failed due to: "{}"'.format(exception)
ret["message"] = f'Installation failed due to: "{exception}"'
ret["out"] = False
__proxy__["junos.reboot_clear"]()
_restart_connection()
@ -1440,7 +1440,7 @@ def install_os(path=None, **kwargs):
path, progress=True, timeout=timeout, **op
)
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Installation failed due to: "{}"'.format(exception)
ret["message"] = f'Installation failed due to: "{exception}"'
ret["out"] = False
__proxy__["junos.reboot_clear"]()
_restart_connection()
@ -1450,7 +1450,7 @@ def install_os(path=None, **kwargs):
ret["out"] = True
ret["message"] = "Installed the os."
else:
ret["message"] = "Installation failed. Reason: {}".format(install_message)
ret["message"] = f"Installation failed. Reason: {install_message}"
ret["out"] = False
__proxy__["junos.reboot_clear"]()
return ret
@ -1517,16 +1517,16 @@ def file_copy(src, dest):
with HandleFileCopy(src) as fp:
if fp is None:
ret["message"] = "Invalid source file path {}".format(src)
ret["message"] = f"Invalid source file path {src}"
ret["out"] = False
return ret
try:
with SCP(conn, progress=True) as scp:
scp.put(fp, dest)
ret["message"] = "Successfully copied file from {} to {}".format(src, dest)
ret["message"] = f"Successfully copied file from {src} to {dest}"
except Exception as exception: # pylint: disable=broad-except
ret["message"] = 'Could not copy file : "{}"'.format(exception)
ret["message"] = f'Could not copy file : "{exception}"'
ret["out"] = False
return ret
@ -1557,12 +1557,12 @@ def lock():
conn.cu.lock()
ret["message"] = "Successfully locked the configuration."
except RpcTimeoutError as exception:
ret["message"] = 'Could not gain lock due to : "{}"'.format(exception)
ret["message"] = f'Could not gain lock due to : "{exception}"'
ret["out"] = False
_restart_connection()
except LockError as exception:
ret["message"] = 'Could not gain lock due to : "{}"'.format(exception)
ret["message"] = f'Could not gain lock due to : "{exception}"'
ret["out"] = False
return ret
@ -1767,7 +1767,7 @@ def commit_check():
conn.cu.commit_check()
ret["message"] = "Commit check succeeded."
except Exception as exception: # pylint: disable=broad-except
ret["message"] = "Commit check failed with {}".format(exception)
ret["message"] = f"Commit check failed with {exception}"
ret["out"] = False
_restart_connection()
@ -1844,9 +1844,9 @@ def get_table(
pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__))
try:
if path is not None:
file_path = os.path.join(path, "{}".format(table_file))
file_path = os.path.join(path, f"{table_file}")
else:
file_path = os.path.join(pyez_tables_path, "{}".format(table_file))
file_path = os.path.join(pyez_tables_path, f"{table_file}")
with HandleFileCopy(file_path) as file_loc:
if file_loc is None:
@ -1923,7 +1923,7 @@ def get_table(
_restart_connection()
return ret
except Exception as err: # pylint: disable=broad-except
ret["message"] = "Uncaught exception - please report: {}".format(str(err))
ret["message"] = f"Uncaught exception - please report: {str(err)}"
ret["out"] = False
_restart_connection()
return ret
@ -2091,9 +2091,7 @@ def file_compare(file1, file2, **kwargs): # pragma: no cover
if not junos_cli:
return {"success": False, "message": "Cannot find Junos cli command"}
cliret = __salt__["cmd.run"](
"{} file compare files {} {} ".format(junos_cli, file1, file2)
)
cliret = __salt__["cmd.run"](f"{junos_cli} file compare files {file1} {file2} ")
clilines = cliret.splitlines()
for r in clilines:
@ -2147,7 +2145,7 @@ def fsentry_exists(dir, **kwargs): # pragma: no cover
if not junos_cli:
return {"success": False, "message": "Cannot find Junos cli command"}
ret = __salt__["cmd.run"]("{} file show {}".format(junos_cli, dir))
ret = __salt__["cmd.run"](f"{junos_cli} file show {dir}")
retlines = ret.splitlines()
exists = True
is_dir = False
@ -2168,7 +2166,7 @@ def _find_routing_engines():
if not junos_cli:
return {"success": False, "message": "Cannot find Junos cli command"}
re_check = __salt__["cmd.run"]("{} show chassis routing-engine".format(junos_cli))
re_check = __salt__["cmd.run"](f"{junos_cli} show chassis routing-engine")
engine_present = True
engine = {}
@ -2336,9 +2334,7 @@ def dir_copy(source, dest, force=False, **kwargs): # pragma: no cover
target = dest + d
status = fsentry_exists(target)
if not status["exists"]:
ret = __salt__["cmd.run"](
"{} file make-directory {}".format(junos_cli, target)
)
ret = __salt__["cmd.run"](f"{junos_cli} file make-directory {target}")
ret = ret_messages + ret
else:
ret_messages = ret_messages + "Directory " + target + " already exists.\n"
@ -2348,14 +2344,12 @@ def dir_copy(source, dest, force=False, **kwargs): # pragma: no cover
comp_result = file_compare(f, target)
if not comp_result["identical"] or force:
ret = __salt__["cmd.run"](
"{} file copy {} {}".format(junos_cli, f, target)
)
ret = __salt__["cmd.run"](f"{junos_cli} file copy {f} {target}")
ret = ret_messages + ret
else:
ret_messages = (
ret_messages
+ "Files {} and {} are identical, not copying.\n".format(f, target)
+ f"Files {f} and {target} are identical, not copying.\n"
)
return ret_messages

View file

@ -145,7 +145,7 @@ def _get_zone_etc_timezone():
return salt.utils.stringutils.to_unicode(fp_.read()).strip()
except OSError as exc:
raise CommandExecutionError(
"Problem reading timezone file {}: {}".format(tzfile, exc.strerror)
f"Problem reading timezone file {tzfile}: {exc.strerror}"
)
@ -241,7 +241,7 @@ def get_offset():
salt_path = "/opt/salt/bin/date"
if not os.path.exists(salt_path):
return "date in salt binaries does not exist: {}".format(salt_path)
return f"date in salt binaries does not exist: {salt_path}"
return __salt__["cmd.run"]([salt_path, "+%z"], python_shell=False)
@ -274,24 +274,24 @@ def set_zone(timezone):
"""
if salt.utils.path.which("timedatectl"):
try:
__salt__["cmd.run"]("timedatectl set-timezone {}".format(timezone))
__salt__["cmd.run"](f"timedatectl set-timezone {timezone}")
except CommandExecutionError:
pass
if "Solaris" in __grains__["os_family"] or "AIX" in __grains__["os_family"]:
zonepath = "/usr/share/lib/zoneinfo/{}".format(timezone)
zonepath = f"/usr/share/lib/zoneinfo/{timezone}"
else:
zonepath = "/usr/share/zoneinfo/{}".format(timezone)
zonepath = f"/usr/share/zoneinfo/{timezone}"
if not os.path.exists(zonepath) and "AIX" not in __grains__["os_family"]:
return "Zone does not exist: {}".format(zonepath)
return f"Zone does not exist: {zonepath}"
tzfile = _get_localtime_path()
if os.path.exists(tzfile):
os.unlink(tzfile)
if "Solaris" in __grains__["os_family"]:
__salt__["file.sed"]("/etc/default/init", "^TZ=.*", "TZ={}".format(timezone))
__salt__["file.sed"]("/etc/default/init", "^TZ=.*", f"TZ={timezone}")
elif "AIX" in __grains__["os_family"]:
# timezone could be Olson or Posix
curtzstring = get_zone()
@ -308,12 +308,10 @@ def set_zone(timezone):
os.symlink(zonepath, tzfile)
if "RedHat" in __grains__["os_family"]:
__salt__["file.sed"](
"/etc/sysconfig/clock", "^ZONE=.*", 'ZONE="{}"'.format(timezone)
)
__salt__["file.sed"]("/etc/sysconfig/clock", "^ZONE=.*", f'ZONE="{timezone}"')
elif "Suse" in __grains__["os_family"]:
__salt__["file.sed"](
"/etc/sysconfig/clock", "^TIMEZONE=.*", 'TIMEZONE="{}"'.format(timezone)
"/etc/sysconfig/clock", "^TIMEZONE=.*", f'TIMEZONE="{timezone}"'
)
elif "Debian" in __grains__["os_family"] or "Gentoo" in __grains__["os_family"]:
with salt.utils.files.fopen("/etc/timezone", "w") as ofh:
@ -362,9 +360,7 @@ def zone_compare(timezone):
except OSError as exc:
problematic_file = exc.filename
if problematic_file == zonepath:
raise SaltInvocationError(
'Can\'t find a local timezone "{}"'.format(timezone)
)
raise SaltInvocationError(f'Can\'t find a local timezone "{timezone}"')
elif problematic_file == tzfile:
raise CommandExecutionError(
"Failed to read {} to determine current timezone: {}".format(
@ -384,7 +380,7 @@ def _get_localtime_path():
def _get_zone_file(timezone):
return "/usr/share/zoneinfo/{}".format(timezone)
return f"/usr/share/zoneinfo/{timezone}"
def get_hwclock():
@ -454,7 +450,7 @@ def get_hwclock():
if line == "local":
return "LOCAL"
raise CommandExecutionError(
"Correct offset value not found in {}".format(offset_file)
f"Correct offset value not found in {offset_file}"
)
except OSError as exc:
raise CommandExecutionError(
@ -556,10 +552,10 @@ def set_hwclock(clock):
cmd = ["rtc", "-z", "GMT" if clock.lower() == "utc" else timezone]
return __salt__["cmd.retcode"](cmd, python_shell=False) == 0
zonepath = "/usr/share/zoneinfo/{}".format(timezone)
zonepath = f"/usr/share/zoneinfo/{timezone}"
if not os.path.exists(zonepath):
raise CommandExecutionError("Zone '{}' does not exist".format(zonepath))
raise CommandExecutionError(f"Zone '{zonepath}' does not exist")
os.unlink("/etc/localtime")
os.symlink(zonepath, "/etc/localtime")
@ -573,13 +569,13 @@ def set_hwclock(clock):
return __salt__["cmd.retcode"](cmd, python_shell=False) == 0
elif "RedHat" in __grains__["os_family"]:
__salt__["file.sed"](
"/etc/sysconfig/clock", "^ZONE=.*", 'ZONE="{}"'.format(timezone)
"/etc/sysconfig/clock", "^ZONE=.*", f'ZONE="{timezone}"'
)
elif "Suse" in __grains__["os_family"]:
__salt__["file.sed"](
"/etc/sysconfig/clock",
"^TIMEZONE=.*",
'TIMEZONE="{}"'.format(timezone),
f'TIMEZONE="{timezone}"',
)
elif "Debian" in __grains__["os_family"]:
if clock == "UTC":
@ -591,14 +587,10 @@ def set_hwclock(clock):
raise SaltInvocationError("Only 'UTC' and 'localtime' are allowed")
if clock == "localtime":
clock = "local"
__salt__["file.sed"](
"/etc/conf.d/hwclock", "^clock=.*", 'clock="{}"'.format(clock)
)
__salt__["file.sed"]("/etc/conf.d/hwclock", "^clock=.*", f'clock="{clock}"')
elif "Slackware" in os_family:
if clock not in ("UTC", "localtime"):
raise SaltInvocationError("Only 'UTC' and 'localtime' are allowed")
__salt__["file.sed"](
"/etc/hardwareclock", "^(UTC|localtime)", "{}".format(clock)
)
__salt__["file.sed"]("/etc/hardwareclock", "^(UTC|localtime)", f"{clock}")
return True

View file

@ -31,7 +31,7 @@ def mk_token(opts, tdata):
hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE))
tok = str(hash_type(os.urandom(512)).hexdigest())
t_path = os.path.join(opts["token_dir"], tok)
temp_t_path = "{}.tmp".format(t_path)
temp_t_path = f"{t_path}.tmp"
while os.path.isfile(t_path):
tok = str(hash_type(os.urandom(512)).hexdigest())
t_path = os.path.join(opts["token_dir"], tok)

View file

@ -27,21 +27,21 @@ class DigitalOceanTest(CloudTest):
"""
Tests the return of running the --list-images command for digitalocean
"""
image_list = self.run_cloud("--list-images {}".format(self.PROVIDER))
image_list = self.run_cloud(f"--list-images {self.PROVIDER}")
self.assertIn("ubuntu-18-04-x64", [i.strip() for i in image_list])
def test_list_locations(self):
"""
Tests the return of running the --list-locations command for digitalocean
"""
_list_locations = self.run_cloud("--list-locations {}".format(self.PROVIDER))
_list_locations = self.run_cloud(f"--list-locations {self.PROVIDER}")
self.assertIn("San Francisco 2", [i.strip() for i in _list_locations])
def test_list_sizes(self):
"""
Tests the return of running the --list-sizes command for digitalocean
"""
_list_sizes = self.run_cloud("--list-sizes {}".format(self.PROVIDER))
_list_sizes = self.run_cloud(f"--list-sizes {self.PROVIDER}")
self.assertIn("16gb", [i.strip() for i in _list_sizes])
@pytest.mark.skip_on_fips_enabled_platform
@ -84,25 +84,23 @@ class DigitalOceanTest(CloudTest):
self.assertIn(finger_print, [i.strip() for i in _key])
# List all keys
list_keypairs = self.run_cloud("-f list_keypairs {}".format(self.PROVIDER))
list_keypairs = self.run_cloud(f"-f list_keypairs {self.PROVIDER}")
self.assertIn(finger_print, [i.strip() for i in list_keypairs])
# List key
show_keypair = self.run_cloud(
"-f show_keypair {} keyname={}".format(self.PROVIDER, do_key_name)
f"-f show_keypair {self.PROVIDER} keyname={do_key_name}"
)
self.assertIn(finger_print, [i.strip() for i in show_keypair])
except AssertionError:
# Delete the public key if the above assertions fail
self.run_cloud("-f remove_key {} id={}".format(self.PROVIDER, finger_print))
self.run_cloud(f"-f remove_key {self.PROVIDER} id={finger_print}")
raise
finally:
# Delete public key
self.assertTrue(
self.run_cloud(
"-f remove_key {} id={}".format(self.PROVIDER, finger_print)
)
self.run_cloud(f"-f remove_key {self.PROVIDER} id={finger_print}")
)
def test_instance(self):
@ -111,7 +109,7 @@ class DigitalOceanTest(CloudTest):
"""
# check if instance with salt installed returned
ret_str = self.run_cloud(
"-p digitalocean-test {}".format(self.instance_name), timeout=TIMEOUT
f"-p digitalocean-test {self.instance_name}", timeout=TIMEOUT
)
self.assertInstanceExists(ret_str)

View file

@ -45,7 +45,7 @@ class VenafiTest(ShellCase):
@pytest.mark.slow_test
@pytest.mark.skip_on_fips_enabled_platform
def test_request(self, name):
cn = "{}.example.com".format(name)
cn = f"{name}.example.com"
ret = self.run_run_plus(
fun="venafi.request",

View file

@ -421,7 +421,7 @@ class CPModuleTest(ModuleCase):
cp.cache_file
"""
nginx_port = ports.get_unused_localhost_port()
url_prefix = "http://localhost:{}/".format(nginx_port)
url_prefix = f"http://localhost:{nginx_port}/"
temp_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
self.addCleanup(shutil.rmtree, temp_dir, ignore_errors=True)
nginx_root_dir = os.path.join(temp_dir, "root")
@ -444,7 +444,7 @@ class CPModuleTest(ModuleCase):
fp_.write(
textwrap.dedent(
salt.utils.stringutils.to_str(
"""\
f"""\
user root;
worker_processes 1;
error_log {nginx_conf_dir}/server_error.log;
@ -474,9 +474,7 @@ class CPModuleTest(ModuleCase):
return 302 /actual_file;
}}
}}
}}""".format(
**locals()
)
}}"""
)
)
)

View file

@ -23,7 +23,7 @@ ARCHIVE_DIR = (
)
ARCHIVE_NAME = "custom.tar.gz"
ARCHIVE_TAR_SOURCE = "http://localhost:{}/{}".format(9999, ARCHIVE_NAME)
ARCHIVE_TAR_SOURCE = f"http://localhost:{9999}/{ARCHIVE_NAME}"
ARCHIVE_TAR_HASH = "md5=7643861ac07c30fe7d2310e9f25ca514"
ARCHIVE_TAR_SHA_HASH = (
"sha256=9591159d86f0a180e4e0645b2320d0235e23e66c66797df61508bf185e0ac1d2"

View file

@ -31,7 +31,7 @@ pytestmark = [
def _win_user_where(username, password, program):
cmd = "cmd.exe /c where {}".format(program)
cmd = f"cmd.exe /c where {program}"
ret = salt.utils.win_runas.runas(cmd, username, password)
assert ret["retcode"] == 0, "{} returned {}".format(cmd, ret["retcode"])
return ret["stdout"].strip().split("\n")[-1].strip()
@ -90,7 +90,7 @@ def test_pip_installed_removed(modules, states):
"""
name = "pudb"
if name in modules.pip.list():
pytest.skip("{} is already installed, uninstall to run this test".format(name))
pytest.skip(f"{name} is already installed, uninstall to run this test")
ret = states.pip.installed(name=name)
assert ret.result is True
ret = states.pip.removed(name=name)
@ -310,9 +310,7 @@ def test_issue_6912_wrong_owner(tmp_path, create_virtualenv, modules, states):
str(venv_dir), user=account.username, password="PassWord1!", **venv_kwargs
)
if venv_create.get("retcode", 1) > 0:
pytest.skip(
"Failed to create testcase virtual environment: {}".format(venv_create)
)
pytest.skip(f"Failed to create testcase virtual environment: {venv_create}")
# pip install passing the package name in `name`
ret = states.pip.installed(
@ -379,9 +377,7 @@ def test_issue_6912_wrong_owner_requirements_file(
str(venv_dir), user=account.username, password="PassWord1!", **venv_kwargs
)
if venv_create.get("retcode", 1) > 0:
pytest.skip(
"failed to create testcase virtual environment: {}".format(venv_create)
)
pytest.skip(f"failed to create testcase virtual environment: {venv_create}")
# pip install using a requirements file
contents = "pep8\n"
@ -526,9 +522,7 @@ def test_22359_pip_installed_unless_does_not_trigger_warnings(
venv_dir = str(tmp_path / "pip-installed-unless")
venv_create = create_virtualenv(venv_dir)
if venv_create["retcode"] > 0:
pytest.skip(
"Failed to create testcase virtual environment: {}".format(venv_create)
)
pytest.skip(f"Failed to create testcase virtual environment: {venv_create}")
false_cmd = salt.utils.path.which("false")
if salt.utils.platform.is_windows():
@ -574,7 +568,7 @@ def test_issue_54755(tmp_path, state_tree, modules):
with pytest.helpers.temp_file("issue-54755.sls", sls_contents, state_tree):
ret = modules.state.sls(mods="issue-54755", pillar={"file_path": file_path})
key = "file_|-issue-54755_|-{}_|-managed".format(file_path)
key = f"file_|-issue-54755_|-{file_path}_|-managed"
assert key in ret.raw
assert ret.raw[key]["result"] is True
with salt.utils.files.fopen(str(file_path), "r") as fp:

View file

@ -36,7 +36,7 @@ def test_issue_1959_virtualenv_runas(tmp_path_world_rw, state_tree, states):
ret = states.virtualenv.managed(
name=str(venv_dir),
user=account.username,
requirements="salt://{}/requirements.txt".format(state_tree_dirname),
requirements=f"salt://{state_tree_dirname}/requirements.txt",
)
assert ret.result is True
@ -56,11 +56,11 @@ def test_issue_2594_non_invalidated_cache(tmp_path, state_tree, modules, require
# Our state template
template = [
"{}:".format(venv_dir),
f"{venv_dir}:",
" virtualenv.managed:",
" - system_site_packages: False",
" - clear: false",
" - requirements: salt://{}/requirements.txt".format(state_tree_dirname),
f" - requirements: salt://{state_tree_dirname}/requirements.txt",
]
# Let's run our state!!!

View file

@ -39,7 +39,7 @@ def file_add_delete_sls(testfile_path, base_env_state_tree_root_dir):
path=testfile_path
)
with pytest.helpers.temp_file(
"{}.sls".format(sls_name), sls_contents, base_env_state_tree_root_dir
f"{sls_name}.sls", sls_contents, base_env_state_tree_root_dir
):
yield sls_name

View file

@ -39,7 +39,7 @@ def test_load_map(grains, salt_cli, salt_minion):
assert isinstance(
ret.data, dict
), "failed to return dictionary from jinja.load_map: {}".format(ret)
), f"failed to return dictionary from jinja.load_map: {ret}"
with salt.utils.files.fopen(_path("defaults.yaml", absolute=True)) as fh_:
defaults = salt.utils.yaml.safe_load(fh_)

View file

@ -15,7 +15,7 @@ def test_issue_54765_salt(tmp_path, salt_cli, salt_minion):
pillar={"file_path": file_path},
minion_tgt=salt_minion.id,
).data
key = "file_|-issue-54765_|-{}_|-managed".format(file_path)
key = f"file_|-issue-54765_|-{file_path}_|-managed"
assert key in ret
assert ret[key]["result"] is True
with salt.utils.files.fopen(file_path, "r") as fp:
@ -30,7 +30,7 @@ def test_issue_54765_call(tmp_path, salt_call_cli):
"issue-54765",
pillar=f"{{'file_path': '{file_path}'}}",
)
key = "file_|-issue-54765_|-{}_|-managed".format(file_path)
key = f"file_|-issue-54765_|-{file_path}_|-managed"
assert ret.data[key]["result"] is True
with salt.utils.files.fopen(file_path, "r") as fp:
assert fp.read().strip() == "bar"

View file

@ -2686,7 +2686,7 @@ def test_get_table_wrong_path():
"out": False,
"hostname": "1.1.1.1",
"tablename": "ModuleTable",
"message": "Given table file {} cannot be located".format(file),
"message": f"Given table file {file} cannot be located",
}
with patch.dict(
junos.__salt__, {"file.file_exists": MagicMock(return_value=False)}
@ -2706,7 +2706,7 @@ def test_get_table_no_path_no_file():
"out": False,
"hostname": "1.1.1.1",
"tablename": "ModuleTable",
"message": "Given table file {} cannot be located".format(file),
"message": f"Given table file {file} cannot be located",
}
with patch.dict(
junos.__salt__, {"file.file_exists": MagicMock(return_value=False)}

View file

@ -118,7 +118,7 @@ def test_add_quotes(pol_info):
(None, "Not Defined"),
(chr(0), "Disabled"),
(chr(1), "Enabled"),
(chr(2), "Invalid Value: {!r}".format(chr(2))),
(chr(2), f"Invalid Value: {chr(2)!r}"),
("patrick", "Invalid Value"),
),
)

View file

@ -391,7 +391,7 @@ def test_update_dict_key_value(minion_opts, local_salt):
# Test incorrect usage
for update_with in [42, "foo", [42]]:
template = "{{ {} | update_dict_key_value('bar:baz', update_with) }}"
expected = r"Cannot update {} with a {}.".format(type({}), type(update_with))
expected = rf"Cannot update {type({})} with a {type(update_with)}."
with pytest.raises(SaltRenderError, match=expected):
render_jinja_tmpl(
template,
@ -462,7 +462,7 @@ def test_extend_dict_key_value(minion_opts, local_salt):
# Test incorrect usage
template = "{{ {} | extend_dict_key_value('bar:baz', 42) }}"
expected = r"Cannot extend {} with a {}.".format(type([]), int)
expected = rf"Cannot extend {type([])} with a {int}."
with pytest.raises(SaltRenderError, match=expected):
render_jinja_tmpl(
template, dict(opts=minion_opts, saltenv="test", salt=local_salt)
@ -811,12 +811,12 @@ def test_http_query(minion_opts, local_salt, backend, httpserver):
"backend": backend,
"body": "Hey, this isn't http://google.com!",
}
httpserver.expect_request("/{}".format(backend)).respond_with_data(
httpserver.expect_request(f"/{backend}").respond_with_data(
salt.utils.json.dumps(response), content_type="text/plain"
)
rendered = render_jinja_tmpl(
"{{ '"
+ httpserver.url_for("/{}".format(backend))
+ httpserver.url_for(f"/{backend}")
+ "' | http_query(backend='"
+ backend
+ "') }}",
@ -836,7 +836,7 @@ def test_http_query(minion_opts, local_salt, backend, httpserver):
)
assert isinstance(
dict_reply["body"], str
), "Failed with rendered template: {}".format(rendered)
), f"Failed with rendered template: {rendered}"
def test_to_bool(minion_opts, local_salt):

View file

@ -432,10 +432,7 @@ def test_get_log_level_default(
# Check log file logger
assert log_impl.log_level_logfile == default_log_level
# Check help message
assert (
"Default: '{}'.".format(default_log_level)
in instance.get_option("--log-level").help
)
assert f"Default: '{default_log_level}'." in instance.get_option("--log-level").help
# log file configuration tests
@ -458,7 +455,7 @@ def test_get_log_file_cli(
log_level = testing_config[loglevel_config_setting_name]
# Set log file in CLI
log_file = "{}_cli.log".format(log_file)
log_file = f"{log_file}_cli.log"
args = ["--log-file", log_file] + args
instance = parser()
@ -497,7 +494,7 @@ def test_get_log_file_config(
log_level = testing_config[loglevel_config_setting_name]
# Set log file in config
log_file = "{}_config.log".format(log_file)
log_file = f"{log_file}_config.log"
testing_config.update({logfile_config_setting_name: log_file})
instance = parser()
@ -555,10 +552,7 @@ def test_get_log_file_default(
# Check log file logger
assert log_impl.log_file == log_file
# Check help message
assert (
"Default: '{}'.".format(default_log_file)
in instance.get_option("--log-file").help
)
assert f"Default: '{default_log_file}'." in instance.get_option("--log-file").help
# log file log level configuration tests
@ -683,7 +677,7 @@ def test_get_log_file_level_default(
assert log_impl.log_level_logfile == log_level_logfile
# Check help message
assert (
"Default: '{}'.".format(default_log_level)
f"Default: '{default_log_level}'."
in instance.get_option("--log-file-level").help
)

View file

@ -499,7 +499,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
self.assertTrue(create_api_result.get("created"))
self.assertTrue(api)
self.assertEqual(api["id"], assigned_api_id)
self.assertEqual(api["createdDate"], "{}".format(created_date))
self.assertEqual(api["createdDate"], f"{created_date}")
self.assertEqual(api["name"], "unit-testing123")
self.assertEqual(api["description"], "unit-testing1234")
@ -726,7 +726,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
"test-salt-key", "test-lambda-api-key", **conn_parameters
)
api_key = create_api_key_result.get("apiKey")
now_str = "{}".format(now)
now_str = f"{now}"
self.assertTrue(create_api_key_result.get("created"))
self.assertEqual(api_key.get("lastUpdatedDate"), now_str)
@ -797,7 +797,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
result = boto_apigateway.update_api_key_description(
apiKey="88883333amaa1ZMVGCoLeaTrQk8kzOC36vCgRcT2",
description="test-lambda-api-key",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("updated"))
@ -813,7 +813,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
result = boto_apigateway.update_api_key_description(
apiKey="88883333amaa1ZMVGCoLeaTrQk8kzOC36vCgRcT2",
description="test-lambda-api-key",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("updated"))
@ -884,7 +884,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
result = boto_apigateway.associate_api_key_stagekeys(
apiKey="88883333amaa1ZMVGCoLeaTrQk8kzOC36vCgRcT2",
stagekeyslist=["123yd1l123/test"],
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("associated"))
@ -900,7 +900,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
result = boto_apigateway.associate_api_key_stagekeys(
apiKey="88883333amaa1ZMVGCoLeaTrQk8kzOC36vCgRcT2",
stagekeyslist=["123yd1l123/test"],
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("associated"))
@ -914,7 +914,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
result = boto_apigateway.disassociate_api_key_stagekeys(
apiKey="88883333amaa1ZMVGCoLeaTrQk8kzOC36vCgRcT2",
stagekeyslist=["123yd1l123/test"],
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("disassociated"))
@ -930,7 +930,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
result = boto_apigateway.disassociate_api_key_stagekeys(
apiKey="88883333amaa1ZMVGCoLeaTrQk8kzOC36vCgRcT2",
stagekeyslist=["123yd1l123/test"],
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("disassociated"))
@ -1035,7 +1035,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="test",
deploymentId="n05smo",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("set"))
@ -1050,7 +1050,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="test",
deploymentId="n05smo",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("set"))
@ -1076,7 +1076,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4", stageName="test", **conn_parameters
)
deployment = result.get("deployment")
now_str = "{}".format(now)
now_str = f"{now}"
self.assertTrue(result.get("created"))
self.assertEqual(deployment.get("createdDate"), now_str)
@ -1258,7 +1258,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="test",
variables=dict(key1="val2"),
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("stage").get("variables").get("key1"), "val2")
@ -1273,7 +1273,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="no_such_stage",
variables=dict(key1="val1", key2="val2"),
**conn_parameters
**conn_parameters,
)
self.assertEqual(
result.get("error").get("message"), error_message.format("get_stage")
@ -1304,7 +1304,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="test",
variables=dict(key1="val2"),
**conn_parameters
**conn_parameters,
)
self.assertEqual(
result.get("error").get("message"), error_message.format("update_stage")
@ -1336,10 +1336,10 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="test",
deploymentId="n05smo",
**conn_parameters
**conn_parameters,
)
stage = result.get("stage")
now_str = "{}".format(now)
now_str = f"{now}"
self.assertIs(result.get("created"), True)
self.assertEqual(stage.get("createdDate"), now_str)
self.assertEqual(stage.get("lastUpdatedDate"), now_str)
@ -1356,7 +1356,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
stageName="test",
deploymentId="n05smo",
**conn_parameters
**conn_parameters,
)
self.assertIs(result.get("created"), False)
self.assertEqual(
@ -1532,7 +1532,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
modelName="Error",
schema=api_model_error_schema,
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("updated"))
@ -1547,7 +1547,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
modelName="no_such_model",
schema=api_model_error_schema,
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("updated"))
@ -1563,7 +1563,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
modelName="Error",
modelDescription="Error Model",
schema=api_model_error_schema,
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("created"))
@ -1579,7 +1579,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
modelName="Error",
modelDescription="Error Model",
schema=api_model_error_schema,
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("created"))
@ -1803,7 +1803,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users",
httpMethod="POST",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("method"))
@ -1819,7 +1819,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users",
httpMethod="PUT",
**conn_parameters
**conn_parameters,
)
self.assertEqual(
result.get("error").get("message"), error_message.format("get_method")
@ -1836,7 +1836,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/does/not/exist",
httpMethod="POST",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("error"))
@ -1859,7 +1859,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="GET",
authorizationType="NONE",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("created"))
@ -1875,7 +1875,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api5",
httpMethod="GET",
authorizationType="NONE",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("created"))
@ -1892,7 +1892,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="GET",
authorizationType="NONE",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("created"))
@ -1913,7 +1913,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users",
httpMethod="POST",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("deleted"))
@ -1931,7 +1931,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users",
httpMethod="GET",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("deleted"))
@ -1946,7 +1946,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users5",
httpMethod="POST",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("deleted"))
@ -1969,7 +1969,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode=200,
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("response"))
@ -1988,7 +1988,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode=250,
**conn_parameters
**conn_parameters,
)
self.assertEqual(
result.get("error").get("message"),
@ -2007,7 +2007,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api5/users",
httpMethod="POST",
statusCode=200,
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("error"))
@ -2030,7 +2030,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode="201",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("created"))
@ -2046,7 +2046,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api5",
httpMethod="POST",
statusCode="200",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("created"))
@ -2065,7 +2065,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode="200",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("created"))
@ -2087,7 +2087,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode="200",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("deleted"))
@ -2106,7 +2106,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="GET",
statusCode="201",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("deleted"))
@ -2122,7 +2122,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users5",
httpMethod="POST",
statusCode="200",
**conn_parameters
**conn_parameters,
)
self.assertFalse(result.get("deleted"))
@ -2149,7 +2149,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users",
httpMethod="POST",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("integration"))
@ -2167,7 +2167,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api/users",
httpMethod="GET",
**conn_parameters
**conn_parameters,
)
self.assertEqual(
result.get("error").get("message"), error_message.format("get_integration")
@ -2184,7 +2184,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
restApiId="rm06h9oac4",
resourcePath="/api5/users",
httpMethod="POST",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("error"))
@ -2208,7 +2208,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode="200",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("response"))
@ -2227,7 +2227,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api/users",
httpMethod="POST",
statusCode="201",
**conn_parameters
**conn_parameters,
)
self.assertEqual(
result.get("error").get("message"),
@ -2246,7 +2246,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
resourcePath="/api5/users",
httpMethod="POST",
statusCode="200",
**conn_parameters
**conn_parameters,
)
self.assertTrue(result.get("error"))
@ -2327,7 +2327,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
description=None,
throttle=throttle,
quota=quota,
**conn_parameters
**conn_parameters,
)
self.assertNotEqual(None, res.get("error"))
res = boto_apigateway.update_usage_plan(
@ -2341,7 +2341,7 @@ class BotoApiGatewayTestCase(BotoApiGatewayTestCaseBase, BotoApiGatewayTestCaseM
description=None,
throttle=None,
quota=quota,
**conn_parameters
**conn_parameters,
)
self.assertNotEqual(None, res.get("error"))
res = boto_apigateway.update_usage_plan(

View file

@ -125,7 +125,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
"model": "virtio",
"filename": "myvm_system.qcow2",
"image": "/path/to/image",
"source_file": "{}{}myvm_system.qcow2".format(root_dir, os.sep),
"source_file": f"{root_dir}{os.sep}myvm_system.qcow2",
},
{
"name": "data",
@ -134,7 +134,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
"format": "raw",
"model": "virtio",
"filename": "myvm_data.raw",
"source_file": "{}{}myvm_data.raw".format(root_dir, os.sep),
"source_file": f"{root_dir}{os.sep}myvm_data.raw",
},
],
disks,
@ -2123,7 +2123,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
self.assertIsNone(definition.find("./devices/disk[2]/source"))
self.assertEqual(
mock_run.call_args[0][0],
'qemu-img create -f qcow2 "{}" 10240M'.format(expected_disk_path),
f'qemu-img create -f qcow2 "{expected_disk_path}" 10240M',
)
self.assertEqual(mock_chmod.call_args[0][0], expected_disk_path)
@ -4385,7 +4385,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
"tag": "first-snap",
"vmsize": 1234,
"date": datetime.datetime.fromtimestamp(
float("{}.{}".format(1528877587, 380589000))
float(f"{1528877587}.{380589000}")
).isoformat(),
"vmclock": "00:00:00",
},
@ -4394,7 +4394,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
"tag": "second snap",
"vmsize": 4567,
"date": datetime.datetime.fromtimestamp(
float("{}.{}".format(1528877592, 933509000))
float(f"{1528877592}.{933509000}")
).isoformat(),
"vmclock": "00:00:00",
},
@ -5297,9 +5297,9 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
for i in range(2):
net_mock = MagicMock()
net_mock.name.return_value = "net{}".format(i)
net_mock.name.return_value = f"net{i}"
net_mock.UUIDString.return_value = "some-uuid"
net_mock.bridgeName.return_value = "br{}".format(i)
net_mock.bridgeName.return_value = f"br{i}"
net_mock.autostart.return_value = True
net_mock.isActive.return_value = False
net_mock.isPersistent.return_value = True
@ -5759,8 +5759,8 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
pool_mocks = []
for i in range(2):
pool_mock = MagicMock()
pool_mock.name.return_value = "pool{}".format(i)
pool_mock.UUIDString.return_value = "some-uuid-{}".format(i)
pool_mock.name.return_value = f"pool{i}"
pool_mock.UUIDString.return_value = f"some-uuid-{i}"
pool_mock.info.return_value = [0, 1234, 5678, 123]
pool_mock.autostart.return_value = True
pool_mock.isPersistent.return_value = True
@ -6290,7 +6290,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
for idx, disk in enumerate(vms_disks):
vm = MagicMock()
# pylint: disable=no-member
vm.name.return_value = "vm{}".format(idx)
vm.name.return_value = f"vm{idx}"
vm.XMLDesc.return_value = """
<domain type='kvm' id='1'>
<name>vm{}</name>
@ -6829,7 +6829,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
def create_mock_vm(idx):
mock_vm = MagicMock()
mock_vm.name.return_value = "vm{}".format(idx)
mock_vm.name.return_value = f"vm{idx}"
return mock_vm
mock_vms = [create_mock_vm(idx) for idx in range(3)]

View file

@ -76,7 +76,7 @@ class Base(TestCase, LoaderModuleMockMixin):
cls.tdir = os.path.join(cls.rdir, "test")
for idx, url in buildout._URL_VERSIONS.items():
log.debug("Downloading bootstrap from %s", url)
dest = os.path.join(cls.rdir, "{}_bootstrap.py".format(idx))
dest = os.path.join(cls.rdir, f"{idx}_bootstrap.py")
try:
download_to(url, dest)
except urllib.error.URLError as exc:
@ -124,7 +124,7 @@ class Base(TestCase, LoaderModuleMockMixin):
shutil.copytree(self.root, self.tdir)
for idx in BOOT_INIT:
path = os.path.join(self.rdir, "{}_bootstrap.py".format(idx))
path = os.path.join(self.rdir, f"{idx}_bootstrap.py")
for fname in BOOT_INIT[idx]:
shutil.copy2(path, os.path.join(self.tdir, fname))
@ -155,7 +155,7 @@ class BuildoutTestCase(Base):
@buildout._salt_callback
def callback1(a, b=1):
for i in buildout.LOG.levels:
getattr(buildout.LOG, i)("{}bar".format(i[0]))
getattr(buildout.LOG, i)(f"{i[0]}bar")
return "foo"
def callback2(a, b=1):
@ -212,7 +212,7 @@ class BuildoutTestCase(Base):
self.assertEqual(
buildout._URL_VERSIONS[1],
buildout._get_bootstrap_url(path),
"b1 url for {}".format(path),
f"b1 url for {path}",
)
for path in [
os.path.join(self.tdir, "/non/existing"),
@ -222,7 +222,7 @@ class BuildoutTestCase(Base):
self.assertEqual(
buildout._URL_VERSIONS[2],
buildout._get_bootstrap_url(path),
"b2 url for {}".format(path),
f"b2 url for {path}",
)
@pytest.mark.slow_test
@ -231,17 +231,13 @@ class BuildoutTestCase(Base):
os.path.join(self.tdir, "var/ver/1/dumppicked"),
os.path.join(self.tdir, "var/ver/1/versions"),
]:
self.assertEqual(
1, buildout._get_buildout_ver(path), "1 for {}".format(path)
)
self.assertEqual(1, buildout._get_buildout_ver(path), f"1 for {path}")
for path in [
os.path.join(self.tdir, "/non/existing"),
os.path.join(self.tdir, "var/ver/2/versions"),
os.path.join(self.tdir, "var/ver/2/default"),
]:
self.assertEqual(
2, buildout._get_buildout_ver(path), "2 for {}".format(path)
)
self.assertEqual(2, buildout._get_buildout_ver(path), f"2 for {path}")
@pytest.mark.slow_test
def test_get_bootstrap_content(self):
@ -380,14 +376,14 @@ class BuildoutOnlineTestCase(Base):
"-C",
cls.ppy_dis,
"-xzvf",
"{}/distribute-0.6.43.tar.gz".format(cls.ppy_dis),
f"{cls.ppy_dis}/distribute-0.6.43.tar.gz",
]
)
subprocess.check_call(
[
"{}/bin/python".format(cls.ppy_dis),
"{}/distribute-0.6.43/setup.py".format(cls.ppy_dis),
f"{cls.ppy_dis}/bin/python",
f"{cls.ppy_dis}/distribute-0.6.43/setup.py",
"install",
]
)
@ -492,7 +488,7 @@ class BuildoutOnlineTestCase(Base):
self.assertTrue(ret["status"])
self.assertTrue("Creating directory" in out)
self.assertTrue("Installing a." in out)
self.assertTrue("{} bootstrap.py".format(self.py_st) in comment)
self.assertTrue(f"{self.py_st} bootstrap.py" in comment)
self.assertTrue("buildout -c buildout.cfg" in comment)
ret = buildout.buildout(
b_dir, parts=["a", "b", "c"], buildout_ver=2, python=self.py_st

View file

@ -240,7 +240,7 @@ class BotoCognitoIdentityTestCase(
name="test pool present",
IdentityPoolName=first_pool_name,
AuthenticatedRole="my_auth_role",
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertTrue("error on describe identity pool" in result.get("comment", {}))
@ -258,12 +258,10 @@ class BotoCognitoIdentityTestCase(
name="test pool present",
IdentityPoolName=first_pool_name,
AuthenticatedRole="my_auth_role",
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertIn(
"{}".format([first_pool_ret, third_pool_ret]), result.get("comment", "")
)
self.assertIn(f"{[first_pool_ret, third_pool_ret]}", result.get("comment", ""))
def test_present_when_failing_to_create_a_new_identity_pool(self):
"""
@ -281,7 +279,7 @@ class BotoCognitoIdentityTestCase(
name="test pool present",
IdentityPoolName=default_pool_name,
AuthenticatedRole="my_auth_role",
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertTrue("error on create_identity_pool" in result.get("comment", ""))
@ -304,7 +302,7 @@ class BotoCognitoIdentityTestCase(
IdentityPoolName=second_pool_name,
AuthenticatedRole="my_auth_role",
AllowUnauthenticatedIdentities=True,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertTrue("error on update_identity_pool" in result.get("comment", ""))
@ -339,7 +337,7 @@ class BotoCognitoIdentityTestCase(
IdentityPoolName=second_pool_name,
AuthenticatedRole="my_auth_role",
AllowUnauthenticatedIdentities=True,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertTrue("error on get_identity_pool_roles" in result.get("comment", ""))
@ -375,7 +373,7 @@ class BotoCognitoIdentityTestCase(
IdentityPoolName=second_pool_name,
AuthenticatedRole="my_auth_role",
AllowUnauthenticatedIdentities=True,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertTrue(
@ -417,7 +415,7 @@ class BotoCognitoIdentityTestCase(
AuthenticatedRole="my_auth_role",
AllowUnauthenticatedIdentities=True,
DeveloperProviderName=default_dev_provider,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), True)
expected_call_args = (
@ -469,7 +467,7 @@ class BotoCognitoIdentityTestCase(
IdentityPoolName=second_pool_name,
AuthenticatedRole="my_auth_role",
AllowUnauthenticatedIdentities=True,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), True)
expected_call_args = (
@ -502,7 +500,7 @@ class BotoCognitoIdentityTestCase(
name="test pool absent",
IdentityPoolName="no_such_pool_name",
RemoveAllMatched=False,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), True)
self.assertEqual(result["changes"], {})
@ -521,12 +519,12 @@ class BotoCognitoIdentityTestCase(
name="test pool absent",
IdentityPoolName=first_pool_name,
RemoveAllMatched=False,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertEqual(result["changes"], {})
self.assertTrue(
"{}".format([first_pool_ret, third_pool_ret]) in result.get("comment", "")
f"{[first_pool_ret, third_pool_ret]}" in result.get("comment", "")
)
def test_absent_when_failing_to_describe_identity_pools(self):
@ -541,7 +539,7 @@ class BotoCognitoIdentityTestCase(
name="test pool absent",
IdentityPoolName=first_pool_name,
RemoveAllMatched=False,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertTrue("error on describe identity pool" in result.get("comment", {}))
@ -561,7 +559,7 @@ class BotoCognitoIdentityTestCase(
name="test pool absent",
IdentityPoolName=first_pool_name,
RemoveAllMatched=True,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), False)
self.assertEqual(result["changes"], {})
@ -579,12 +577,12 @@ class BotoCognitoIdentityTestCase(
name="test pool absent",
IdentityPoolName=second_pool_name,
RemoveAllMatched=False,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), True)
expected_changes = {
"new": {"Identity Pool Id {}".format(second_pool_id): None},
"old": {"Identity Pool Id {}".format(second_pool_id): second_pool_name},
"new": {f"Identity Pool Id {second_pool_id}": None},
"old": {f"Identity Pool Id {second_pool_id}": second_pool_name},
}
self.assertEqual(result["changes"], expected_changes)
@ -604,17 +602,17 @@ class BotoCognitoIdentityTestCase(
name="test pool absent",
IdentityPoolName=first_pool_name,
RemoveAllMatched=True,
**conn_parameters
**conn_parameters,
)
self.assertEqual(result.get("result"), True)
expected_changes = {
"new": {
"Identity Pool Id {}".format(first_pool_id): None,
"Identity Pool Id {}".format(third_pool_id): None,
f"Identity Pool Id {first_pool_id}": None,
f"Identity Pool Id {third_pool_id}": None,
},
"old": {
"Identity Pool Id {}".format(first_pool_id): first_pool_name,
"Identity Pool Id {}".format(third_pool_id): third_pool_name,
f"Identity Pool Id {first_pool_id}": first_pool_name,
f"Identity Pool Id {third_pool_id}": third_pool_name,
},
}
self.assertEqual(result["changes"], expected_changes)

View file

@ -205,7 +205,7 @@ class BotoUtilsGetConnTestCase(BotoUtilsTestCaseBase):
@mock_ec2
def test_get_conn_with_no_auth_params_raises_invocation_error(self):
with patch(
"boto.{}.connect_to_region".format(service),
f"boto.{service}.connect_to_region",
side_effect=boto.exception.NoAuthHandlerFound(),
):
with self.assertRaises(SaltInvocationError):
@ -214,7 +214,7 @@ class BotoUtilsGetConnTestCase(BotoUtilsTestCaseBase):
@mock_ec2
def test_get_conn_error_raises_command_execution_error(self):
with patch(
"boto.{}.connect_to_region".format(service),
f"boto.{service}.connect_to_region",
side_effect=BotoServerError(400, "Mocked error", body=error_body),
):
with self.assertRaises(BotoServerError):

View file

@ -99,10 +99,7 @@ def build_path_cache():
# rest_cherrypy, rest_tornado
subpackage = parts.pop(0)
stub_path = (
stub_path
/ package
/ "all"
/ "salt.netapi.{}.rst".format(subpackage)
stub_path / package / "all" / f"salt.netapi.{subpackage}.rst"
)
else:
stub_path = (
@ -122,7 +119,7 @@ build_path_cache()
def build_file_list(files, extension):
if not files:
_files = tools.utils.REPO_ROOT.rglob("*{}".format(extension))
_files = tools.utils.REPO_ROOT.rglob(f"*{extension}")
else:
_files = [fpath.resolve() for fpath in files if fpath.suffix == extension]
_files = [path.relative_to(tools.utils.REPO_ROOT) for path in _files]
@ -315,7 +312,7 @@ def check_module_indexes(ctx: Context, files: list[pathlib.Path]) -> int:
if module.name == "__init__.py":
modules.add(module.parent.stem)
continue
modules.add("{}.{}".format(module.parent.stem, module.stem))
modules.add(f"{module.parent.stem}.{module.stem}")
continue
if module.name == "__init__.py":
continue