Update to latest `pyupgrade` hook. Stop skipping it on CI.

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Pedro Algarvio 2022-01-25 18:25:44 +00:00 committed by Gareth J. Greenaway
parent fd8060c58c
commit f2a783643d
67 changed files with 121 additions and 149 deletions


@ -47,7 +47,7 @@ jobs:
- name: Check ALL Files On Branch
if: github.event_name != 'pull_request'
env:
SKIP: lint-salt,lint-tests,pyupgrade,remove-import-headers,rstcheck
SKIP: lint-salt,lint-tests,remove-import-headers,rstcheck
run: |
pre-commit run --show-diff-on-failure --color=always --all-files
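
For reference, a minimal local equivalent of the CI step above, sketched in Python and assuming pre-commit is installed and on PATH (not part of this commit):

import os
import subprocess

# Mirror the workflow step: skip the hooks still listed in SKIP (pyupgrade is
# no longer among them) and run the rest against every file in the repo.
env = dict(os.environ, SKIP="lint-salt,lint-tests,remove-import-headers,rstcheck")
subprocess.run(
    ["pre-commit", "run", "--show-diff-on-failure", "--color=always", "--all-files"],
    check=True,
    env=env,
)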


@ -1197,9 +1197,7 @@ repos:
# ----- Code Formatting ------------------------------------------------------------------------------------------->
- repo: https://github.com/asottile/pyupgrade
# This, for now, is meant to run when locally committing code and will be disabled(skipped) when we run pre-commit
# against all codebase to avoid MASSIVE code churn. This way, we do it in smaller chunks, a few at a time.
rev: v2.7.2
rev: v2.31.0
hooks:
- id: pyupgrade
name: Drop six usage and Py2 support
@ -1207,7 +1205,7 @@ repos:
exclude: >
(?x)^(
salt/client/ssh/ssh_py_shim.py|
salt/ext/ipaddress.py
salt/ext/.*\.py
)$
- repo: https://github.com/saltstack/pre-commit-remove-import-headers
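
The exclude pattern is broadened from the single vendored salt/ext/ipaddress.py to every Python file under salt/ext/, so pyupgrade never rewrites vendored code. An illustrative check of the verbose-mode pattern (pattern copied from the config above; the sample paths are only illustrative):

import re

# The pattern as configured for the pyupgrade hook above.
exclude = re.compile(
    r"""(?x)^(
        salt/client/ssh/ssh_py_shim.py|
        salt/ext/.*\.py
    )$"""
)

assert exclude.match("salt/ext/ipaddress.py")       # still excluded
assert exclude.match("salt/ext/vendored/thing.py")  # now excluded as well
assert not exclude.match("salt/modules/cmdmod.py")  # still gets pyupgrade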


@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
saltrepo
~~~~~~~~


@ -1,5 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Taken from sphinx-contrib
# https://bitbucket.org/birkenfeld/sphinx-contrib/src/a3d904f8ab24/youtube
@ -33,7 +32,6 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import division
import re
@ -51,7 +49,7 @@ CONTROL_HEIGHT = 30
def get_size(d, key):
if key not in d:
return None
m = re.match("(\d+)(|%|px)$", d[key])
m = re.match(r"(\d+)(|%|px)$", d[key])
if not m:
raise ValueError("invalid size %r" % d[key])
return int(m.group(1)), m.group(2) or "px"
@ -134,7 +132,7 @@ class YouTube(Directive):
def run(self):
if "aspect" in self.options:
aspect = self.options.get("aspect")
m = re.match("(\d+):(\d+)", aspect)
m = re.match(r"(\d+):(\d+)", aspect)
if m is None:
raise ValueError("invalid aspect ratio %r" % aspect)
aspect = tuple(int(x) for x in m.groups())
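
Besides dropping the coding header and the __future__ import, the only change in this file is making the patterns raw strings. The escape sequence \d in a plain string literal has emitted a DeprecationWarning since Python 3.6; the raw form is the same pattern without the warning. A quick illustration:

import re

# Identical matches, but the raw-string patterns do not rely on the
# deprecated "unknown escape passes through" behaviour.
assert re.match(r"(\d+)(|%|px)$", "480px").groups() == ("480", "px")
assert re.match(r"(\d+):(\d+)", "16:9").groups() == ("16", "9")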


@ -534,9 +534,7 @@ class Resolver:
)
print(
"Available eauth types: {}".format(
", ".join(
sorted([k[:-5] for k in self.auth if k.endswith(".auth")])
)
", ".join(sorted(k[:-5] for k in self.auth if k.endswith(".auth")))
)
)
return ret
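
Most of the churn in this commit is this one rewrite: a throwaway list comprehension passed to sorted(), join(), tuple(), sum() and similar calls becomes a generator expression, skipping the intermediate list while producing the same result. A tiny sketch mirroring the hunk above (the sample dict is made up):

auth = {"pam.auth": 1, "ldap.auth": 2, "pam.groups": 3}

old = ", ".join(sorted([k[:-5] for k in auth if k.endswith(".auth")]))
new = ", ".join(sorted(k[:-5] for k in auth if k.endswith(".auth")))
assert old == new == "ldap, pam"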


@ -9,7 +9,7 @@ try:
import twilio
# Grab version, ensure elements are ints
twilio_version = tuple([int(x) for x in twilio.__version_info__])
twilio_version = tuple(int(x) for x in twilio.__version_info__)
if twilio_version > (5,):
from twilio.rest import Client as TwilioRestClient
else:


@ -1813,10 +1813,8 @@ class LinodeAPIv3(LinodeAPI):
vm_["name"],
pprint.pprint(
sorted(
[
distro["LABEL"].encode(__salt_system_encoding__)
for distro in distributions
]
distro["LABEL"].encode(__salt_system_encoding__)
for distro in distributions
)
),
)


@ -22,12 +22,10 @@ try:
HAS_LIBCLOUD = True
LIBCLOUD_VERSION_INFO = tuple(
[
int(part)
for part in libcloud.__version__.replace("-", ".")
.replace("rc", ".")
.split(".")[:3]
]
int(part)
for part in libcloud.__version__.replace("-", ".")
.replace("rc", ".")
.split(".")[:3]
)
except ImportError:


@ -1775,15 +1775,15 @@ def _parse_cpe_name(cpe):
ret["phase"] = cpe[5] if len(cpe) > 5 else None
ret["part"] = part.get(cpe[1][1:])
elif len(cpe) == 6 and cpe[1] == "2.3": # WFN to a string
ret["vendor"], ret["product"], ret["version"] = [
ret["vendor"], ret["product"], ret["version"] = (
x if x != "*" else None for x in cpe[3:6]
]
)
ret["phase"] = None
ret["part"] = part.get(cpe[2])
elif len(cpe) > 7 and len(cpe) <= 13 and cpe[1] == "2.3": # WFN to a string
ret["vendor"], ret["product"], ret["version"], ret["phase"] = [
ret["vendor"], ret["product"], ret["version"], ret["phase"] = (
x if x != "*" else None for x in cpe[3:7]
]
)
ret["part"] = part.get(cpe[2])
return ret
@ -2103,9 +2103,9 @@ def os_data():
log.trace(
"Getting OS name, release, and codename from distro id, version, codename"
)
(osname, osrelease, oscodename) = [
(osname, osrelease, oscodename) = (
x.strip('"').strip("'") for x in _linux_distribution()
]
)
# Try to assign these three names based on the lsb info, they tend to
# be more accurate than what python gets from /etc/DISTRO-release.
# It's worth noting that Ubuntu has patched their Python distribution
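
Tuple unpacking behaves the same whether the right-hand side is a list comprehension or a parenthesized generator expression, provided the number of items matches the targets. A minimal illustration with made-up values:

raw = ('"Ubuntu"', "'20.04'", "focal")

# Both forms unpack identically; the generator skips the intermediate list.
osname, osrelease, oscodename = [x.strip('"').strip("'") for x in raw]
assert (osname, osrelease, oscodename) == ("Ubuntu", "20.04", "focal")

osname, osrelease, oscodename = (x.strip('"').strip("'") for x in raw)
assert (osname, osrelease, oscodename) == ("Ubuntu", "20.04", "focal")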


@ -1472,9 +1472,9 @@ def list_pkgs(
for line in out.splitlines():
cols = line.split()
try:
linetype, status, name, version_num, arch = [
linetype, status, name, version_num, arch = (
cols[x] for x in (0, 2, 3, 4, 5)
]
)
except (ValueError, IndexError):
continue
if __grains__.get("cpuarch", "") == "x86_64":
@ -2997,7 +2997,7 @@ def show(*names, **kwargs):
line = line.strip()
if line:
try:
key, val = [x.strip() for x in line.split(":", 1)]
key, val = (x.strip() for x in line.split(":", 1))
except ValueError:
pass
else:


@ -644,13 +644,13 @@ def super_(dev):
if not line:
continue
key, val = [val.strip() for val in re.split(r"[\s]+", line, maxsplit=1)]
key, val = (val.strip() for val in re.split(r"[\s]+", line, maxsplit=1))
if not (key and val):
continue
mval = None
if " " in val:
rval, mval = [val.strip() for val in re.split(r"[\s]+", val, maxsplit=1)]
rval, mval = (val.strip() for val in re.split(r"[\s]+", val, maxsplit=1))
mval = mval[1:-1]
else:
rval = val


@ -188,7 +188,7 @@ def get_config(name, region=None, key=None, keyid=None, profile=None):
# convert SuspendedProcess objects to names
elif attr == "suspended_processes":
suspended_processes = getattr(asg, attr)
ret[attr] = sorted([x.process_name for x in suspended_processes])
ret[attr] = sorted(x.process_name for x in suspended_processes)
else:
ret[attr] = getattr(asg, attr)
# scaling policies


@ -232,9 +232,9 @@ def get_eip_address_info(
.. versionadded:: 2016.3.0
"""
if type(addresses) == (type("string")):
if isinstance(addresses, str):
addresses = [addresses]
if type(allocation_ids) == (type("string")):
if isinstance(allocation_ids, str):
allocation_ids = [allocation_ids]
ret = _get_all_eip_addresses(
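
The type(x) == type("string") comparison is a pre-unification leftover; the isinstance form is idiomatic and also accepts str subclasses. A short sketch using a documentation address:

addresses = "198.51.100.1"

# Old spelling: exact type comparison.
assert type(addresses) == type("string")

# New spelling: idiomatic, and true for str subclasses too.
if isinstance(addresses, str):
    addresses = [addresses]
assert addresses == ["198.51.100.1"]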


@ -58,7 +58,7 @@ def _parse_btrfs_info(data):
for line in [line for line in data.split("\n") if line][:-1]:
if line.startswith("Label:"):
line = re.sub(r"Label:\s+", "", line)
label, uuid_ = [tkn.strip() for tkn in line.split("uuid:")]
label, uuid_ = (tkn.strip() for tkn in line.split("uuid:"))
ret["label"] = label != "none" and label or None
ret["uuid"] = uuid_
continue


@ -145,7 +145,7 @@ def __virtual__():
if HAS_DOCKERCOMPOSE:
match = re.match(VERSION_RE, str(compose.__version__))
if match:
version = tuple([int(x) for x in match.group(1).split(".")])
version = tuple(int(x) for x in match.group(1).split("."))
if version >= MIN_DOCKERCOMPOSE:
return __virtualname__
return (


@ -2505,11 +2505,11 @@ def version():
if "Version" in ret:
match = version_re.match(str(ret["Version"]))
if match:
ret["VersionInfo"] = tuple([int(x) for x in match.group(1).split(".")])
ret["VersionInfo"] = tuple(int(x) for x in match.group(1).split("."))
if "ApiVersion" in ret:
match = version_re.match(str(ret["ApiVersion"]))
if match:
ret["ApiVersionInfo"] = tuple([int(x) for x in match.group(1).split(".")])
ret["ApiVersionInfo"] = tuple(int(x) for x in match.group(1).split("."))
return ret


@ -874,9 +874,9 @@ def get_source_sum(
# The source_hash is a hash expression
ret = {}
try:
ret["hash_type"], ret["hsum"] = [
ret["hash_type"], ret["hsum"] = (
x.strip() for x in source_hash.split("=", 1)
]
)
except AttributeError:
_invalid_source_hash_format()
except ValueError:


@ -53,9 +53,9 @@ class SysInfo:
log.error(msg)
raise SIException(msg)
devpath, blocks, used, available, used_p, mountpoint = [
devpath, blocks, used, available, used_p, mountpoint = (
elm for elm in out["stdout"].split(os.linesep)[-1].split(" ") if elm
]
)
return {
"device": devpath,
"blocks": blocks,


@ -74,7 +74,7 @@ def list_installed():
prefix_len = len(_package_prefix()) + 1
return sorted(
[pkg[prefix_len:] for pkg in result], key=functools.cmp_to_key(_cmp_version)
(pkg[prefix_len:] for pkg in result), key=functools.cmp_to_key(_cmp_version)
)


@ -519,10 +519,7 @@ def list_users(root=None):
getspall = functools.partial(spwd.getspall)
return sorted(
[
user.sp_namp if hasattr(user, "sp_namp") else user.sp_nam
for user in getspall()
]
user.sp_namp if hasattr(user, "sp_namp") else user.sp_nam for user in getspall()
)


@ -983,7 +983,7 @@ def _get_veths(net_data):
if sitem.startswith("#") or not sitem:
continue
elif "=" in item:
item = tuple([a.strip() for a in item.split("=", 1)])
item = tuple(a.strip() for a in item.split("=", 1))
if item[0] == "lxc.network.type":
current_nic = salt.utils.odict.OrderedDict()
if item[0] == "lxc.network.name":


@ -1036,7 +1036,7 @@ def list_repo_pkgs(*args, **kwargs):
# Sort versions newest to oldest
for pkgname in ret[reponame]:
sorted_versions = sorted(
[_LooseVersion(x) for x in ret[reponame][pkgname]], reverse=True
(_LooseVersion(x) for x in ret[reponame][pkgname]), reverse=True
)
ret[reponame][pkgname] = [x.vstring for x in sorted_versions]
return ret
@ -1047,7 +1047,7 @@ def list_repo_pkgs(*args, **kwargs):
byrepo_ret.setdefault(pkgname, []).extend(ret[reponame][pkgname])
for pkgname in byrepo_ret:
sorted_versions = sorted(
[_LooseVersion(x) for x in byrepo_ret[pkgname]], reverse=True
(_LooseVersion(x) for x in byrepo_ret[pkgname]), reverse=True
)
byrepo_ret[pkgname] = [x.vstring for x in sorted_versions]
return byrepo_ret
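
One wrinkle in these hunks: a generator expression may only be written bare when it is the sole argument. With reverse=True (or key=...) alongside it, it keeps its own parentheses, which is why the rewrites read sorted((... for x in ...), reverse=True). An illustration with made-up version strings:

versions = ["1.10", "1.2", "1.9"]

# Sole argument: no extra parentheses needed.
assert sorted(int(v.split(".")[1]) for v in versions) == [2, 9, 10]

# A second argument is present, so the generator keeps its own parentheses;
# writing it bare here would be a SyntaxError.
assert sorted((int(v.split(".")[1]) for v in versions), reverse=True) == [10, 9, 2]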


@ -72,7 +72,7 @@ def _supports_regex():
"""
Check support of regexp
"""
return tuple([int(i) for i in _get_version()]) > (0, 5)
return tuple(int(i) for i in _get_version()) > (0, 5)
@decorators.memoize
@ -80,7 +80,7 @@ def _supports_parsing():
"""
Check support of parsing
"""
return tuple([int(i) for i in _get_version()]) > (0, 6)
return tuple(int(i) for i in _get_version()) > (0, 6)
def __virtual__():


@ -503,7 +503,7 @@ def list_users():
salt '*' user.list_users
"""
return sorted([user.pw_name for user in pwd.getpwall()])
return sorted(user.pw_name for user in pwd.getpwall())
def rename(name, new_name):


@ -712,7 +712,7 @@ def _parse_settings_eth(opts, iface_type, enabled, iface):
result["ipaddrs"] = []
for opt in opts["ipaddrs"]:
if salt.utils.validate.net.ipv4_addr(opt):
ip, prefix = [i.strip() for i in opt.split("/")]
ip, prefix = (i.strip() for i in opt.split("/"))
result["ipaddrs"].append({"ipaddr": ip, "prefix": prefix})
else:
msg = "ipv4 CIDR is invalid"


@ -383,7 +383,7 @@ def _get_svc_list(name="*", status=None):
'DISABLED' : available service that is not enabled
'ENABLED' : enabled service (whether started on boot or not)
"""
return sorted([os.path.basename(el) for el in _get_svc_path(name, status)])
return sorted(os.path.basename(el) for el in _get_svc_path(name, status))
def get_svc_alias():


@ -443,7 +443,7 @@ def list_users():
salt '*' user.list_users
"""
return sorted([user.pw_name for user in pwd.getpwall()])
return sorted(user.pw_name for user in pwd.getpwall())
def rename(name, new_name):


@ -270,7 +270,7 @@ def list_all(
results = OrderedDict()
# sort the splunk searches by name, so we get consistent output
searches = sorted([(s.name, s) for s in client.saved_searches])
searches = sorted((s.name, s) for s in client.saved_searches)
for name, search in searches:
if app and search.access.app != app:
continue


@ -26,7 +26,7 @@ try:
import twilio
# Grab version, ensure elements are ints
twilio_version = tuple([int(x) for x in twilio.__version_info__])
twilio_version = tuple(int(x) for x in twilio.__version_info__)
if twilio_version > (5,):
TWILIO_5 = False
from twilio.rest import Client as TwilioRestClient


@ -874,7 +874,7 @@ def list_users(root=None):
else:
getpwall = functools.partial(pwd.getpwall)
return sorted([user.pw_name for user in getpwall()])
return sorted(user.pw_name for user in getpwall())
def rename(name, new_name, root=None):


@ -1780,11 +1780,7 @@ def _fill_disk_filename(conn, vm_name, disk, hypervisor, pool_caps):
int(re.sub("[a-z]+", "", vol_name)) for vol_name in all_volumes
] or [0]
index = min(
[
idx
for idx in range(1, max(indexes) + 2)
if idx not in indexes
]
idx for idx in range(1, max(indexes) + 2) if idx not in indexes
)
disk["filename"] = "{}{}".format(os.path.basename(device), index)


@ -67,7 +67,7 @@ def virtualenv_ver(venv_bin, user=None, **kwargs):
[x for x in ret["stdout"].strip().split() if re.search(r"^\d.\d*", x)]
)
virtualenv_version_info = tuple(
[int(i) for i in re.sub(r"(rc|\+ds).*$", "", version).split(".")]
int(i) for i in re.sub(r"(rc|\+ds).*$", "", version).split(".")
)
return virtualenv_version_info


@ -247,7 +247,7 @@ def list_sites():
filtered_binding.update({key.lower(): binding[key]})
binding_info = binding["bindingInformation"].split(":", 2)
ipaddress, port, hostheader = [element.strip() for element in binding_info]
ipaddress, port, hostheader = (element.strip() for element in binding_info)
filtered_binding.update(
{"hostheader": hostheader, "ipaddress": ipaddress, "port": port}
)


@ -1316,7 +1316,7 @@ def _get_source_sum(source_hash, file_path, saltenv):
)
raise SaltInvocationError(invalid_hash_msg)
ret["hash_type"], ret["hsum"] = [item.strip().lower() for item in items]
ret["hash_type"], ret["hsum"] = (item.strip().lower() for item in items)
return ret


@ -288,7 +288,7 @@ def _xfs_prune_output(out, uuid):
cnt.append(line)
for kset in [e for e in cnt[1:] if ":" in e]:
key, val = [t.strip() for t in kset.split(":", 1)]
key, val = (t.strip() for t in kset.split(":", 1))
data[key.lower().replace(" ", "_")] = val
return data.get("uuid") == uuid and data or {}


@ -1010,7 +1010,7 @@ def list_repo_pkgs(*args, **kwargs):
# Sort versions newest to oldest
for pkgname in ret[reponame]:
sorted_versions = sorted(
[_LooseVersion(x) for x in ret[reponame][pkgname]], reverse=True
(_LooseVersion(x) for x in ret[reponame][pkgname]), reverse=True
)
ret[reponame][pkgname] = [x.vstring for x in sorted_versions]
return ret
@ -1021,7 +1021,7 @@ def list_repo_pkgs(*args, **kwargs):
byrepo_ret.setdefault(pkgname, []).extend(ret[reponame][pkgname])
for pkgname in byrepo_ret:
sorted_versions = sorted(
[_LooseVersion(x) for x in byrepo_ret[pkgname]], reverse=True
(_LooseVersion(x) for x in byrepo_ret[pkgname]), reverse=True
)
byrepo_ret[pkgname] = [x.vstring for x in sorted_versions]
return byrepo_ret
@ -2566,7 +2566,7 @@ def group_info(name, expand=False, ignore_groups=None):
g_info = {}
for line in salt.utils.itertools.split(out, "\n"):
try:
key, value = [x.strip() for x in line.split(":")]
key, value = (x.strip() for x in line.split(":"))
g_info[key.lower()] = value
except ValueError:
continue


@ -435,10 +435,8 @@ class Wildcard:
self.name = pkg_name
self._set_version(pkg_version) # Dissects possible operator
versions = sorted(
[
LooseVersion(vrs)
for vrs in self._get_scope_versions(self._get_available_versions())
]
LooseVersion(vrs)
for vrs in self._get_scope_versions(self._get_available_versions())
)
return versions and "{}{}".format(self._op or "", versions[-1]) or None
@ -1080,7 +1078,7 @@ def list_repo_pkgs(*args, **kwargs):
# Sort versions newest to oldest
for pkgname in ret[reponame]:
sorted_versions = sorted(
[LooseVersion(x) for x in ret[reponame][pkgname]], reverse=True
(LooseVersion(x) for x in ret[reponame][pkgname]), reverse=True
)
ret[reponame][pkgname] = [x.vstring for x in sorted_versions]
return ret
@ -1091,7 +1089,7 @@ def list_repo_pkgs(*args, **kwargs):
byrepo_ret.setdefault(pkgname, []).extend(ret[reponame][pkgname])
for pkgname in byrepo_ret:
sorted_versions = sorted(
[LooseVersion(x) for x in byrepo_ret[pkgname]], reverse=True
(LooseVersion(x) for x in byrepo_ret[pkgname]), reverse=True
)
byrepo_ret[pkgname] = [x.vstring for x in sorted_versions]
return byrepo_ret
@ -2047,7 +2045,7 @@ def list_locks(root=None):
for element in [el for el in meta if el]:
if ":" in element:
lock.update(
dict([tuple([i.strip() for i in element.split(":", 1)])])
dict([tuple(i.strip() for i in element.split(":", 1))])
)
if lock.get("solvable_name"):
locks[lock.pop("solvable_name")] = lock


@ -155,7 +155,7 @@ class TableDisplay:
columns = map(lambda *args: args, *reduce(operator.add, logical_rows))
max_widths = [max([len(str(item)) for item in column]) for column in columns]
max_widths = [max(len(str(item)) for item in column) for column in columns]
row_separator = self.row_delimiter * (
len(self.prefix)
+ len(self.suffix)


@ -112,11 +112,9 @@ def _get_message(ret):
if isinstance(kwargs, dict):
kwarg_string = " ".join(
sorted(
[
"{}={}".format(k, v)
for k, v in kwargs.items()
if not k.startswith("_")
]
"{}={}".format(k, v)
for k, v in kwargs.items()
if not k.startswith("_")
)
)
return "salt func: {fun} {argstr} {kwargstr}".format(


@ -37,7 +37,7 @@ try:
import twilio
# Grab version, ensure elements are ints
twilio_version = tuple([int(x) for x in twilio.__version_info__])
twilio_version = tuple(int(x) for x in twilio.__version_info__)
if twilio_version > (5,):
TWILIO_5 = False
from twilio.rest import Client as TwilioRestClient


@ -173,8 +173,8 @@ def _load_minion(minion_id, cache):
pillar = {}
addrs = {
4: sorted([ipaddress.IPv4Address(addr) for addr in grains.get("ipv4", [])]),
6: sorted([ipaddress.IPv6Address(addr) for addr in grains.get("ipv6", [])]),
4: sorted(ipaddress.IPv4Address(addr) for addr in grains.get("ipv4", [])),
6: sorted(ipaddress.IPv6Address(addr) for addr in grains.get("ipv6", [])),
}
mine = cache.fetch("minions/{}".format(minion_id), "mine")


@ -797,10 +797,10 @@ def bootstrap_psexec(
'>(Salt-Minion-(.+?)-(.+)-Setup.exe)</a></td><td align="right">(.*?)\\s*<'
)
source_list = sorted(
[
(
[path, ver, plat, time.strptime(date, "%d-%b-%Y %H:%M")]
for path, ver, plat, date in salty_rx.findall(source)
],
),
key=operator.itemgetter(3),
reverse=True,
)


@ -212,9 +212,9 @@ def availability_set_present(
return ret
aset_vms = aset.get("virtual_machines", [])
remote_vms = sorted(
[vm["id"].split("/")[-1].lower() for vm in aset_vms if "id" in aset_vms]
vm["id"].split("/")[-1].lower() for vm in aset_vms if "id" in aset_vms
)
local_vms = sorted([vm.lower() for vm in virtual_machines or []])
local_vms = sorted(vm.lower() for vm in virtual_machines or [])
if local_vms != remote_vms:
ret["changes"]["virtual_machines"] = {
"old": aset_vms,


@ -237,10 +237,10 @@ def zone_present(
return ret
reg_vnets = zone.get("registration_virtual_networks", [])
remote_reg_vnets = sorted(
[vnet["id"].lower() for vnet in reg_vnets if "id" in vnet]
vnet["id"].lower() for vnet in reg_vnets if "id" in vnet
)
local_reg_vnets = sorted(
[vnet.lower() for vnet in registration_virtual_networks or []]
vnet.lower() for vnet in registration_virtual_networks or []
)
if local_reg_vnets != remote_reg_vnets:
ret["changes"]["registration_virtual_networks"] = {
@ -259,10 +259,10 @@ def zone_present(
return ret
res_vnets = zone.get("resolution_virtual_networks", [])
remote_res_vnets = sorted(
[vnet["id"].lower() for vnet in res_vnets if "id" in vnet]
vnet["id"].lower() for vnet in res_vnets if "id" in vnet
)
local_res_vnets = sorted(
[vnet.lower() for vnet in resolution_virtual_networks or []]
vnet.lower() for vnet in resolution_virtual_networks or []
)
if local_res_vnets != remote_res_vnets:
ret["changes"]["resolution_virtual_networks"] = {
@ -582,9 +582,9 @@ def record_set_present(
" dictionaries!".format(record_str)
)
return ret
local, remote = [
local, remote = (
sorted(config) for config in (record, rec_set[record_str])
]
)
for val in local:
for key in val:
local_val = val[key]


@ -358,7 +358,7 @@ def _sort_policy(doc):
the likelihood of false negatives.
"""
if isinstance(doc, list):
return sorted([_sort_policy(i) for i in doc])
return sorted(_sort_policy(i) for i in doc)
elif isinstance(doc, (dict, OrderedDict)):
return {k: _sort_policy(v) for k, v in doc.items()}
return doc


@ -222,9 +222,9 @@ class DictDiffer:
Initialize the differ.
"""
self.current_dict, self.past_dict = current_dict, past_dict
self.current_keys, self.past_keys = [
self.current_keys, self.past_keys = (
set(d.keys()) for d in (current_dict, past_dict)
]
)
self.intersect = self.current_keys.intersection(self.past_keys)
def same(self):


@ -145,7 +145,7 @@ def present(name, acl_type, acl_name="", perms="", recurse=False, force=False):
user = None
if user:
octal_sum = sum([_octal.get(i, i) for i in perms])
octal_sum = sum(_octal.get(i, i) for i in perms)
need_refresh = False
# If recursive check all paths retrieved via acl.getfacl
if recurse:
@ -169,7 +169,7 @@ def present(name, acl_type, acl_name="", perms="", recurse=False, force=False):
break
# Check the permissions from the already located file
elif user[_search_name]["octal"] == sum([_octal.get(i, i) for i in perms]):
elif user[_search_name]["octal"] == sum(_octal.get(i, i) for i in perms):
need_refresh = False
# If they don't match then refresh
else:
@ -402,7 +402,7 @@ def list_present(name, acl_type, acl_names=None, perms="", recurse=False, force=
ret = {"name": name, "result": True, "changes": {}, "comment": ""}
_octal = {"r": 4, "w": 2, "x": 1, "-": 0}
_octal_perms = sum([_octal.get(i, i) for i in perms])
_octal_perms = sum(_octal.get(i, i) for i in perms)
if not os.path.exists(name):
ret["comment"] = "{} does not exist".format(name)
ret["result"] = False
@ -475,7 +475,7 @@ def list_present(name, acl_type, acl_names=None, perms="", recurse=False, force=
for count, search_name in enumerate(_search_names):
if search_name in users:
if users[search_name]["octal"] == sum(
[_octal.get(i, i) for i in perms]
_octal.get(i, i) for i in perms
):
ret["comment"] = "Permissions are in the desired state"
else:


@ -3003,7 +3003,7 @@ def _uninstall(
}
comments = []
not_installed = sorted([x for x in pkg_params if x not in targets])
not_installed = sorted(x for x in pkg_params if x not in targets)
if not_installed:
comments.append(
"The following packages were not installed: {}".format(


@ -440,9 +440,9 @@ def managed(name, ppa=None, copr=None, **kwargs):
# split the line and sort everything after the URL
sanitizedsplit = sanitizedkwargs[kwarg].split()
sanitizedsplit[3:] = sorted(sanitizedsplit[3:])
reposplit, _, pre_comments = [
reposplit, _, pre_comments = (
x.strip() for x in pre[kwarg].partition("#")
]
)
reposplit = reposplit.split()
reposplit[3:] = sorted(reposplit[3:])
if sanitizedsplit != reposplit:


@ -306,9 +306,9 @@ def compare_list_of_dicts(old, new, convert_id_to_name=None):
return ret
try:
local_configs, remote_configs = [
local_configs, remote_configs = (
sorted(config, key=itemgetter("name")) for config in (new, old)
]
)
except TypeError:
ret["comment"] = "configurations must be provided as a list of dictionaries!"
return ret


@ -96,7 +96,7 @@ def _blkid(fs_type=None):
dev_name = device.pop(0)[:-1]
data[dev_name] = dict()
for k_set in device:
ks_key, ks_value = [elm.replace('"', "") for elm in k_set.split("=")]
ks_key, ks_value = (elm.replace('"', "") for elm in k_set.split("="))
data[dev_name][ks_key.lower()] = ks_value
if fs_type:


@ -187,9 +187,9 @@ def is_fedora():
"""
Simple function to return if host is Fedora or not
"""
(osname, osrelease, oscodename) = [
(osname, osrelease, oscodename) = (
x.strip('"').strip("'") for x in linux_distribution()
]
)
return osname == "Fedora"
@ -198,9 +198,9 @@ def is_photonos():
"""
Simple function to return if host is Photon OS or not
"""
(osname, osrelease, oscodename) = [
(osname, osrelease, oscodename) = (
x.strip('"').strip("'") for x in linux_distribution()
]
)
return osname == "VMware Photon OS"


@ -21,10 +21,8 @@ LIBZMQ_VERSION_INFO = (-1, -1, -1)
try:
if zmq:
ZMQ_VERSION_INFO = tuple([int(v_el) for v_el in zmq.__version__.split(".")])
LIBZMQ_VERSION_INFO = tuple(
[int(v_el) for v_el in zmq.zmq_version().split(".")]
)
ZMQ_VERSION_INFO = tuple(int(v_el) for v_el in zmq.__version__.split("."))
LIBZMQ_VERSION_INFO = tuple(int(v_el) for v_el in zmq.zmq_version().split("."))
except Exception: # pylint: disable=broad-except
log.exception("Error while getting LibZMQ/PyZMQ library version")


@ -190,7 +190,7 @@ class SaltVersionsInfo(type):
def versions(cls):
if not cls._sorted_versions:
cls._sorted_versions = sorted(
[getattr(cls, name) for name in dir(cls) if name.isupper()],
(getattr(cls, name) for name in dir(cls) if name.isupper()),
key=operator.attrgetter("info"),
)
return cls._sorted_versions


@ -401,7 +401,7 @@ def check_stray(ctx, files):
DOCS_DIR / "ref" / "states" / "writing.rst",
DOCS_DIR / "topics",
)
exclude_paths = tuple([str(p.relative_to(CODE_DIR)) for p in exclude_paths])
exclude_paths = tuple(str(p.relative_to(CODE_DIR)) for p in exclude_paths)
files = build_docs_paths(files)
for path in files:
if not str(path).startswith(str((DOCS_DIR / "ref").relative_to(CODE_DIR))):


@ -157,7 +157,7 @@ class GitModuleTest(ModuleCase):
)
)
ret = self.run_function("git.add", [self.repo, newdir])
res = "\n".join(sorted(["add '{}'".format(x) for x in files_relpath]))
res = "\n".join(sorted("add '{}'".format(x) for x in files_relpath))
if salt.utils.platform.is_windows():
res = res.replace("\\", "/")
self.assertEqual(ret, res)
@ -783,7 +783,7 @@ class GitModuleTest(ModuleCase):
)
# Remove an entire dir
expected = "\n".join(
sorted(["rm '" + os.path.join(entire_dir, x) + "'" for x in self.files])
sorted("rm '" + os.path.join(entire_dir, x) + "'" for x in self.files)
)
if salt.utils.platform.is_windows():
expected = expected.replace("\\", "/")


@ -805,9 +805,9 @@ def test_parallel_state_with_long_tag(state, state_tree):
__pub_jid="1", # Because these run in parallel we need a fake JID
)
comments = sorted([x.comment for x in ret])
comments = sorted(x.comment for x in ret)
expected = sorted(
['Command "{}" run'.format(x) for x in (short_command, long_command)]
'Command "{}" run'.format(x) for x in (short_command, long_command)
)
assert comments == expected, "{} != {}".format(comments, expected)


@ -271,7 +271,7 @@ def gpg_agent(request, gpghome):
check=True,
universal_newlines=True,
)
if tuple([int(p) for p in gpg_version_proc.stdout.split(".")]) >= (2, 1):
if tuple(int(p) for p in gpg_version_proc.stdout.split(".")) >= (2, 1):
kill_option_supported = True
else:
kill_option_supported = False


@ -13,7 +13,7 @@ try:
import twilio
# Grab version, ensure elements are ints
twilio_version = tuple([int(x) for x in twilio.__version_info__])
twilio_version = tuple(int(x) for x in twilio.__version_info__)
if twilio_version > (5,):
TWILIO_5 = False
else:


@ -77,7 +77,7 @@ def test_present():
assert ret["changes"] == {
"added": {ip_list[0]: [hostname], ip_list[1]: [hostname]}
}, ret["changes"]
expected = sorted([call(x, hostname) for x in ip_list])
expected = sorted(call(x, hostname) for x in ip_list)
assert sorted(add_host.mock_calls) == expected, add_host.mock_calls
assert rm_host.mock_calls == [], rm_host.mock_calls
@ -161,7 +161,7 @@ def test_present():
assert ret["changes"] == {
"added": {ip_list[0]: [hostname], ip_list[1]: [hostname]},
}, ret["changes"]
expected = sorted([call(x, hostname) for x in ip_list])
expected = sorted(call(x, hostname) for x in ip_list)
assert sorted(add_host.mock_calls) == expected, add_host.mock_calls
assert rm_host.mock_calls == [], rm_host.mock_calls
@ -185,7 +185,7 @@ def test_present():
"added": {ip_list[0]: [hostname], ip_list[1]: [hostname]},
"removed": {cur_ip: [hostname]},
}, ret["changes"]
expected = sorted([call(x, hostname) for x in ip_list])
expected = sorted(call(x, hostname) for x in ip_list)
assert sorted(add_host.mock_calls) == expected, add_host.mock_calls
expected = [call(cur_ip, hostname)]
assert rm_host.mock_calls == expected, rm_host.mock_calls
@ -331,10 +331,10 @@ def test_present():
"added": {ip_list[0]: [hostname], ip_list[1]: [hostname]},
"comment_added": {ip_list[0]: ["A comment"], ip_list[1]: ["A comment"]},
}, ret["changes"]
expected = sorted([call(x, hostname) for x in ip_list])
expected = sorted(call(x, hostname) for x in ip_list)
assert sorted(add_host_mock.mock_calls) == expected, add_host_mock.mock_calls
expected = sorted([call(x, "A comment") for x in ip_list])
expected = sorted(call(x, "A comment") for x in ip_list)
assert (
sorted(set_comment_mock.mock_calls) == expected
), set_comment_mock.mock_calls


@ -458,7 +458,7 @@ def test_extend_dict_key_value(minion_opts, local_salt):
# Test incorrect usage
template = "{{ {} | extend_dict_key_value('bar:baz', 42) }}"
expected = r"Cannot extend {} with a {}.".format(type([]), type(42))
expected = r"Cannot extend {} with a {}.".format(type([]), int)
with pytest.raises(SaltRenderError, match=expected):
render_jinja_tmpl(
template, dict(opts=minion_opts, saltenv="test", salt=local_salt)


@ -44,7 +44,7 @@ from mock import (
__mock_version = tuple(
[int(part) for part in mock.__version__.split(".") if part.isdigit()]
int(part) for part in mock.__version__.split(".") if part.isdigit()
) # pylint: disable=no-member
if sys.version_info < (3, 6) and __mock_version < (2,):
# We need mock >= 2.0.0 before Py3.6


@ -231,12 +231,8 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
# between Python releases.
lines_written = sorted(mtime_map_mock.write_calls())
expected = sorted(
[
salt.utils.stringutils.to_bytes(
"{key}:{val}\n".format(key=key, val=val)
)
for key, val in new_mtime_map.items()
]
salt.utils.stringutils.to_bytes("{key}:{val}\n".format(key=key, val=val))
for key, val in new_mtime_map.items()
)
assert lines_written == expected, lines_written


@ -13,7 +13,7 @@ try:
import twilio
# Grab version, ensure elements are ints
twilio_version = tuple([int(x) for x in twilio.__version_info__])
twilio_version = tuple(int(x) for x in twilio.__version_info__)
if twilio_version > (5,):
TWILIO_5 = False
else:


@ -287,7 +287,7 @@ class BuildoutTestCase(Base):
@pytest.mark.slow_test
def test__find_cfgs(self):
result = sorted(
[a.replace(self.root, "") for a in buildout._find_cfgs(self.root)]
a.replace(self.root, "") for a in buildout._find_cfgs(self.root)
)
assertlist = sorted(
[


@ -771,11 +771,11 @@ class FilterFalseyTestCase(TestCase):
self.assertIs(type(old_dict), type(new_dict))
# Test excluding int
old_list = [0]
new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[type(0)])
new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[int])
self.assertEqual(old_list, new_list)
# Test excluding str (or unicode) (or both)
old_list = [""]
new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[type("")])
new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[str])
self.assertEqual(old_list, new_list)
# Test excluding list
old_list = [[]]
@ -987,7 +987,7 @@ class FilterFalseyTestCase(TestCase):
[{"foo": ""}],
]
new_list = salt.utils.data.filter_falsey(
old_list, recurse_depth=3, ignore_types=[type(0), type("")]
old_list, recurse_depth=3, ignore_types=[int, str]
)
self.assertEqual(
["foo", ["foo"], ["foo"], {"foo": 0}, {"foo": "bar"}, [{"foo": ""}]],


@ -223,7 +223,7 @@ class SSHThinTestCase(TestCase):
with pytest.raises(salt.exceptions.SaltSystemExit):
thin.get_ext_tops(cfg)
assert len(thin.log.warning.mock_calls) == 4
assert sorted([x[1][1] for x in thin.log.warning.mock_calls]) == [
assert sorted(x[1][1] for x in thin.log.warning.mock_calls) == [
"jinja2",
"msgpack",
"tornado",