Mirror of https://github.com/saltstack/salt.git

Fix lint issues in 3007.x branch

commit 628c0d2713 (parent 1f22924a9a)
15 changed files with 71 additions and 59 deletions
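Most of the changes below address pylint's logging-fstring-interpolation warning by replacing f-strings in logging calls with lazy %-style arguments, so the message is only formatted when the record is actually emitted. A minimal sketch of the pattern (illustrative only, not copied from the Salt source):

    import logging

    log = logging.getLogger(__name__)

    def report(host, retcode):
        # Flagged by pylint: the f-string is formatted eagerly, even if
        # WARNING records end up being filtered out.
        log.warning(f"Got an invalid retcode for host: '{retcode}'")
        # Preferred: pass the values as arguments; formatting is deferred
        # until a handler actually emits the record.
        log.warning("Got an invalid retcode for host: '%s'", retcode)
        log.debug("host=%s retcode=%s", host, retcode)

    report("web01", "not-a-number")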
@@ -271,7 +271,7 @@ def parse_ret(stdout, stderr, retcode, result_only=False):
     try:
         retcode = int(retcode)
     except (TypeError, ValueError):
-        log.warning(f"Got an invalid retcode for host: '{retcode}'")
+        log.warning("Got an invalid retcode for host: '%s'", retcode)
         retcode = 1

     if "Permission denied" in stderr:
@@ -307,7 +307,9 @@ def parse_ret(stdout, stderr, retcode, result_only=False):
             # Ensure a reported local retcode is kept (at least)
             retcode = max(retcode, remote_retcode)
         except (TypeError, ValueError):
-            log.warning(f"Host reported an invalid retcode: '{remote_retcode}'")
+            log.warning(
+                "Host reported an invalid retcode: '%s'", remote_retcode
+            )
             retcode = max(retcode, 1)

     if not isinstance(result, dict):
@@ -964,7 +964,7 @@ class SSHCpClient(salt.fileclient.FSClient):

     def _send_file(self, src, dest, makedirs, cachedir):
         def _error(stdout, stderr):
-            log.error(f"Failed sending file: {stderr or stdout}")
+            log.error("Failed sending file: %s", stderr or stdout)
             if Path(self.get_cachedir(cachedir)) in Path(src).parents:
                 # remove the cached file if the transfer fails
                 Path(src).unlink(missing_ok=True)
@@ -1022,7 +1022,7 @@ class SSHCpClient(salt.fileclient.FSClient):
             "rm -rf " + shlex.quote(str(path))
         )
         if retcode:
-            log.error(f"Failed deleting path '{path}': {stderr or stdout}")
+            log.error("Failed deleting path '%s': %s", path, stderr or stdout)
         return not retcode

     def get_url(
@@ -102,21 +102,29 @@ def get(
     for host, data in mrets.items():
         if not isinstance(data, dict):
             log.error(
-                f"Error executing mine func {fun} on {host}: {data}."
-                " Excluding minion from mine."
+                "Error executing mine func %s on %s: %s."
+                " Excluding minion from mine.",
+                fun,
+                host,
+                data,
             )
         elif "_error" in data:
             log.error(
-                f"Error executing mine func {fun} on {host}: {data['_error']}."
-                " Excluding minion from mine. Full output in debug log."
+                "Error executing mine func %s on %s: %s."
+                " Excluding minion from mine. Full output in debug log.",
+                fun,
+                host,
+                data["_error"],
             )
-            log.debug(f"Return was: {salt.utils.json.dumps(data)}")
+            log.debug("Return was: %s", salt.utils.json.dumps(data))
         elif "return" not in data:
             log.error(
-                f"Error executing mine func {fun} on {host}: No return was specified."
-                " Excluding minion from mine. Full output in debug log."
+                "Error executing mine func %s on %s: No return was specified."
+                " Excluding minion from mine. Full output in debug log.",
+                fun,
+                host,
             )
-            log.debug(f"Return was: {salt.utils.json.dumps(data)}")
+            log.debug("Return was: %s", salt.utils.json.dumps(data))
         else:
             rets[host] = data["return"]
     return rets
@@ -270,7 +270,7 @@ def remove(query=None, include_store=False, frameworks=False, deprovision_only=F
                 remove_package(item)
             else:
                 if bundle and bundle["IsBundle"]:
-                    log.debug(f'Found bundle: {bundle["PackageFullName"]}')
+                    log.debug("Found bundle: %s", bundle["PackageFullName"])
                     remove_name = bundle["PackageFullName"]
                 if deprovision_only:
                     log.debug("Deprovisioning package: %s", remove_name)
@@ -288,7 +288,7 @@ def remove(query=None, include_store=False, frameworks=False, deprovision_only=F
                 # The old one will not have an installer and will throw an error
                 # We should be safe just logging the message
                 # This is really hard to replicate
-                log.debug(f"There was an error removing package: {remove_name}")
+                log.debug("There was an error removing package: %s", remove_name)
                 log.debug(exc)

     if isinstance(packages, list):
@@ -314,7 +314,7 @@ class AllEventsHandler(
     """

     # pylint: disable=W0221
-    def get(self, token):
+    def get(self, token):  # pylint: disable=invalid-overridden-method
         """
         Check the token, returns a 401 if the token is invalid.
         Else open the websocket connection
@@ -128,9 +128,9 @@ class PublishClient(salt.transport.base.PublishClient):
             conn = aiohttp.UnixConnector(path=self.path)
             session = aiohttp.ClientSession(connector=conn)
             if self.ssl:
-                url = f"https://ipc.saltproject.io/ws"
+                url = "https://ipc.saltproject.io/ws"
             else:
-                url = f"http://ipc.saltproject.io/ws"
+                url = "http://ipc.saltproject.io/ws"
             log.error("pub client connect %r %r", url, ctx)
             ws = await asyncio.wait_for(session.ws_connect(url, ssl=ctx), 3)
         except Exception as exc:  # pylint: disable=broad-except
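The two URL assignments above lose their f prefix because the strings contain no placeholders, which pylint reports as f-string-without-interpolation. The same change shown in isolation:

    # Flagged: the f prefix does nothing without a {placeholder}.
    url = f"https://ipc.saltproject.io/ws"

    # Fixed: a plain string literal.
    url = "https://ipc.saltproject.io/ws"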
@@ -154,7 +154,7 @@ class PublishClient(salt.transport.base.PublishClient):
         if self._ws is None:
             self._ws, self._session = await self.getstream(timeout=timeout)
             if self.connect_callback:
-                self.connect_callback(True)
+                self.connect_callback(True)  # pylint: disable=not-callable
             self.connected = True

     async def connect(
@@ -282,9 +282,6 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
     def __setstate__(self, state):
         self.__init__(**state)

-    def __setstate__(self, state):
-        self.__init__(state["opts"])
-
     def __getstate__(self):
         return {
             "opts": self.opts,
@@ -424,7 +421,9 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer):
         self._connecting = asyncio.create_task(self._connect())
         return self._connecting

-    async def publish(self, payload, **kwargs):
+    async def publish(
+        self, payload, **kwargs
+    ):  # pylint: disable=invalid-overridden-method
         """
         Publish "load" to minions
         """
@@ -545,7 +544,7 @@ class RequestClient(salt.transport.base.RequestClient):
         self._closed = False
         self.ssl = self.opts.get("ssl", None)

-    async def connect(self):
+    async def connect(self):  # pylint: disable=invalid-overridden-method
         ctx = None
         if self.ssl is not None:
             ctx = tornado.netutil.ssl_options_to_context(self.ssl, server_side=False)
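The transport hunks above silence pylint's invalid-overridden-method warning, which fires when an override changes the flavour of the method it replaces, typically a coroutine overriding a plain method declared on the base class. A small sketch of the situation (an assumed base-class shape, not the actual salt.transport.base API):

    import asyncio

    class BaseClient:
        def connect(self):  # synchronous in the base class
            raise NotImplementedError

    class WSClient(BaseClient):
        async def connect(self):  # pylint: disable=invalid-overridden-method
            # The override is a coroutine, so callers of BaseClient.connect()
            # would receive an awaitable instead of a result; pylint warns.
            await asyncio.sleep(0)
            return True

    print(asyncio.run(WSClient().connect()))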
@@ -96,7 +96,7 @@ class CommonCache:
         if int(time.time()) - updated >= self.ttl:
             if flush:
                 log.debug(
-                    f"Cached data in {self.cbank}/{ckey} outdated, flushing."
+                    "Cached data in %s/%s outdated, flushing.", self.cbank, ckey
                 )
                 self.flush()
             return False
@@ -213,11 +213,14 @@ def clear_cache(
             ]
         ):
             scope = cbank.split("/")[-1]
-            _get_event(opts)(tag=f"vault/cache/{scope}/clear")
+            _get_event(opts)(  # pylint: disable=no-value-for-parameter
+                tag=f"vault/cache/{scope}/clear"
+            )
     except Exception as err:  # pylint: disable=broad-except
         log.error(
-            "Failed to revoke token or send event before clearing cache:\n"
-            f"{type(err).__name__}: {err}"
+            "Failed to revoke token or send event before clearing cache:\n%s: %s",
+            type(err).__name__,
+            err,
         )

     if cbank in context:
@@ -518,8 +518,8 @@ class LeaseStore:
             try:
                 self.renew(lease, increment=increment)
             except (VaultPermissionDeniedError, VaultNotFoundError) as err:
-                log.warning(f"Failed renewing cached lease: {type(err).__name__}")
-                log.debug(f"Lease ID was: {lease}")
+                log.warning("Failed renewing cached lease: %s", type(err).__name__)
+                log.debug("Lease ID was: %s", lease)
                 failed.append(ckey)
         if failed:
             raise VaultException(f"Failed renewing some leases: {list(failed)}")
@@ -49,7 +49,7 @@ def run_dict(cmd, cwd=None):

     if "retcode" not in ret or ret["retcode"] != 0:
         # run_all logs an error to log.error, fail hard back to the user
-        raise CommandExecutionError(f"Issue executing PowerShell cmd", info=ret)
+        raise CommandExecutionError("Issue executing PowerShell cmd", info=ret)

     # Sometimes Powershell returns an empty string, which isn't valid JSON
     if ret["stdout"] == "":
@@ -662,7 +662,7 @@ def test_pillar_refresh_pillar_beacons(
     # Give the beacons a chance to start
     time.sleep(5)

-    event_tag = f"salt/beacon/*/status/*"
+    event_tag = "salt/beacon/*/status/*"
     start_time = time.time()

     event_pattern = (salt_master.id, event_tag)
@@ -686,7 +686,7 @@ def test_pillar_refresh_pillar_beacons(
     # Give the beacons a chance to stop
     time.sleep(5)

-    event_tag = f"salt/beacon/*/status/*"
+    event_tag = "salt/beacon/*/status/*"
     start_time = time.time()

     event_pattern = (salt_master.id, event_tag)
@@ -88,7 +88,7 @@ def test_get_file(salt_ssh_cli, tmp_path, template, dst_is_dir, cachedir):
     )
     for path in (tgt, master_path):
         assert path.exists()
-        data = path.read_text()
+        data = path.read_text(encoding="utf-8")
         assert "Gromit" in data
         assert "bacon" not in data

@@ -101,7 +101,7 @@ def test_get_file_gzipped(salt_ssh_cli, caplog, tmp_path):
     assert res.data == str(tgt)
     assert "The gzip argument to cp.get_file in salt-ssh is unsupported" in caplog.text
     assert tgt.exists()
-    data = tgt.read_text()
+    data = tgt.read_text(encoding="utf-8")
     assert "KNIGHT: They're nervous, sire." in data
     assert "bacon" not in data

@@ -125,7 +125,7 @@ def test_get_file_makedirs(salt_ssh_cli, tmp_path, cachedir):
     )
     for path in (tgt, master_path):
         assert path.exists()
-        data = path.read_text()
+        data = path.read_text(encoding="utf-8")
         assert "KNIGHT: They're nervous, sire." in data
         assert "bacon" not in data

@@ -137,7 +137,7 @@ def test_get_file_from_env(salt_ssh_cli, tmp_path, suffix):
     assert ret.returncode == 0
     assert ret.data
     assert ret.data == str(tgt)
-    data = tgt.read_text()
+    data = tgt.read_text(encoding="utf-8")
     assert "Gromit" in data
     assert ("Comte" in data) is bool(suffix)

@@ -175,7 +175,7 @@ def test_get_template(salt_ssh_cli, tmp_path, cachedir):
     )
     for path in (tgt, master_path):
         assert tgt.exists()
-        data = tgt.read_text()
+        data = tgt.read_text(encoding="utf-8")
         assert "bacon" in data
         assert "spam" not in data

@@ -198,7 +198,7 @@ def test_get_template_dest_empty(salt_ssh_cli, cachedir):
     assert res.data == str(tgt)
     for file in (tgt, master_path):
         assert file.exists()
-        data = file.read_text()
+        data = file.read_text(encoding="utf-8")
         assert "bacon" in data
         assert "spam" not in data

@@ -293,7 +293,7 @@ def test_get_url(salt_ssh_cli, tmp_path, dst_is_dir, cachedir):
     )
     for file in (tgt, master_path):
         assert file.exists()
-        data = file.read_text()
+        data = file.read_text(encoding="utf-8")
         assert "KNIGHT: They're nervous, sire." in data
         assert "bacon" not in data

@@ -317,7 +317,7 @@ def test_get_url_makedirs(salt_ssh_cli, tmp_path, cachedir):
     )
     for file in (tgt, master_path):
         assert file.exists()
-        data = file.read_text()
+        data = file.read_text(encoding="utf-8")
         assert "KNIGHT: They're nervous, sire." in data
         assert "bacon" not in data

@@ -343,7 +343,7 @@ def test_get_url_dest_empty(salt_ssh_cli, cachedir):
     assert res.data == str(tgt)
     for file in (tgt, master_path):
         assert file.exists()
-        data = file.read_text()
+        data = file.read_text(encoding="utf-8")
         assert "KNIGHT: They're nervous, sire." in data
         assert "bacon" not in data

@@ -387,7 +387,7 @@ def test_get_url_https(salt_ssh_cli, tmp_path, cachedir):
     )
     for path in (tgt, master_path):
         assert path.exists()
-        data = path.read_text()
+        data = path.read_text(encoding="utf-8")
         assert "Salt Project" in data
         assert "Package" in data
         assert "Repo" in data
@@ -414,7 +414,7 @@ def test_get_url_https_dest_empty(salt_ssh_cli, tmp_path, cachedir):
     assert res.data == str(tgt)
     for path in (tgt, master_path):
         assert path.exists()
-        data = path.read_text()
+        data = path.read_text(encoding="utf-8")
         assert "Salt Project" in data
         assert "Package" in data
         assert "Repo" in data
@@ -500,7 +500,7 @@ def test_get_url_ftp(salt_ssh_cli, tmp_path, cachedir):
     )
     for path in (tgt, master_path):
         assert path.exists()
-        data = path.read_text()
+        data = path.read_text(encoding="utf-8")
         assert "The official FreeBSD" in data


@@ -515,7 +515,7 @@ def test_get_file_str_salt(salt_ssh_cli, cachedir):
     master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
     for path in (tgt, master_path):
         assert path.exists()
-        text = path.read_text()
+        text = path.read_text(encoding="utf-8")
         assert "KNIGHT: They're nervous, sire." in text


@@ -540,7 +540,7 @@ def test_get_file_str_https(salt_ssh_cli, cachedir):
     master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
     for path in (tgt, master_path):
         assert path.exists()
-        text = path.read_text()
+        text = path.read_text(encoding="utf-8")
         assert "Salt Project" in text
         assert "Package" in text
         assert "Repo" in text
@@ -572,7 +572,7 @@ def test_cache_file(salt_ssh_cli, suffix, cachedir):
     )
     master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
     for file in (tgt, master_path):
-        data = file.read_text()
+        data = file.read_text(encoding="utf-8")
         assert "Gromit" in data
         assert ("Comte" in data) is bool(suffix)

@@ -622,7 +622,7 @@ def test_cache_file_context_cache(salt_ssh_cli, cachedir, _cache_twice):
     for file in (tgt, _convert(salt_ssh_cli, cachedir, tgt, master=True)):
         assert tgt.exists()
         # If both files were present, they should not be re-fetched
-        assert "wasmodifiedhahaha" in tgt.read_text()
+        assert "wasmodifiedhahaha" in tgt.read_text(encoding="utf-8")


 @pytest.mark.parametrize("_cache_twice", ("master", "minion"), indirect=True)
@@ -637,7 +637,7 @@ def test_cache_file_context_cache_requires_both_caches(
     for file in (tgt, _convert(salt_ssh_cli, cachedir, tgt, master=True)):
         assert tgt.exists()
         # If one of the files was removed, it should be re-fetched
-        assert "wasmodifiedhahaha" not in tgt.read_text()
+        assert "wasmodifiedhahaha" not in tgt.read_text(encoding="utf-8")


 def test_cache_file_nonexistent_source(salt_ssh_cli):
@@ -663,7 +663,7 @@ def test_cache_files(salt_ssh_cli, files):
         assert isinstance(path, str)
         path = Path(path)
         assert path.exists()
-        data = Path(path).read_text()
+        data = Path(path).read_text(encoding="utf-8")
         assert "ARTHUR:" in data
         assert "bacon" not in data

@@ -893,4 +893,4 @@ def test_cp_cache_file_as_workaround_for_missing_map_file(
     assert isinstance(ret.data, dict)
     assert ret.data
     assert tgt.exists()
-    assert tgt.read_text().strip() == "bar"
+    assert tgt.read_text(encoding="utf-8").strip() == "bar"
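The repeated test changes above add an explicit encoding to Path.read_text(), which otherwise falls back to the locale's default encoding and is flagged by pylint's unspecified-encoding warning. Shown in isolation (the file name is just a placeholder):

    from pathlib import Path

    path = Path("example.txt")
    path.write_text("Gromit\n", encoding="utf-8")

    data = path.read_text()                  # flagged: relies on the locale encoding
    data = path.read_text(encoding="utf-8")  # fixed: explicit and portable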
@@ -193,4 +193,4 @@ def test_pwdata_decrypt():
         b"\x1a(\x04&yL8\x19s\n\x11\x81\xfd?\xfb2\x80Ll\xa1\xdc\xc9\xb6P\xca\x8d'\x11\xc1"
         b"\x07\xa5\xa1\x058\xc7\xce\xbeb\x92\xbf\x0bL\xec\xdf\xc3M\x83\xfb$\xec\xd5\xf9"
     )
-    assert "1234", salt.crypt.pwdata_decrypt(key_string, pwdata)
+    assert salt.crypt.pwdata_decrypt(key_string, pwdata) == "1234"
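The pwdata_decrypt change is a behavioural fix rather than a style one: assert "1234", expr parses as an assertion on the truthy literal "1234" with expr used only as the failure message, so the old test could never fail (and never even called the decrypt function). Comparing the decrypted value makes the assertion meaningful. A small illustration with a stand-in for salt.crypt.pwdata_decrypt:

    def decrypt(_key, _data):
        # Stand-in for salt.crypt.pwdata_decrypt in this sketch.
        return "wrong value"

    # Always passes: "1234" is truthy; the call after the comma would only be
    # evaluated to build the failure message.
    assert "1234", decrypt("key", "data")

    # The corrected form actually compares the result.
    try:
        assert decrypt("key", "data") == "1234"
    except AssertionError:
        print("the corrected assert catches the wrong value")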
@@ -151,14 +151,14 @@ def test_port_policy_present():
     with patch.dict(selinux.__opts__, {"test": False}):
         comt = (
             f'SELinux policy for "{name}" already present '
-            + f'with specified sel_type "http_cache_port_t", protocol "None" '
-            + f'and port "None".'
+            + 'with specified sel_type "http_cache_port_t", protocol "None" '
+            + 'and port "None".'
         )
         ret.update({"comment": comt, "result": True})
         assert selinux.port_policy_present(name, "http_cache_port_t") == ret

         comt = (
-            f'SELinux policy for "name" already present '
+            'SELinux policy for "name" already present '
             + f'with specified sel_type "http_cache_port_t", protocol "{protocol}" '
             + f'and port "{port}".'
         )
@@ -337,14 +337,14 @@ def test_port_policy_absent():
     with patch.dict(selinux.__opts__, {"test": False}):
         comt = (
             f'SELinux policy for "{name}" already absent '
-            + f'with specified sel_type "http_cache_port_t", protocol "None" '
-            + f'and port "None".'
+            + 'with specified sel_type "http_cache_port_t", protocol "None" '
+            + 'and port "None".'
         )
         ret.update({"comment": comt, "changes": {}, "result": True})
         assert selinux.port_policy_absent(name, "http_cache_port_t") == ret

         comt = (
-            f'SELinux policy for "name" already absent '
+            'SELinux policy for "name" already absent '
             + f'with specified sel_type "http_cache_port_t", protocol "{protocol}" '
             + f'and port "{port}".'
         )
@@ -511,7 +511,7 @@ def test_req_server_chan_encrypt_v2(master_opts, pki_dir):
     if HAS_M2:
         aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding)
     else:
-        cipher = PKCS1_OAEP.new(key)
+        cipher = PKCS1_OAEP.new(key)  # pylint: disable=used-before-assignment
         aes = cipher.decrypt(ret["key"])
     pcrypt = salt.crypt.Crypticle(master_opts, aes)
     signed_msg = pcrypt.loads(ret[dictkey])