Merge pull request #64574 from s0undt3ch/hotfix/merge-forward

[master] Merge 3006.x into master
This commit is contained in:
Megan Wilhite 2023-06-29 13:13:13 +00:00 committed by GitHub
commit 2d02017d6c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 102 additions and 127 deletions

View file

@ -0,0 +1,5 @@
Some more deprecated code fixes:
* Stop using the deprecated `locale.getdefaultlocale()` function
* Stop accessing deprecated attributes
* Stop using `pathlib.Path.__enter__()`: it is deprecated and not required, since it is a no-op

View file

@ -577,33 +577,37 @@ def pytest_runtest_setup(item):
):
item._skipped_by_mark = True
pytest.skip(PRE_PYTEST_SKIP_REASON)
test_group_count = sum(
bool(item.get_closest_marker(group))
for group in ("core_test", "slow_test", "flaky_jail")
)
if item.get_closest_marker("core_test") and item.get_closest_marker("slow_test"):
raise pytest.UsageError(
"Tests can only be in one test group. ('core_test', 'slow_test')"
)
if item.get_closest_marker("core_test"):
if not item.config.getoption("--core-tests"):
raise pytest.skip.Exception(
"Core tests are disabled, pass '--core-tests' to enable them.",
_use_item_location=True,
)
if item.get_closest_marker("slow_test"):
if not item.config.getoption("--slow-tests"):
raise pytest.skip.Exception(
"Slow tests are disabled, pass '--run-slow' to enable them.",
_use_item_location=True,
)
if item.get_closest_marker("flaky_jail"):
if not item.config.getoption("--flaky-jail"):
raise pytest.skip.Exception(
"flaky jail tests are disabled, pass '--flaky-jail' to enable them.",
_use_item_location=True,
)
if (
not item.get_closest_marker("slow_test")
and not item.get_closest_marker("core_test")
and not item.get_closest_marker("flaky_jail")
):
if not item.config.getoption("--no-fast-tests"):
else:
if item.get_closest_marker("core_test"):
if not item.config.getoption("--core-tests"):
raise pytest.skip.Exception(
"Core tests are disabled, pass '--core-tests' to enable them.",
_use_item_location=True,
)
if item.get_closest_marker("slow_test"):
if not item.config.getoption("--slow-tests"):
raise pytest.skip.Exception(
"Slow tests are disabled, pass '--run-slow' to enable them.",
_use_item_location=True,
)
if test_group_count == 0 and item.config.getoption("--no-fast-tests"):
raise pytest.skip.Exception(
"Fast tests are disabled, dont pass '--no-fast-tests' to enable them.",
"Fast tests have been disabled by '--no-fast-tests'.",
_use_item_location=True,
)

View file

@ -322,7 +322,6 @@ def test_issue_46762_prereqs_on_a_state_with_unfulfilled_requirements(
@pytest.mark.skip_on_darwin(reason="Test is broken on macosx")
@pytest.mark.slow_test
def test_issue_30161_unless_and_onlyif_together(state, state_tree, tmp_path):
"""
test cmd.run using multiple unless options where the first cmd in the
@ -370,21 +369,21 @@ def test_issue_30161_unless_and_onlyif_together(state, state_tree, tmp_path):
# then the unless state run bailed out after the first unless command succeeded,
# which is the bug we're regression testing for.
_expected = {
"file_|-unless_false_onlyif_false_|-{}_|-managed".format(test_txt_path): {
f"file_|-unless_false_onlyif_false_|-{test_txt_path}_|-managed": {
"comment": "onlyif condition is false\nunless condition is false",
"name": "{}".format(test_txt_path),
"name": f"{test_txt_path}",
"skip_watch": True,
"changes": {},
"result": True,
},
"file_|-unless_false_onlyif_true_|-{}_|-managed".format(test_txt_path): {
f"file_|-unless_false_onlyif_true_|-{test_txt_path}_|-managed": {
"comment": "Empty file",
"name": str(test_txt_path),
"start_time": "18:10:20.341753",
"result": True,
"changes": {"new": "file {} created".format(test_txt_path)},
"changes": {"new": f"file {test_txt_path} created"},
},
"file_|-unless_true_onlyif_false_|-{}_|-managed".format(test_txt_path): {
f"file_|-unless_true_onlyif_false_|-{test_txt_path}_|-managed": {
"comment": "onlyif condition is false\nunless condition is true",
"name": str(test_txt_path),
"start_time": "18:10:22.936446",
@ -392,7 +391,7 @@ def test_issue_30161_unless_and_onlyif_together(state, state_tree, tmp_path):
"changes": {},
"result": True,
},
"file_|-unless_true_onlyif_true_|-{}_|-managed".format(test_txt_path): {
f"file_|-unless_true_onlyif_true_|-{test_txt_path}_|-managed": {
"comment": "onlyif condition is true\nunless condition is true",
"name": str(test_txt_path),
"skip_watch": True,

View file

@ -571,7 +571,6 @@ def test_issue_38683_require_order_failhard_combination(state, state_tree):
assert ret[state_id].comment == "Failure!"
@pytest.mark.slow_test
@pytest.mark.skip_on_windows
def test_parallel_state_with_requires(state, state_tree):
"""
@ -610,7 +609,7 @@ def test_parallel_state_with_requires(state, state_tree):
assert (end_time - start_time) < 30
for item in range(1, 10):
_id = "cmd_|-blah-{}_|-sleep 2_|-run".format(item)
_id = f"cmd_|-blah-{item}_|-sleep 2_|-run"
assert "__parallel__" in ret[_id]

View file

@ -57,7 +57,6 @@ def test_watch_in_failure(state, state_tree):
)
@pytest.mark.slow_test
def test_requisites_watch_any(state, state_tree):
"""
Call sls file containing several require_in and require.
@ -114,51 +113,51 @@ def test_requisites_watch_any(state, state_tree):
cmd_true=cmd_true, cmd_false=cmd_false
)
expected_result = {
"cmd_|-A_|-{}_|-wait".format(cmd_true): {
f"cmd_|-A_|-{cmd_true}_|-wait": {
"__run_num__": 4,
"comment": 'Command "{}" run'.format(cmd_true),
"comment": f'Command "{cmd_true}" run',
"result": True,
"changes": True,
},
"cmd_|-B_|-{}_|-run".format(cmd_true): {
f"cmd_|-B_|-{cmd_true}_|-run": {
"__run_num__": 0,
"comment": 'Command "{}" run'.format(cmd_true),
"comment": f'Command "{cmd_true}" run',
"result": True,
"changes": True,
},
"cmd_|-C_|-{}_|-run".format(cmd_false): {
f"cmd_|-C_|-{cmd_false}_|-run": {
"__run_num__": 1,
"comment": 'Command "{}" run'.format(cmd_false),
"comment": f'Command "{cmd_false}" run',
"result": False,
"changes": True,
},
"cmd_|-D_|-{}_|-run".format(cmd_true): {
f"cmd_|-D_|-{cmd_true}_|-run": {
"__run_num__": 2,
"comment": 'Command "{}" run'.format(cmd_true),
"comment": f'Command "{cmd_true}" run',
"result": True,
"changes": True,
},
"cmd_|-E_|-{}_|-wait".format(cmd_true): {
f"cmd_|-E_|-{cmd_true}_|-wait": {
"__run_num__": 9,
"comment": 'Command "{}" run'.format(cmd_true),
"comment": f'Command "{cmd_true}" run',
"result": True,
"changes": True,
},
"cmd_|-F_|-{}_|-run".format(cmd_true): {
f"cmd_|-F_|-{cmd_true}_|-run": {
"__run_num__": 5,
"comment": 'Command "{}" run'.format(cmd_true),
"comment": f'Command "{cmd_true}" run',
"result": True,
"changes": True,
},
"cmd_|-G_|-{}_|-run".format(cmd_false): {
f"cmd_|-G_|-{cmd_false}_|-run": {
"__run_num__": 6,
"comment": 'Command "{}" run'.format(cmd_false),
"comment": f'Command "{cmd_false}" run',
"result": False,
"changes": True,
},
"cmd_|-H_|-{}_|-run".format(cmd_false): {
f"cmd_|-H_|-{cmd_false}_|-run": {
"__run_num__": 7,
"comment": 'Command "{}" run'.format(cmd_false),
"comment": f'Command "{cmd_false}" run',
"result": False,
"changes": True,
},

View file

@ -650,7 +650,6 @@ def test_issues_7905_and_8174_sls_syntax_error(state, state_tree):
assert ret.errors == ["State 'C' in SLS 'badlist2' is not formed as a list"]
@pytest.mark.slow_test
def test_retry_option(state, state_tree):
"""
test the retry option on a simple state with defaults
@ -752,7 +751,6 @@ def test_retry_option_success_parallel(state, state_tree, tmp_path):
assert "Attempt 2" not in state_return.comment
@pytest.mark.slow_test
def test_retry_option_eventual_success(state, state_tree, tmp_path):
"""
test a state with the retry option that should return True, eventually
@ -800,7 +798,6 @@ def test_retry_option_eventual_success(state, state_tree, tmp_path):
@pytest.mark.skip_on_windows(
reason="Skipped until parallel states can be fixed on Windows"
)
@pytest.mark.slow_test
def test_retry_option_eventual_success_parallel(state, state_tree, tmp_path):
"""
test a state with the retry option that should return True, eventually
@ -849,7 +846,6 @@ def test_retry_option_eventual_success_parallel(state, state_tree, tmp_path):
assert "Attempt 5" not in state_return.comment
@pytest.mark.slow_test
def test_state_non_base_environment(state, state_tree_prod, tmp_path):
"""
test state.sls with saltenv using a nonbase environment
@ -908,10 +904,8 @@ def test_parallel_state_with_long_tag(state, state_tree):
)
comments = sorted(x.comment for x in ret)
expected = sorted(
'Command "{}" run'.format(x) for x in (short_command, long_command)
)
assert comments == expected, "{} != {}".format(comments, expected)
expected = sorted(f'Command "{x}" run' for x in (short_command, long_command))
assert comments == expected, f"{comments} != {expected}"
@pytest.mark.skip_on_darwin(reason="Test is broken on macosx")

View file

@ -132,7 +132,6 @@ def test_exit_status_correct_usage(salt_cli, salt_minion):
assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret
@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support SIGINT")
@pytest.mark.skip_initial_onedir_failure
def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion):
@ -156,7 +155,7 @@ def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion):
cmdline = [
sys.executable,
salt_cli.get_script_path(),
"--config-dir={}".format(salt_master.config_dir),
f"--config-dir={salt_master.config_dir}",
salt_minion.id,
"test.sleep",
"30",

View file

@ -32,7 +32,6 @@ def dest_testfile():
dst.unlink()
@pytest.mark.slow_test
@pytest.mark.windows_whitelisted
@pytest.mark.core_test
def test_cp_testfile(salt_minion, salt_cp_cli, source_testfile, dest_testfile):

View file

@ -44,14 +44,11 @@ def clear_proxy_minions(salt_master, proxy_minion_id):
for proxy in [proxy_minion_id, "dummy_proxy_one", "dummy_proxy_two"]:
pytest.helpers.remove_stale_minion_key(salt_master, proxy)
cachefile = os.path.join(
salt_master.config["cachedir"], "{}.cache".format(proxy)
)
cachefile = os.path.join(salt_master.config["cachedir"], f"{proxy}.cache")
if os.path.exists(cachefile):
os.unlink(cachefile)
@pytest.mark.slow_test
def test_exit_status_no_proxyid(salt_master, proxy_minion_id):
"""
Ensure correct exit status when --proxyid argument is missing.
@ -93,7 +90,6 @@ def test_exit_status_unknown_user(salt_master, proxy_minion_id):
assert "The user is not available." in exc.value.process_result.stderr
@pytest.mark.slow_test
def test_exit_status_unknown_argument(salt_master, proxy_minion_id):
"""
Ensure correct exit status when an unknown argument is passed to
@ -183,11 +179,11 @@ def test_exit_status_correct_usage(
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
f"{proxy_one}.sls",
dummy_proxy_one_pillar_file,
)
dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_two),
f"{proxy_two}.sls",
dummy_proxy_two_pillar_file,
)
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, dummy_proxy_two_tempfile:
@ -290,7 +286,7 @@ def test_missing_pillar_file(
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
f"{proxy_one}.sls",
dummy_proxy_one_pillar_file,
)
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile:
@ -408,14 +404,14 @@ def test_invalid_connection(
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
f"{proxy_one}.sls",
dummy_proxy_one_pillar_file,
)
broken_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(broken_proxy_one), broken_proxy_one_pillar_file
f"{broken_proxy_one}.sls", broken_proxy_one_pillar_file
)
broken_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(broken_proxy_two), broken_proxy_two_pillar_file
f"{broken_proxy_two}.sls", broken_proxy_two_pillar_file
)
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, broken_proxy_one_tempfile, broken_proxy_two_tempfile:
factory = salt_master.salt_proxy_minion_daemon(
@ -536,11 +532,11 @@ def ping():
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
f"{proxy_one}.sls",
dummy_proxy_one_pillar_file,
)
dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_two),
f"{proxy_two}.sls",
dummy_proxy_two_pillar_file,
)
@ -670,11 +666,11 @@ def ping():
"controlproxy.sls", controlproxy_pillar_file
)
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_one),
f"{proxy_one}.sls",
dummy_proxy_one_pillar_file,
)
dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
"{}.sls".format(proxy_two),
f"{proxy_two}.sls",
dummy_proxy_two_pillar_file,
)

View file

@ -16,8 +16,8 @@ def reset_pillar(salt_call_cli):
finally:
# Refresh pillar once all tests are done.
ret = salt_call_cli.run("saltutil.refresh_pillar", wait=True)
assert ret.exitcode == 0
assert ret.json is True
assert ret.returncode == 0
assert ret.data is True
@pytest.fixture
@ -77,8 +77,8 @@ def test_state_apply_aborts_on_pillar_error(
shell_result = salt_cli.run(
"state.apply", "sls-id-test", minion_tgt=salt_minion.id
)
assert shell_result.exitcode == 1
assert shell_result.json == expected_comment
assert shell_result.returncode == 1
assert shell_result.data == expected_comment
@pytest.mark.usefixtures("testfile_path", "reset_pillar")
@ -117,7 +117,7 @@ def test_state_apply_continues_after_pillar_error_is_fixed(
shell_result = salt_cli.run(
"saltutil.refresh_pillar", minion_tgt=salt_minion.id
)
assert shell_result.exitcode == 0
assert shell_result.returncode == 0
# run state.apply with fixed pillar render error
with pytest.helpers.temp_file(
@ -128,7 +128,7 @@ def test_state_apply_continues_after_pillar_error_is_fixed(
shell_result = salt_cli.run(
"state.apply", "sls-id-test", minion_tgt=salt_minion.id
)
assert shell_result.exitcode == 0
state_result = StateResult(shell_result.json)
assert shell_result.returncode == 0
state_result = StateResult(shell_result.data)
assert state_result.result is True
assert state_result.changes == {"diff": "New file", "mode": "0644"}

View file

@ -87,13 +87,13 @@ def world():
return "world"
"""
test_moduledir = salt_master.state_tree.base.paths[0] / f"_{module_type}"
test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}"
test_moduledir.mkdir(parents=True, exist_ok=True)
module_tempfile = salt_master.state_tree.base.temp_file(
f"_{module_type}/{module_name}.py", module_contents
)
with module_tempfile, test_moduledir:
with module_tempfile:
salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}"
ret = salt_run_cli.run(salt_cmd)
assert ret.returncode == 0

View file

@ -293,7 +293,7 @@ def test_contents_pillar_with_pillar_list(salt_master, salt_call_cli, tmp_path):
target_path
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
with sls_tempfile:
ret = salt_call_cli.run("state.sls", sls_name)
@ -315,7 +315,7 @@ def test_managed_file_with_pillar_sls(salt_master, salt_call_cli, tmp_path):
assert ret.returncode == 0
assert ret.data
target_path = tmp_path / "file-pillar-{}-target.txt".format(ret.data)
target_path = tmp_path / f"file-pillar-{ret.data}-target.txt"
sls_name = "file-pillar-get"
sls_contents = (
"""
@ -329,7 +329,7 @@ def test_managed_file_with_pillar_sls(salt_master, salt_call_cli, tmp_path):
"""
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
with sls_tempfile:
ret = salt_call_cli.run("state.sls", sls_name)
@ -358,7 +358,7 @@ def test_issue_50221(
- contents_pillar: issue-50221
"""
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
issue_50221_ext_pillar_tempfile = pytest.helpers.temp_file(
"issue-50221",
@ -421,11 +421,11 @@ def test_issue_60426(
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
jinja_tempfile = salt_master.state_tree.base.temp_file(
"{}.jinja".format(jinja_name), jinja_contents
f"{jinja_name}.jinja", jinja_contents
)
with sls_tempfile, jinja_tempfile:
@ -462,11 +462,11 @@ def test_issue_60426(
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
jinja_tempfile = salt_master.state_tree.base.temp_file(
"{}.jinja".format(jinja_name), jinja_contents
f"{jinja_name}.jinja", jinja_contents
)
with sls_tempfile, jinja_tempfile:
@ -700,9 +700,10 @@ def test_patch_saltenv(salt_call_cli, content, math_patch_file, salt_master, tmp
# Check to make sure the patch was applied okay
state_run = next(iter(ret.data.values()))
assert state_run["result"] is False
assert state_run[
"comment"
] == "Source file {} not found in saltenv 'prod'".format(math_patch_file)
assert (
state_run["comment"]
== f"Source file {math_patch_file} not found in saltenv 'prod'"
)
@pytest.mark.skip_unless_on_windows
@ -774,9 +775,7 @@ def test_patch_single_file_failure(
state_run = next(iter(ret.data.values()))
assert "Patch would not apply cleanly" in state_run["comment"]
assert (
re.match(
state_run["comment"], "saving rejects to (file )?{}".format(reject_file)
)
re.match(state_run["comment"], f"saving rejects to (file )?{reject_file}")
is None
)
@ -849,9 +848,7 @@ def test_patch_directory_failure(
state_run = next(iter(ret.data.values()))
assert "Patch would not apply cleanly" in state_run["comment"]
assert (
re.match(
state_run["comment"], "saving rejects to (file )?{}".format(reject_file)
)
re.match(state_run["comment"], f"saving rejects to (file )?{reject_file}")
is None
)
@ -1067,10 +1064,10 @@ def test_recurse(
test_tempdir = salt_master.state_tree.base.write_path / "tmp_dir"
test_tempdir.mkdir(parents=True, exist_ok=True)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
with sls_tempfile, test_tempdir:
with sls_tempfile:
for _dir in "test1", "test2", "test3":
test_tempdir.joinpath(_dir).mkdir(parents=True, exist_ok=True)
@ -1114,10 +1111,10 @@ def test_recurse_keep_symlinks_in_fileserver_root(
test_tempdir = salt_master.state_tree.base.write_path / "tmp_dir"
test_tempdir.mkdir(parents=True, exist_ok=True)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
with sls_tempfile, test_tempdir:
with sls_tempfile:
for _dir in "test1", "test2", "test3":
test_tempdir.joinpath(_dir).mkdir(parents=True, exist_ok=True)
@ -1166,10 +1163,10 @@ def test_recurse_keep_symlinks_outside_fileserver_root(
test_tempdir = salt_secondary_master.state_tree.base.write_path / "tmp_dir"
test_tempdir.mkdir(parents=True, exist_ok=True)
sls_tempfile = salt_secondary_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
with sls_tempfile, test_tempdir:
with sls_tempfile:
for _dir in "test1", "test2", "test3":
test_tempdir.joinpath(_dir).mkdir(parents=True, exist_ok=True)
@ -1218,17 +1215,13 @@ def test_issue_62117(
- name: pwd
"""
yaml_tempfile = salt_master.state_tree.base.temp_file(
"{}.yaml".format(name), yaml_contents
)
yaml_tempfile = salt_master.state_tree.base.temp_file(f"{name}.yaml", yaml_contents)
jinja_tempfile = salt_master.state_tree.base.temp_file(
"{}.jinja".format(name), jinja_contents
f"{name}.jinja", jinja_contents
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(name), sls_contents
)
sls_tempfile = salt_master.state_tree.base.temp_file(f"{name}.sls", sls_contents)
with yaml_tempfile, jinja_tempfile, sls_tempfile:
ret = salt_call_cli.run("--local", "state.apply", name.replace("/", "."))
@ -1264,12 +1257,10 @@ def test_issue_62611(
)
jinja_tempfile = salt_master.state_tree.base.temp_file(
"{}.jinja".format(name), jinja_contents
f"{name}.jinja", jinja_contents
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(name), sls_contents
)
sls_tempfile = salt_master.state_tree.base.temp_file(f"{name}.sls", sls_contents)
with jinja_tempfile, sls_tempfile:
ret = salt_call_cli.run("--local", "state.apply", name.replace("/", "."))
@ -1296,7 +1287,7 @@ def test_contents_file(salt_master, salt_call_cli, tmp_path):
target_path
)
sls_tempfile = salt_master.state_tree.base.temp_file(
"{}.sls".format(sls_name), sls_contents
f"{sls_name}.sls", sls_contents
)
with sls_tempfile:
for i in range(1, 4):

View file

@ -38,7 +38,6 @@ def test_format_log_non_ascii_character():
salt.state.format_log(ret)
@pytest.mark.slow_test
def test_render_error_on_invalid_requisite(minion_opts):
"""
Test that the state compiler correctly deliver a rendering
@ -531,7 +530,7 @@ def test_verify_onlyif_parse_slots(tmp_path, minion_opts):
"onlyif": [
{
"fun": "file.search",
"args": ["__slot__:salt:test.echo({})".format(_expand_win_path(name))],
"args": [f"__slot__:salt:test.echo({_expand_win_path(name)})"],
"pattern": "__slot__:salt:test.echo(file-contents)",
}
],
@ -633,7 +632,7 @@ def test_verify_unless_parse_slots(tmp_path, minion_opts):
"unless": [
{
"fun": "file.search",
"args": ["__slot__:salt:test.echo({})".format(_expand_win_path(name))],
"args": [f"__slot__:salt:test.echo({_expand_win_path(name)})"],
"pattern": "__slot__:salt:test.echo(file-contents)",
}
],

View file

@ -35,7 +35,6 @@ def test_format_slots_no_slots(state_obj):
assert cdata == {"args": ["arg"], "kwargs": {"key": "val"}}
@pytest.mark.slow_test
def test_format_slots_arg(state_obj):
"""
Test the format slots is calling a slot specified in args with corresponding arguments.
@ -51,7 +50,6 @@ def test_format_slots_arg(state_obj):
assert cdata == {"args": ["fun_return"], "kwargs": {"key": "val"}}
@pytest.mark.slow_test
def test_format_slots_dict_arg(state_obj):
"""
Test the format slots is calling a slot specified in dict arg.
@ -67,7 +65,6 @@ def test_format_slots_dict_arg(state_obj):
assert cdata == {"args": [{"subarg": "fun_return"}], "kwargs": {"key": "val"}}
@pytest.mark.slow_test
def test_format_slots_listdict_arg(state_obj):
"""
Test the format slots is calling a slot specified in list containing a dict.
@ -83,7 +80,6 @@ def test_format_slots_listdict_arg(state_obj):
assert cdata == {"args": [[{"subarg": "fun_return"}]], "kwargs": {"key": "val"}}
@pytest.mark.slow_test
def test_format_slots_liststr_arg(state_obj):
"""
Test the format slots is calling a slot specified in list containing a dict.
@ -99,7 +95,6 @@ def test_format_slots_liststr_arg(state_obj):
assert cdata == {"args": [["fun_return"]], "kwargs": {"key": "val"}}
@pytest.mark.slow_test
def test_format_slots_kwarg(state_obj):
"""
Test the format slots is calling a slot specified in kwargs with corresponding arguments.
@ -115,7 +110,6 @@ def test_format_slots_kwarg(state_obj):
assert cdata == {"args": ["arg"], "kwargs": {"key": "fun_return"}}
@pytest.mark.slow_test
def test_format_slots_multi(state_obj):
"""
Test the format slots is calling all slots with corresponding arguments when multiple slots
@ -155,7 +149,6 @@ def test_format_slots_multi(state_obj):
}
@pytest.mark.slow_test
def test_format_slots_malformed(state_obj):
"""
Test the format slots keeps malformed slots untouched.
@ -186,7 +179,6 @@ def test_format_slots_malformed(state_obj):
assert cdata == sls_data
@pytest.mark.slow_test
def test_slot_traverse_dict(state_obj):
"""
Test the slot parsing of dict response.
@ -203,7 +195,6 @@ def test_slot_traverse_dict(state_obj):
assert cdata == {"args": ["arg"], "kwargs": {"key": "value1"}}
@pytest.mark.slow_test
def test_slot_append(state_obj):
"""
Test the slot parsing of dict response.