Merge 3006.x into master

This commit is contained in:
Pedro Algarvio 2023-11-26 16:10:40 +00:00
commit e7bfe4bd4f
95 changed files with 9550 additions and 405 deletions

View file

@ -69,15 +69,13 @@ def _nsenter(pid):
return f"nsenter --target {pid} --mount --uts --ipc --net --pid"
def _get_md5(name, path, run_func):
def _get_sha256(name, path, run_func):
"""
Get the MD5 checksum of a file from a container
Get the sha256 checksum of a file from a container
"""
output = run_func(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)[
"stdout"
]
ret = run_func(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True)
try:
return output.split()[0]
return ret["stdout"].split()[0]
except IndexError:
# Destination file does not exist or could not be accessed
return None
@ -368,8 +366,8 @@ def copy_to(
)
# Before we try to replace the file, compare checksums.
source_md5 = __salt__["file.get_sum"](local_file, "md5")
if source_md5 == _get_md5(name, dest, run_all):
source_sha256 = __salt__["file.get_sum"](local_file, "sha256")
if source_sha256 == _get_sha256(name, dest, run_all):
log.debug("%s and %s:%s are the same file, skipping copy", source, name, dest)
return True
@ -399,4 +397,4 @@ def copy_to(
local_file, name, PATH, dest
)
__salt__["cmd.run"](copy_cmd, python_shell=True, output_loglevel="quiet")
return source_md5 == _get_md5(name, dest, run_all)
return source_sha256 == _get_sha256(name, dest, run_all)

View file

@ -23,6 +23,7 @@ import os
import salt.pillar
import salt.utils.stringutils
from salt.config import DEFAULT_HASH_TYPE
try:
import hglib
@ -90,7 +91,7 @@ class Repo:
"""Initialize a hg repo (or open it if it already exists)"""
self.repo_uri = repo_uri
cachedir = os.path.join(__opts__["cachedir"], "hg_pillar")
hash_type = getattr(hashlib, __opts__.get("hash_type", "md5"))
hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE))
repo_hash = hash_type(salt.utils.stringutils.to_bytes(repo_uri)).hexdigest()
self.working_dir = os.path.join(cachedir, repo_hash)
if not os.path.isdir(self.working_dir):

View file

@ -198,22 +198,20 @@ More complete example for MySQL (to also show configuration)
with_lists: [1,3]
"""
import abc # Added in python2.6 so always available
import abc
import logging
from salt.utils.dictupdate import update
from salt.utils.odict import OrderedDict
log = logging.getLogger(__name__)
# Please don't strip redundant parentheses from this file.
# I have added some for clarity.
# tests/unit/pillar/mysql_test.py may help understand this code.
# Set up logging
log = logging.getLogger(__name__)
# This ext_pillar is abstract and cannot be used directory
def __virtual__():
return False

View file

@ -67,6 +67,9 @@ else:
# Flag coverage to track suprocesses by pointing it to the right .coveragerc file
os.environ["COVERAGE_PROCESS_START"] = str(COVERAGERC_FILE)
# Variable defining a FIPS test run or not
FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1"
# Define the pytest plugins we rely on
pytest_plugins = ["helpers_namespace"]
@ -1049,7 +1052,10 @@ def salt_syndic_master_factory(
config_defaults["syndic_master"] = "localhost"
config_defaults["transport"] = request.config.getoption("--transport")
config_overrides = {"log_level_logfile": "quiet"}
config_overrides = {
"log_level_logfile": "quiet",
"fips_mode": FIPS_TESTRUN,
}
ext_pillar = []
if salt.utils.platform.is_windows():
ext_pillar.append(
@ -1162,7 +1168,10 @@ def salt_master_factory(
config_defaults["syndic_master"] = "localhost"
config_defaults["transport"] = salt_syndic_master_factory.config["transport"]
config_overrides = {"log_level_logfile": "quiet"}
config_overrides = {
"log_level_logfile": "quiet",
"fips_mode": FIPS_TESTRUN,
}
ext_pillar = []
if salt.utils.platform.is_windows():
ext_pillar.append(
@ -1270,6 +1279,7 @@ def salt_minion_factory(salt_master_factory):
"log_level_logfile": "quiet",
"file_roots": salt_master_factory.config["file_roots"].copy(),
"pillar_roots": salt_master_factory.config["pillar_roots"].copy(),
"fips_mode": FIPS_TESTRUN,
}
virtualenv_binary = get_virtualenv_binary_path()
@ -1301,6 +1311,7 @@ def salt_sub_minion_factory(salt_master_factory):
"log_level_logfile": "quiet",
"file_roots": salt_master_factory.config["file_roots"].copy(),
"pillar_roots": salt_master_factory.config["pillar_roots"].copy(),
"fips_mode": FIPS_TESTRUN,
}
virtualenv_binary = get_virtualenv_binary_path()

View file

@ -1,10 +1,11 @@
"""
Integration tests for DigitalOcean APIv2
"""
import base64
import hashlib
import pytest
import salt.crypt
import salt.utils.stringutils
from tests.integration.cloud.helpers.cloud_test_base import TIMEOUT, CloudTest
@ -43,6 +44,7 @@ class DigitalOceanTest(CloudTest):
_list_sizes = self.run_cloud("--list-sizes {}".format(self.PROVIDER))
self.assertIn("16gb", [i.strip() for i in _list_sizes])
@pytest.mark.skip_on_fips_enabled_platform
def test_key_management(self):
"""
Test key management

View file

@ -43,13 +43,10 @@ class VenafiTest(ShellCase):
@with_random_name
@pytest.mark.slow_test
@pytest.mark.skip_on_fips_enabled_platform
def test_request(self, name):
cn = "{}.example.com".format(name)
# Provide python27 compatibility
if not isinstance(cn, str):
cn = cn.decode()
ret = self.run_run_plus(
fun="venafi.request",
minion_id=cn,
@ -126,10 +123,6 @@ xlAKgaU6i03jOm5+sww5L2YVMi1eeBN+kx7o94ogpRemC/EUidvl1PUJ6+e7an9V
csr_path = f.name
cn = "test-csr-32313131.venafi.example.com"
# Provide python27 compatibility
if not isinstance(cn, str):
cn = cn.decode()
ret = self.run_run_plus(
fun="venafi.request", minion_id=cn, csr_path=csr_path, zone="fake"
)

View file

@ -89,12 +89,12 @@ class CPModuleTest(ModuleCase):
"""
src = os.path.join(RUNTIME_VARS.FILES, "file", "base", "file.big")
with salt.utils.files.fopen(src, "rb") as fp_:
hash_str = hashlib.md5(fp_.read()).hexdigest()
hash_str = hashlib.sha256(fp_.read()).hexdigest()
self.run_function("cp.get_file", ["salt://file.big", tgt], gzip=5)
with salt.utils.files.fopen(tgt, "rb") as scene:
data = scene.read()
self.assertEqual(hash_str, hashlib.md5(data).hexdigest())
self.assertEqual(hash_str, hashlib.sha256(data).hexdigest())
data = salt.utils.stringutils.to_unicode(data)
self.assertIn("KNIGHT: They're nervous, sire.", data)
self.assertNotIn("bacon", data)

View file

@ -106,6 +106,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.skip_on_fips_enabled_platform
def test_archive_extracted_with_source_hash(self):
"""
test archive.extracted without skip_verify
@ -127,6 +128,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.skip_if_not_root
@pytest.mark.skip_on_fips_enabled_platform
def test_archive_extracted_with_root_user_and_group(self):
"""
test archive.extracted with user and group set to "root"
@ -151,6 +153,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.slow_test
@pytest.mark.skip_on_fips_enabled_platform
def test_archive_extracted_with_strip_in_options(self):
"""
test archive.extracted with --strip in options
@ -170,6 +173,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(os.path.join(ARCHIVE_DIR, "README"))
@pytest.mark.skip_on_fips_enabled_platform
def test_archive_extracted_with_strip_components_in_options(self):
"""
test archive.extracted with --strip-components in options
@ -190,6 +194,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(os.path.join(ARCHIVE_DIR, "README"))
@pytest.mark.slow_test
@pytest.mark.skip_on_fips_enabled_platform
def test_archive_extracted_without_archive_format(self):
"""
test archive.extracted with no archive_format option
@ -206,6 +211,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.skip_on_fips_enabled_platform
def test_archive_extracted_with_cmd_unzip_false(self):
"""
test archive.extracted using use_cmd_unzip argument as false
@ -240,6 +246,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.skip_on_fips_enabled_platform
def test_local_archive_extracted_skip_verify(self):
"""
test archive.extracted with local file, bad hash and skip_verify
@ -258,6 +265,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.slow_test
@pytest.mark.skip_on_fips_enabled_platform
def test_local_archive_extracted_with_source_hash(self):
"""
test archive.extracted with local file and valid hash
@ -275,6 +283,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self._check_extracted(self.untar_file)
@pytest.mark.slow_test
@pytest.mark.skip_on_fips_enabled_platform
def test_local_archive_extracted_with_bad_source_hash(self):
"""
test archive.extracted with local file and bad hash
@ -289,6 +298,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltFalseReturn(ret)
@pytest.mark.skip_on_fips_enabled_platform
def test_local_archive_extracted_with_uppercase_source_hash(self):
"""
test archive.extracted with local file and bad hash

View file

@ -24,6 +24,7 @@ from saltfactories.utils import random_string
import salt.utils.files
import salt.utils.platform
from salt.serializers import yaml
from tests.conftest import FIPS_TESTRUN
from tests.support.helpers import Webserver, get_virtualenv_binary_path
from tests.support.pytest.helpers import TestAccount
from tests.support.runtests import RUNTIME_VARS
@ -187,7 +188,10 @@ def salt_master_factory(
os.path.join(RUNTIME_VARS.FILES, "returners")
)
config_defaults["event_return"] = "runtests_noop"
config_overrides = {"pytest-master": {"log": {"level": "DEBUG"}}}
config_overrides = {
"pytest-master": {"log": {"level": "DEBUG"}},
"fips_mode": FIPS_TESTRUN,
}
ext_pillar = []
if salt.utils.platform.is_windows():
ext_pillar.append(
@ -316,6 +320,7 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul
config_overrides = {
"file_roots": salt_master_factory.config["file_roots"].copy(),
"pillar_roots": salt_master_factory.config["pillar_roots"].copy(),
"fips_mode": FIPS_TESTRUN,
}
virtualenv_binary = get_virtualenv_binary_path()
@ -346,6 +351,7 @@ def salt_sub_minion_factory(salt_master_factory, salt_sub_minion_id):
config_overrides = {
"file_roots": salt_master_factory.config["file_roots"].copy(),
"pillar_roots": salt_master_factory.config["pillar_roots"].copy(),
"fips_mode": FIPS_TESTRUN,
}
virtualenv_binary = get_virtualenv_binary_path()

View file

@ -14,6 +14,7 @@ docker = pytest.importorskip("docker")
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("dockerd"),
]

View file

@ -0,0 +1,25 @@
import salt.channel.client
from tests.support.mock import MagicMock, patch
async def test_async_pub_channel_connect_cb(minion_opts):
"""
Validate connect_callback closes the request channel it creates.
"""
minion_opts["master_uri"] = "tcp://127.0.0.1:4506"
minion_opts["master_ip"] = "127.0.0.1"
with salt.channel.client.AsyncPubChannel.factory(minion_opts) as channel:
async def send_id(*args):
return
channel.send_id = send_id
channel._reconnected = True
mock = MagicMock(salt.channel.client.AsyncReqChannel)
mock.__enter__ = lambda self: mock
with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock):
await channel.connect_callback(None)
mock.send.assert_called_once()
mock.__exit__.assert_called_once()

View file

@ -6,6 +6,7 @@ import os
import attr
import pytest
from pytestskipmarkers.utils import platform
import salt.utils.files
import salt.utils.path
@ -498,6 +499,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="avg_not_list",
expected={"ret": 2.0},
sls="""
{% set result = 2 | avg() %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="difference",
expected={"ret": [1, 3]},
@ -509,6 +521,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="difference_hashable",
expected={"ret": [1, 3]},
sls="""
{% set result = (1, 2, 3, 4) | difference((2, 4, 6)) | list %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="intersect",
expected={"ret": [2, 4]},
@ -520,6 +543,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="intersect_hashable",
expected={"ret": [2, 4]},
sls="""
{% set result = (1, 2, 3, 4) | intersect((2, 4, 6)) | list %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="max",
expected={"ret": 4},
@ -568,6 +602,28 @@ def _filter_id(value):
name="regex_match",
expected={"ret": "('a', 'd')"},
sls="""
{% set result = 'abcd' | regex_match('^(.*)bc(.*)$') %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_match_no_match",
expected={"ret": "None"},
sls="""
{% set result = 'abcd' | regex_match('^(.*)BC(.*)$') %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_match_ignorecase",
expected={"ret": "('a', 'd')"},
sls="""
{% set result = 'abcd' | regex_match('^(.*)BC(.*)$', ignorecase=True) %}
test:
module.run:
@ -575,6 +631,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="regex_match_multiline",
expected={"ret": "('foo1',)"},
sls="""
{% set result = 'foo1\nfoo2\n' | regex_match('(foo.$)', multiline=True) %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_replace",
expected={"ret": "lets__replace__spaces"},
@ -586,10 +653,65 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="regex_replace_no_match",
expected={"ret": "lets replace spaces"},
sls=r"""
{% set result = 'lets replace spaces' | regex_replace('\s+$', '__') %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_replace_ignorecase",
expected={"ret": "barbar"},
sls=r"""
{% set result = 'FOO1foo2' | regex_replace('foo.', 'bar', ignorecase=True) %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_replace_multiline",
expected={"ret": "bar bar "},
sls=r"""
{% set result = 'FOO1\nfoo2\n' | regex_replace('^foo.$', 'bar', ignorecase=True, multiline=True) %}
test:
module.run:
- name: test.echo
- text: '{{ result }}'
""",
),
Filter(
name="regex_search",
expected={"ret": "('a', 'd')"},
sls="""
{% set result = 'abcd' | regex_search('^(.*)bc(.*)$') %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_search_no_match",
expected={"ret": "None"},
sls="""
{% set result = 'abcd' | regex_search('^(.*)BC(.*)$') %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="regex_search_ignorecase",
expected={"ret": "('a', 'd')"},
sls="""
{% set result = 'abcd' | regex_search('^(.*)BC(.*)$', ignorecase=True) %}
test:
module.run:
@ -597,6 +719,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="regex_search_multiline",
expected={"ret": "('foo1',)"},
sls="""
{% set result = 'foo1\nfoo2\n' | regex_search('(foo.$)', multiline=True) %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="sequence",
expected={"ret": ["Salt Rocks!"]},
@ -630,6 +763,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="symmetric_difference_hashable",
expected={"ret": [1, 3, 6]},
sls="""
{% set result = (1, 2, 3, 4) | symmetric_difference((2, 4, 6)) | list %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="to_bool",
expected={"ret": True},
@ -641,6 +785,39 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="to_bool_none",
expected={"ret": "False"},
sls="""
{% set result = 'None' | to_bool() %}
test:
module.run:
- name: test.echo
- text: '{{ result }}'
""",
),
Filter(
name="to_bool_given_bool",
expected={"ret": "True"},
sls="""
{% set result = true | to_bool() %}
test:
module.run:
- name: test.echo
- text: '{{ result }}'
""",
),
Filter(
name="to_bool_not_hashable",
expected={"ret": "True"},
sls="""
{% set result = ['hello', 'world'] | to_bool() %}
test:
module.run:
- name: test.echo
- text: '{{ result }}'
""",
),
Filter(
name="union",
expected={"ret": [1, 2, 3, 4, 6]},
@ -652,6 +829,17 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="union_hashable",
expected={"ret": [1, 2, 3, 4, 6]},
sls="""
{% set result = (1, 2, 3, 4) | union((2, 4, 6)) | list %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="unique",
expected={"ret": ["a", "b", "c"]},
@ -928,11 +1116,117 @@ def _filter_id(value):
- text: {{ result }}
""",
),
Filter(
name="raise",
expected={"ret": {"Question": "Quieres Café?"}},
sls="""
{{ raise('Custom Error') }}
""",
),
Filter(
name="match",
expected={"ret": "match"},
sls="""
{% if 'a' is match('[a-b]') %}
{% set result = 'match' %}
{% else %}
{% set result = 'no_match' %}
{% endif %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="no_match",
expected={"ret": "no match"},
sls="""
{% if 'c' is match('[a-b]') %}
{% set result = 'match' %}
{% else %}
{% set result = 'no match' %}
{% endif %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="match_ignorecase",
expected={"ret": "match"},
sls="""
{% if 'A' is match('[a-b]', True) %}
{% set result = 'match' %}
{% else %}
{% set result = 'no_match' %}
{% endif %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="match_multiline",
expected={"ret": "match"},
sls="""
{% set ml_string = 'this is a multiline\nstring' %}
{% if ml_string is match('.*\n^string', False, True) %}
{% set result = 'match' %}
{% else %}
{% set result = 'no_match' %}
{% endif %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="equalto",
expected={"ret": "equal"},
sls="""
{% if 1 is equalto(1) %}
{% set result = 'equal' %}
{% else %}
{% set result = 'not equal' %}
{% endif %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
Filter(
name="un_equalto",
expected={"ret": "not equal"},
sls="""
{% if 1 is equalto(2) %}
{% set result = 'equal' %}
{% else %}
{% set result = 'not equal' %}
{% endif %}
test:
module.run:
- name: test.echo
- text: {{ result }}
""",
),
],
ids=_filter_id,
)
def filter(request):
return request.param
_filter = request.param
if platform.is_fips_enabled():
if _filter.name in ("md5", "random_hash"):
pytest.skip("Test cannot run on a FIPS enabled platform")
return _filter
def test_filter(state, state_tree, filter, grains):
@ -940,7 +1234,11 @@ def test_filter(state, state_tree, filter, grains):
with filter(state_tree):
ret = state.sls("filter")
log.debug("state.sls returned: %s", ret)
assert not ret.failed
for state_result in ret:
assert state_result.result is True
filter.assert_result(state_result.changes)
if filter.name == "raise":
assert ret.failed
assert "TemplateError" in ret.errors[0]
else:
assert not ret.failed
for state_result in ret:
assert state_result.result is True
filter.assert_result(state_result.changes)

View file

@ -19,6 +19,7 @@ pytestmark = [
pytest.mark.skipif(
mysqlmod.MySQLdb is None, reason="No python mysql client installed."
),
pytest.mark.skip_on_fips_enabled_platform,
]

View file

@ -64,6 +64,8 @@ def test_pkg(grains):
elif grains["os_family"] == "RedHat":
if grains["os"] == "VMware Photon OS":
_pkg = "snoopy"
elif grains["osfinger"] == "Amazon Linux-2023":
return "dnf-utils"
else:
_pkg = "units"
elif grains["os_family"] == "Debian":

View file

@ -23,7 +23,8 @@ except ImportError:
CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split("."))
pytestmark = [
pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library")
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"),
]

View file

@ -8,6 +8,7 @@ from tornado.websocket import websocket_connect
import salt.netapi.rest_tornado as rest_tornado
import salt.utils.json
import salt.utils.yaml
from salt.config import DEFAULT_HASH_TYPE
pytestmark = [
pytest.mark.destructive_test,
@ -41,7 +42,7 @@ async def test_websocket_handler_upgrade_to_websocket(
)
token = salt.utils.json.loads(response.body)["return"][0]["token"]
url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token)
url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}"
request = HTTPRequest(
url, headers={"Origin": "http://example.com", "Host": "example.com"}
)
@ -55,10 +56,12 @@ async def test_websocket_handler_bad_token(client_config, http_server, io_loop):
A bad token should returns a 401 during a websocket connect
"""
token = "A" * len(
getattr(hashlib, client_config.get("hash_type", "md5"))().hexdigest()
getattr(
hashlib, client_config.get("hash_type", DEFAULT_HASH_TYPE)
)().hexdigest()
)
url = "ws://127.0.0.1:{}/all_events/{}".format(http_server.port, token)
url = f"ws://127.0.0.1:{http_server.port}/all_events/{token}"
request = HTTPRequest(
url, headers={"Origin": "http://example.com", "Host": "example.com"}
)
@ -79,7 +82,7 @@ async def test_websocket_handler_cors_origin_wildcard(
)
token = salt.utils.json.loads(response.body)["return"][0]["token"]
url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token)
url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}"
request = HTTPRequest(
url, headers={"Origin": "http://foo.bar", "Host": "example.com"}
)
@ -100,7 +103,7 @@ async def test_cors_origin_single(
)
token = salt.utils.json.loads(response.body)["return"][0]["token"]
url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token)
url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}"
# Example.com should works
request = HTTPRequest(
@ -132,7 +135,7 @@ async def test_cors_origin_multiple(
)
token = salt.utils.json.loads(response.body)["return"][0]["token"]
url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token)
url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}"
# Example.com should works
request = HTTPRequest(

View file

@ -113,6 +113,48 @@ def test_directory_max_depth(file, tmp_path):
assert _mode == _get_oct_mode(untouched_dir)
@pytest.mark.skip_on_windows
def test_directory_children_only(file, tmp_path):
"""
file.directory with children_only=True
"""
name = tmp_path / "directory_children_only_dir"
name.mkdir(0o0700)
strayfile = name / "strayfile"
strayfile.touch()
os.chmod(strayfile, 0o700)
straydir = name / "straydir"
straydir.mkdir(0o0700)
# none of the children nor parent are currently set to the correct mode
ret = file.directory(
name=str(name),
file_mode="0644",
dir_mode="0755",
recurse=["mode"],
children_only=True,
)
assert ret.result is True
# Assert parent directory's mode remains unchanged
assert (
oct(name.stat().st_mode)[-3:] == "700"
), f"Expected mode 700 for {name}, got {oct(name.stat().st_mode)[-3:]}"
# Assert child file's mode is changed
assert (
oct(strayfile.stat().st_mode)[-3:] == "644"
), f"Expected mode 644 for {strayfile}, got {oct(strayfile.stat().st_mode)[-3:]}"
# Assert child directory's mode is changed
assert (
oct(straydir.stat().st_mode)[-3:] == "755"
), f"Expected mode 755 for {straydir}, got {oct(straydir.stat().st_mode)[-3:]}"
def test_directory_clean(file, tmp_path):
"""
file.directory with clean=True

View file

@ -242,7 +242,11 @@ def copr_pkgrepo_with_comments_name(pkgrepo, grains):
or grains["os"] == "VMware Photon OS"
):
pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"]))
if grains["os"] in ("CentOS Stream", "AlmaLinux") and grains["osmajorrelease"] == 9:
if (
grains["os"] in ("CentOS Stream", "AlmaLinux")
and grains["osmajorrelease"] == 9
or grains["osfinger"] == "Amazon Linux-2023"
):
pytest.skip("No repo for {} in test COPR yet".format(grains["osfinger"]))
pkgrepo_name = "hello-copr"
try:

View file

@ -41,7 +41,7 @@ class TestRequestHandler(http.server.SimpleHTTPRequestHandler):
) as reqfp:
return_data = reqfp.read()
# We're using this checksum as the etag to show file changes
checksum = hashlib.md5(return_data).hexdigest()
checksum = hashlib.sha256(return_data).hexdigest()
if none_match == checksum:
# Status code 304 Not Modified is returned if the file is unchanged
status_code = 304

View file

@ -41,7 +41,7 @@ class RequestHandler(http.server.SimpleHTTPRequestHandler):
) as reqfp:
return_text = reqfp.read().encode("utf-8")
# We're using this checksum as the etag to show file changes
checksum = hashlib.md5(return_text).hexdigest()
checksum = hashlib.sha256(return_text).hexdigest()
if none_match == checksum:
# Status code 304 Not Modified is returned if the file is unchanged
status_code = 304

View file

@ -10,21 +10,19 @@ log = logging.getLogger(__name__)
@pytest.mark.core_test
def test_issue_58763(tmp_path, modules, state_tree, caplog):
venv_dir = tmp_path / "issue-2028-pip-installed"
sls_contents = dedent(
"""
run_old:
module.run:
- name: test.random_hash
- size: 10
- hash_type: md5
- hash_type: sha256
run_new:
module.run:
- test.random_hash:
- size: 10
- hash_type: md5
- hash_type: sha256
"""
)
with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree):
@ -42,14 +40,12 @@ def test_issue_58763(tmp_path, modules, state_tree, caplog):
@pytest.mark.core_test
def test_issue_58763_a(tmp_path, modules, state_tree, caplog):
venv_dir = tmp_path / "issue-2028-pip-installed"
sls_contents = dedent(
"""
test.random_hash:
module.run:
- size: 10
- hash_type: md5
- hash_type: sha256
"""
)
with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree):
@ -68,8 +64,6 @@ def test_issue_58763_a(tmp_path, modules, state_tree, caplog):
@pytest.mark.core_test
def test_issue_58763_b(tmp_path, modules, state_tree, caplog):
venv_dir = tmp_path / "issue-2028-pip-installed"
sls_contents = dedent(
"""
test.ping:
@ -90,8 +84,6 @@ def test_issue_58763_b(tmp_path, modules, state_tree, caplog):
@pytest.mark.core_test
def test_issue_62988_a(tmp_path, modules, state_tree, caplog):
venv_dir = tmp_path / "issue-2028-pip-installed"
sls_contents = dedent(
"""
test_foo:
@ -101,7 +93,7 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog):
module.wait:
- test.random_hash:
- size: 10
- hash_type: md5
- hash_type: sha256
- watch:
- test: test_foo
"""
@ -120,8 +112,6 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog):
@pytest.mark.core_test
def test_issue_62988_b(tmp_path, modules, state_tree, caplog):
venv_dir = tmp_path / "issue-2028-pip-installed"
sls_contents = dedent(
"""
test_foo:
@ -133,7 +123,7 @@ def test_issue_62988_b(tmp_path, modules, state_tree, caplog):
module.wait:
- test.random_hash:
- size: 10
- hash_type: md5
- hash_type: sha256
"""
)
with pytest.helpers.temp_file("issue-62988.sls", sls_contents, state_tree):

View file

@ -25,6 +25,10 @@ except ImportError:
log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
def _win_user_where(username, password, program):
cmd = "cmd.exe /c where {}".format(program)

View file

@ -43,7 +43,10 @@ def PKG_TARGETS(grains):
if grains["os"] == "Windows":
_PKG_TARGETS = ["vlc", "putty"]
elif grains["os"] == "Amazon":
_PKG_TARGETS = ["lynx", "gnuplot"]
if grains["osfinger"] == "Amazon Linux-2023":
_PKG_TARGETS = ["lynx", "gnuplot-minimal"]
else:
_PKG_TARGETS = ["lynx", "gnuplot"]
elif grains["os_family"] == "RedHat":
if grains["os"] == "VMware Photon OS":
if grains["osmajorrelease"] >= 5:

View file

@ -9,6 +9,7 @@ log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False),
]

View file

@ -1,5 +1,5 @@
import base64
from pathlib import Path
import pathlib
import pytest
@ -26,6 +26,7 @@ CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split(".")
pytestmark = [
pytest.mark.slow_test,
pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"),
pytest.mark.skip_on_fips_enabled_platform,
]
@ -703,7 +704,7 @@ def existing_pk(x509, pk_args, request):
@pytest.fixture(params=["existing_cert"])
def existing_symlink(request):
existing = request.getfixturevalue(request.param)
test_file = Path(existing).with_name("symlink")
test_file = pathlib.Path(existing).with_name("symlink")
test_file.symlink_to(existing)
yield test_file
# cleanup is done by tmp_path
@ -884,7 +885,7 @@ def test_certificate_managed_test_true(x509, cert_args, rsa_privkey, ca_key):
ret = x509.certificate_managed(**cert_args)
assert ret.result is None
assert ret.changes
assert not Path(cert_args["name"]).exists()
assert not pathlib.Path(cert_args["name"]).exists()
@pytest.mark.usefixtures("existing_cert")
@ -1324,7 +1325,7 @@ def test_certificate_managed_file_managed_create_false(
ret = x509.certificate_managed(**cert_args)
assert ret.result is True
assert not ret.changes
assert not Path(cert_args["name"]).exists()
assert not pathlib.Path(cert_args["name"]).exists()
@pytest.mark.usefixtures("existing_cert")
@ -1397,7 +1398,7 @@ def test_certificate_managed_follow_symlinks(
"""
cert_args["name"] = str(existing_symlink)
cert_args["encoding"] = encoding
assert Path(cert_args["name"]).is_symlink()
assert pathlib.Path(cert_args["name"]).is_symlink()
cert_args["follow_symlinks"] = follow
ret = x509.certificate_managed(**cert_args)
assert bool(ret.changes) == (not follow)
@ -1417,13 +1418,13 @@ def test_certificate_managed_follow_symlinks_changes(
the checking of the existing file is performed by the x509 module
"""
cert_args["name"] = str(existing_symlink)
assert Path(cert_args["name"]).is_symlink()
assert pathlib.Path(cert_args["name"]).is_symlink()
cert_args["follow_symlinks"] = follow
cert_args["encoding"] = encoding
cert_args["CN"] = "new"
ret = x509.certificate_managed(**cert_args)
assert ret.changes
assert Path(ret.name).is_symlink() == follow
assert pathlib.Path(ret.name).is_symlink() == follow
@pytest.mark.parametrize("encoding", ["pem", "der"])
@ -1436,7 +1437,7 @@ def test_certificate_managed_file_managed_error(
cert_args["private_key"] = rsa_privkey
cert_args["makedirs"] = False
cert_args["encoding"] = encoding
cert_args["name"] = str(Path(cert_args["name"]).parent / "missing" / "cert")
cert_args["name"] = str(pathlib.Path(cert_args["name"]).parent / "missing" / "cert")
ret = x509.certificate_managed(**cert_args)
assert ret.result is False
assert "Could not create file, see file.managed output" in ret.comment
@ -1504,7 +1505,7 @@ def test_crl_managed_test_true(x509, crl_args, crl_revoked):
assert ret.result is None
assert ret.changes
assert ret.result is None
assert not Path(crl_args["name"]).exists()
assert not pathlib.Path(crl_args["name"]).exists()
@pytest.mark.usefixtures("existing_crl")
@ -1708,7 +1709,7 @@ def test_crl_managed_file_managed_create_false(x509, crl_args):
ret = x509.crl_managed(**crl_args)
assert ret.result is True
assert not ret.changes
assert not Path(crl_args["name"]).exists()
assert not pathlib.Path(crl_args["name"]).exists()
@pytest.mark.usefixtures("existing_crl")
@ -1782,7 +1783,7 @@ def test_crl_managed_follow_symlinks(
"""
crl_args["name"] = str(existing_symlink)
crl_args["encoding"] = encoding
assert Path(crl_args["name"]).is_symlink()
assert pathlib.Path(crl_args["name"]).is_symlink()
crl_args["follow_symlinks"] = follow
ret = x509.crl_managed(**crl_args)
assert bool(ret.changes) == (not follow)
@ -1802,13 +1803,13 @@ def test_crl_managed_follow_symlinks_changes(
the checking of the existing file is performed by the x509 module
"""
crl_args["name"] = str(existing_symlink)
assert Path(crl_args["name"]).is_symlink()
assert pathlib.Path(crl_args["name"]).is_symlink()
crl_args["follow_symlinks"] = follow
crl_args["encoding"] = encoding
crl_args["revoked"] = crl_revoked
ret = x509.crl_managed(**crl_args)
assert ret.changes
assert Path(ret.name).is_symlink() == follow
assert pathlib.Path(ret.name).is_symlink() == follow
@pytest.mark.parametrize("encoding", ["pem", "der"])
@ -1818,7 +1819,7 @@ def test_crl_managed_file_managed_error(x509, crl_args, encoding):
"""
crl_args["makedirs"] = False
crl_args["encoding"] = encoding
crl_args["name"] = str(Path(crl_args["name"]).parent / "missing" / "crl")
crl_args["name"] = str(pathlib.Path(crl_args["name"]).parent / "missing" / "crl")
ret = x509.crl_managed(**crl_args)
assert ret.result is False
assert "Could not create file, see file.managed output" in ret.comment
@ -1866,7 +1867,7 @@ def test_csr_managed_test_true(x509, csr_args, rsa_privkey):
ret = x509.csr_managed(**csr_args)
assert ret.result is None
assert ret.changes
assert not Path(csr_args["name"]).exists()
assert not pathlib.Path(csr_args["name"]).exists()
@pytest.mark.usefixtures("existing_csr")
@ -2002,7 +2003,7 @@ def test_csr_managed_file_managed_create_false(x509, csr_args):
ret = x509.csr_managed(**csr_args)
assert ret.result is True
assert not ret.changes
assert not Path(csr_args["name"]).exists()
assert not pathlib.Path(csr_args["name"]).exists()
@pytest.mark.usefixtures("existing_csr")
@ -2066,12 +2067,12 @@ def test_csr_managed_follow_symlinks(
the checking of the existing file is performed by the x509 module
"""
csr_args["name"] = str(existing_symlink)
assert Path(csr_args["name"]).is_symlink()
assert pathlib.Path(csr_args["name"]).is_symlink()
csr_args["follow_symlinks"] = follow
csr_args["encoding"] = encoding
ret = x509.csr_managed(**csr_args)
assert bool(ret.changes) == (not follow)
assert Path(ret.name).is_symlink() == follow
assert pathlib.Path(ret.name).is_symlink() == follow
@pytest.mark.parametrize(
@ -2088,14 +2089,14 @@ def test_csr_managed_follow_symlinks_changes(
the checking of the existing file is performed by the x509 module
"""
csr_args["name"] = str(existing_symlink)
assert Path(csr_args["name"]).is_symlink()
assert pathlib.Path(csr_args["name"]).is_symlink()
csr_args["follow_symlinks"] = follow
csr_args["encoding"] = encoding
csr_args["CN"] = "new"
ret = x509.csr_managed(**csr_args)
assert ret.result
assert ret.changes
assert Path(ret.name).is_symlink() == follow
assert pathlib.Path(ret.name).is_symlink() == follow
@pytest.mark.parametrize("encoding", ["pem", "der"])
@ -2105,7 +2106,7 @@ def test_csr_managed_file_managed_error(x509, csr_args, encoding):
"""
csr_args["makedirs"] = False
csr_args["encoding"] = encoding
csr_args["name"] = str(Path(csr_args["name"]).parent / "missing" / "csr")
csr_args["name"] = str(pathlib.Path(csr_args["name"]).parent / "missing" / "csr")
ret = x509.csr_managed(**csr_args)
assert ret.result is False
assert "Could not create file, see file.managed output" in ret.comment
@ -2312,7 +2313,7 @@ def test_private_key_managed_file_managed_create_false(x509, pk_args):
ret = x509.private_key_managed(**pk_args)
assert ret.result is True
assert not ret.changes
assert not Path(pk_args["name"]).exists()
assert not pathlib.Path(pk_args["name"]).exists()
@pytest.mark.usefixtures("existing_pk")
@ -2361,7 +2362,7 @@ def test_private_key_managed_follow_symlinks(
"""
pk_args["name"] = str(existing_symlink)
pk_args["encoding"] = encoding
assert Path(pk_args["name"]).is_symlink()
assert pathlib.Path(pk_args["name"]).is_symlink()
pk_args["follow_symlinks"] = follow
ret = x509.private_key_managed(**pk_args)
assert bool(ret.changes) == (not follow)
@ -2381,13 +2382,13 @@ def test_private_key_managed_follow_symlinks_changes(
the checking of the existing file is performed by the x509 module
"""
pk_args["name"] = str(existing_symlink)
assert Path(pk_args["name"]).is_symlink()
assert pathlib.Path(pk_args["name"]).is_symlink()
pk_args["follow_symlinks"] = follow
pk_args["encoding"] = encoding
pk_args["algo"] = "ec"
ret = x509.private_key_managed(**pk_args)
assert ret.changes
assert Path(ret.name).is_symlink() == follow
assert pathlib.Path(ret.name).is_symlink() == follow
@pytest.mark.usefixtures("existing_pk")
@ -2415,7 +2416,7 @@ def test_private_key_managed_file_managed_error(x509, pk_args, encoding):
"""
pk_args["makedirs"] = False
pk_args["encoding"] = encoding
pk_args["name"] = str(Path(pk_args["name"]).parent / "missing" / "pk")
pk_args["name"] = str(pathlib.Path(pk_args["name"]).parent / "missing" / "pk")
ret = x509.private_key_managed(**pk_args)
assert ret.result is False
assert "Could not create file, see file.managed output" in ret.comment
@ -2693,7 +2694,7 @@ def _assert_cert_basic(
def _get_cert(cert, encoding="pem", passphrase=None):
try:
p = Path(cert)
p = pathlib.Path(cert)
if p.exists():
cert = p.read_bytes()
except Exception: # pylint: disable=broad-except
@ -2775,7 +2776,7 @@ def _assert_not_changed(ret):
def _get_crl(crl, encoding="pem"):
try:
p = Path(crl)
p = pathlib.Path(crl)
if p.exists():
crl = p.read_bytes()
except Exception: # pylint: disable=broad-except
@ -2793,7 +2794,7 @@ def _get_crl(crl, encoding="pem"):
def _get_csr(csr, encoding="pem"):
try:
p = Path(csr)
p = pathlib.Path(csr)
if p.exists():
csr = p.read_bytes()
except Exception: # pylint: disable=broad-except
@ -2811,7 +2812,7 @@ def _get_csr(csr, encoding="pem"):
def _get_privkey(pk, encoding="pem", passphrase=None):
try:
p = Path(pk)
p = pathlib.Path(pk)
if p.exists():
pk = p.read_bytes()
except Exception: # pylint: disable=broad-except

View file

@ -20,11 +20,13 @@ def configure_loader_modules(minion_opts, modules):
"__opts__": minion_opts,
"__salt__": modules,
"__utils__": utils,
"__context__": {},
},
win_lgpo_module: {
"__opts__": minion_opts,
"__salt__": modules,
"__utils__": utils,
"__context__": {},
},
}

View file

@ -14,9 +14,10 @@ log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.skip_on_spawning_platform(
reason="These tests are currently broken on spawning platforms. Need to be rewritten.",
)
),
]

View file

@ -13,6 +13,7 @@ pytestmark = [
]
@pytest.mark.skip_on_fips_enabled_platform
def test_tcp_load_balancer_server(master_opts, io_loop):
messages = []
@ -27,7 +28,7 @@ def test_tcp_load_balancer_server(master_opts, io_loop):
def run_loop():
try:
io_loop.start()
except Exception as exc:
except Exception as exc: # pylint: disable=broad-except
print(f"Caught exeption {exc}")
thread = threading.Thread(target=server.run)
@ -50,7 +51,7 @@ def test_tcp_load_balancer_server(master_opts, io_loop):
if time.monotonic() - start > 30:
break
io_loop.run_sync(lambda: check_test())
io_loop.run_sync(lambda: check_test()) # pylint: disable=unnecessary-lambda
try:
if time.monotonic() - start > 30:

View file

@ -13,6 +13,7 @@ log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.skip_on_freebsd(reason="Temporarily skipped on FreeBSD."),
pytest.mark.skip_on_spawning_platform(
reason="These tests are currently broken on spawning platforms. Need to be rewritten.",

View file

@ -24,6 +24,19 @@ pytestmark = [
]
@pytest.fixture
def salt_minion_2(salt_master):
    """
    Spin up a second running salt-minion daemon (id ``minion-2``) attached
    to the session master, for tests that need more than one minion.
    """
    minion = salt_master.salt_minion_daemon(
        "minion-2",
        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
    )
    # Start the daemon for the duration of the test, then tear it down.
    with minion.started(start_timeout=120):
        yield minion
def test_context_retcode_salt(salt_cli, salt_minion):
"""
Test that a nonzero retcode set in the context dunder will cause the
@ -234,3 +247,25 @@ def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion):
assert "Exiting gracefully on Ctrl-c" in ret.stderr
assert "Exception ignored in" not in ret.stderr
assert "This job's jid is" in ret.stderr
def test_minion_65400(salt_cli, salt_minion, salt_minion_2, salt_master):
    """
    Ensure correct exit status when salt CLI starts correctly.

    Regression test for issue 65400: a state that returns ``result: False``
    on multiple minions must still yield a per-minion dict of state results,
    never a bare ``"Error: ..."`` string.
    """
    # Plain string: no interpolations, so the f-prefix was extraneous.
    state = """
    custom_test_state:
      test.configurable_test_state:
        - name: example
        - changes: True
        - result: False
        - comment: 65400 regression test
    """
    with salt_master.state_tree.base.temp_file("test_65400.sls", state):
        ret = salt_cli.run("state.sls", "test_65400", minion_tgt="*")
        assert isinstance(ret.data, dict)
        # Both minions (salt_minion and salt_minion_2) must have replied.
        assert len(ret.data) == 2
        for minion_id, minion_ret in ret.data.items():
            # The failing state must still be a structured result dict.
            assert minion_ret != "Error: test.configurable_test_state"
            assert isinstance(minion_ret, dict)

View file

@ -44,6 +44,7 @@ def file_add_delete_sls(testfile_path, base_env_state_tree_root_dir):
yield sls_name
@pytest.mark.skip_on_fips_enabled_platform
@pytest.mark.skip_on_darwin(reason="MacOS is a spawning platform, won't work")
@pytest.mark.flaky(max_runs=4)
def test_memory_leak(salt_cli, salt_minion, file_add_delete_sls):

View file

@ -0,0 +1,64 @@
"""
Test the jinja module
"""
import os
import salt.utils.files
import salt.utils.json
import salt.utils.yaml
from tests.support.runtests import RUNTIME_VARS
def _path(name, absolute=False):
path = os.path.join("modules", "jinja", name)
if absolute:
return os.path.join(RUNTIME_VARS.BASE_FILES, path)
else:
return path
def test_import_json(salt_cli, salt_minion):
    """``jinja.import_json`` must return the parsed content of the JSON source file."""
    name = "osarchmap.json"
    ret = salt_cli.run("jinja.import_json", _path(name), minion_tgt=salt_minion.id)
    # Load the same fixture locally and compare against what the minion returned.
    with salt.utils.files.fopen(_path(name, absolute=True)) as fh:
        expected = salt.utils.json.load(fh)
    assert expected == ret.data
def test_import_yaml(salt_cli, salt_minion):
    """``jinja.import_yaml`` must return the parsed content of the YAML source file."""
    name = "defaults.yaml"
    ret = salt_cli.run("jinja.import_yaml", _path(name), minion_tgt=salt_minion.id)
    # Load the same fixture locally and compare against what the minion returned.
    with salt.utils.files.fopen(_path(name, absolute=True)) as fh:
        expected = salt.utils.yaml.safe_load(fh)
    assert expected == ret.data
def test_load_map(grains, salt_cli, salt_minion):
    """
    ``jinja.load_map`` must merge ``defaults.yaml`` with the osarch/osfamily/
    os/osfinger maps for the running grains.
    """

    def _load_fixture(name, loader):
        # Read one of the map source files shipped in the base fileserver root.
        with salt.utils.files.fopen(_path(name, absolute=True)) as fh_:
            return loader(fh_)

    ret = salt_cli.run(
        "jinja.load_map", _path("map.jinja"), "template", minion_tgt=salt_minion.id
    )
    assert isinstance(
        ret.data, dict
    ), f"failed to return dictionary from jinja.load_map: {ret}"

    defaults = _load_fixture("defaults.yaml", salt.utils.yaml.safe_load)
    osarchmap = _load_fixture("osarchmap.json", salt.utils.json.load)
    osfamilymap = _load_fixture("osfamilymap.yaml", salt.utils.yaml.safe_load)
    osmap = _load_fixture("osmap.yaml", salt.utils.yaml.safe_load)
    osfingermap = _load_fixture("osfingermap.yaml", salt.utils.yaml.safe_load)

    assert ret.data.get("arch") == osarchmap.get(grains["osarch"], {}).get("arch")
    # Config resolution precedence: osfinger > os > os_family > defaults.
    assert ret.data.get("config") == osfingermap.get(grains["osfinger"], {}).get(
        "config",
        osmap.get(grains["os"], {}).get(
            "config",
            osfamilymap.get(grains["os_family"], {}).get(
                "config", defaults.get("template").get("config")
            ),
        ),
    )

View file

@ -0,0 +1,36 @@
import pytest
import salt.utils.files
pytestmark = [
pytest.mark.slow_test,
]
def test_issue_54765_salt(tmp_path, salt_cli, salt_minion):
    """
    Regression test for issue 54765: pillar data passed on the ``salt`` CLI
    must reach state rendering, so the managed file ends up containing ``bar``.
    """
    file_path = str(tmp_path / "issue-54765")
    ret = salt_cli.run(
        "state.sls",
        mods="issue-54765",
        pillar={"file_path": file_path},
        minion_tgt=salt_minion.id,
    ).data
    # f-string instead of str.format, consistent with the rest of the suite.
    key = f"file_|-issue-54765_|-{file_path}_|-managed"
    assert key in ret
    assert ret[key]["result"] is True
    with salt.utils.files.fopen(file_path, "r") as fp:
        assert fp.read().strip() == "bar"
def test_issue_54765_call(tmp_path, salt_call_cli):
    """
    Regression test for issue 54765 via ``salt-call --local``: CLI pillar
    data must reach state rendering.
    """
    file_path = str(tmp_path / "issue-54765")
    ret = salt_call_cli.run(
        "--local",
        "state.apply",
        "issue-54765",
        pillar=f"{{'file_path': '{file_path}'}}",
    )
    # f-string instead of str.format, consistent with the rest of the suite.
    key = f"file_|-issue-54765_|-{file_path}_|-managed"
    # Guard first so a missing key fails with a clear assertion, not KeyError.
    assert key in ret.data
    assert ret.data[key]["result"] is True
    with salt.utils.files.fopen(file_path, "r") as fp:
        assert fp.read().strip() == "bar"

View file

@ -100,6 +100,34 @@ def world():
assert f"{module_type}.hello" in ret.stdout
def test_sync_refresh_false(
    module_type, module_sync_functions, salt_run_cli, salt_minion, salt_master
):
    """
    Ensure modules are synced when various sync functions are called

    Drops a dummy module into the master's ``_<module_type>`` directory and
    runs ``saltutil.sync_<kind>`` with ``refresh=False``, checking the runner
    exits cleanly and reports the sync function in its output.
    """
    module_name = f"hello_sync_{module_type}"
    # Minimal dummy module source; loads under the virtual name "hello".
    module_contents = """
def __virtual__():
    return "hello"
def world():
    return "world"
"""
    # Make sure the master-side sync directory (e.g. _modules) exists.
    test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}"
    test_moduledir.mkdir(parents=True, exist_ok=True)
    module_tempfile = salt_master.state_tree.base.temp_file(
        f"_{module_type}/{module_name}.py", module_contents
    )
    with module_tempfile:
        # refresh=False: sync is requested without triggering a module refresh.
        salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}"
        ret = salt_run_cli.run(salt_cmd, saltenv=None, refresh=False)
        assert ret.returncode == 0
        assert f"saltutil.sync_{module_sync_functions[module_type]}" in ret.stdout
def _write_module_dir_and_file(module_type, salt_minion, salt_master):
"""
Write out dummy module to appropriate module location

View file

@ -19,7 +19,9 @@ from saltfactories.utils import random_string
import salt.utils.files
pytestmark = pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows")
pytestmark = [
pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows"),
]
def _custom_roster(roster_file, roster_data):
@ -33,33 +35,39 @@ def _custom_roster(roster_file, roster_data):
@pytest.fixture
def _create_roster(salt_ssh_roster_file, tmp_path):
ret = {}
ret["roster"] = salt_ssh_roster_file
ret["data"] = {"ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh")}
ret["test_script"] = str(tmp_path / "test-pre-flight-script-worked.txt")
ret["thin_dir"] = tmp_path / "thin_dir"
thin_dir = tmp_path / "thin-dir"
ret = {
"roster": salt_ssh_roster_file,
"data": {
"ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh"),
},
"test_script": str(tmp_path / "test-pre-flight-script-worked.txt"),
"thin_dir": str(thin_dir),
}
with salt.utils.files.fopen(salt_ssh_roster_file, "r") as fp:
data = salt.utils.yaml.safe_load(fp)
pre_flight_script = ret["data"]["ssh_pre_flight"]
data["localhost"]["ssh_pre_flight"] = pre_flight_script
data["localhost"]["thin_dir"] = str(ret["thin_dir"])
data["localhost"]["thin_dir"] = ret["thin_dir"]
with salt.utils.files.fopen(salt_ssh_roster_file, "w") as fp:
yaml.safe_dump(data, fp)
with salt.utils.files.fopen(pre_flight_script, "w") as fp:
fp.write("touch {}".format(ret["test_script"]))
yield ret
if ret["thin_dir"].exists():
shutil.rmtree(ret["thin_dir"])
try:
yield ret
finally:
if thin_dir.exists():
shutil.rmtree(thin_dir)
@pytest.mark.slow_test
def test_ssh_pre_flight(salt_ssh_cli, caplog, _create_roster):
"""
test ssh when ssh_pre_flight is set
ensure the script runs successfully
test ssh when ssh_pre_flight is set ensure the script runs successfully
"""
ret = salt_ssh_cli.run("test.ping")
assert ret.returncode == 0
@ -70,8 +78,7 @@ def test_ssh_pre_flight(salt_ssh_cli, caplog, _create_roster):
@pytest.mark.slow_test
def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster):
"""
test ssh when --pre-flight is passed to salt-ssh
to ensure the script runs successfully
test ssh when --pre-flight is passed to salt-ssh to ensure the script runs successfully
"""
# make sure we previously ran a command so the thin dir exists
ret = salt_ssh_cli.run("test.ping")
@ -85,10 +92,7 @@ def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster):
assert not pathlib.Path(_create_roster["test_script"]).exists()
# Now ensure
ret = salt_ssh_cli.run(
"test.ping",
"--pre-flight",
)
ret = salt_ssh_cli.run("test.ping", "--pre-flight")
assert ret.returncode == 0
assert pathlib.Path(_create_roster["test_script"]).exists()
@ -115,18 +119,15 @@ def test_ssh_run_pre_flight_args(salt_ssh_cli, _create_roster):
assert ret.returncode == 0
assert test_script_1.exists()
assert test_script_2.exists()
pathlib.Path(test_script_1).unlink()
pathlib.Path(test_script_2).unlink()
test_script_1.unlink()
test_script_2.unlink()
ret = salt_ssh_cli.run("test.ping")
assert ret.returncode == 0
assert not test_script_1.exists()
assert not test_script_2.exists()
ret = salt_ssh_cli.run(
"test.ping",
"--pre-flight",
)
ret = salt_ssh_cli.run("test.ping", "--pre-flight")
assert ret.returncode == 0
assert test_script_1.exists()
assert test_script_2.exists()
@ -166,17 +167,14 @@ def test_ssh_run_pre_flight_args_prevent_injection(
test_script_2.unlink()
assert not injected_file.is_file()
ret = salt_ssh_cli.run(
"test.ping",
"--pre-flight",
)
ret = salt_ssh_cli.run("test.ping", "--pre-flight")
assert ret.returncode == 0
assert test_script_1.exists()
assert test_script_2.exists()
assert not pathlib.Path(
injected_file
).is_file(), "File injection suceeded. This shouldn't happend"
assert (
not injected_file.is_file()
), "File injection suceeded. This shouldn't happend"
@pytest.mark.flaky(max_runs=4)
@ -189,10 +187,7 @@ def test_ssh_run_pre_flight_failure(salt_ssh_cli, _create_roster):
with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp_:
fp_.write("exit 2")
ret = salt_ssh_cli.run(
"test.ping",
"--pre-flight",
)
ret = salt_ssh_cli.run("test.ping", "--pre-flight")
assert ret.data["retcode"] == 2
@ -255,7 +250,7 @@ def test_ssh_pre_flight_perms(salt_ssh_cli, caplog, _create_roster, account):
x=1
while [ $x -le 200000 ]; do
SCRIPT=`bash {str(tmp_preflight)} 2> /dev/null; echo $?`
if [ ${{SCRIPT}} == 0 ]; then
if [ ${{SCRIPT}} -eq 0 ]; then
break
fi
x=$(( $x + 1 ))
@ -301,10 +296,7 @@ def test_ssh_run_pre_flight_target_file_perms(salt_ssh_cli, _create_roster, tmp_
"""
)
ret = salt_ssh_cli.run(
"test.ping",
"--pre-flight",
)
ret = salt_ssh_cli.run("test.ping", "--pre-flight")
assert ret.returncode == 0
with salt.utils.files.fopen(perms_file) as fp:
data = fp.read()

View file

@ -23,6 +23,7 @@ def test_saltcheck_run_test(salt_ssh_cli):
assert ret.data["status"] == "Pass"
@pytest.mark.skip_on_aarch64
def test_saltcheck_state(salt_ssh_cli):
"""
saltcheck.run_state_tests

View file

@ -666,6 +666,7 @@ def test_privkey_new_with_prereq(x509_salt_call_cli, tmp_path):
assert not _belongs_to(cert_new, pk_cur)
@pytest.mark.skip_on_fips_enabled_platform
@pytest.mark.usefixtures("privkey_new_pkcs12")
@pytest.mark.skipif(
CRYPTOGRAPHY_VERSION[0] < 36,

View file

@ -29,7 +29,7 @@ pytestmark = [
def _get_test_versions_ids(value):
return "SaltMinion~={}".format(value)
return f"SaltMinion~={value}"
@pytest.fixture(
@ -41,13 +41,13 @@ def compat_salt_version(request):
@pytest.fixture(scope="module")
def minion_image_name(compat_salt_version):
return "salt-{}".format(compat_salt_version)
return f"salt-{compat_salt_version}"
@pytest.fixture(scope="function")
def minion_id(compat_salt_version):
return random_string(
"salt-{}-".format(compat_salt_version),
f"salt-{compat_salt_version}-",
uppercase=False,
)
@ -70,7 +70,10 @@ def salt_minion(
config_overrides = {
"master": salt_master.config["interface"],
"user": False,
"pytest-minion": {"log": {"host": host_docker_network_ip_address}},
"pytest-minion": {
"log": {"host": host_docker_network_ip_address},
"returner_address": {"host": host_docker_network_ip_address},
},
# We also want to scrutinize the key acceptance
"open_mode": False,
}

View file

@ -5,17 +5,10 @@ import logging
import shutil
import pytest
from saltfactories.daemons.container import Container
from saltfactories.utils import random_string
import salt.utils.path
from tests.support.sminion import create_sminion
docker = pytest.importorskip("docker")
# pylint: disable=3rd-party-module-not-gated,no-name-in-module
from docker.errors import DockerException # isort:skip
# pylint: enable=3rd-party-module-not-gated,no-name-in-module
pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_binaries_missing("docker"),
@ -26,36 +19,18 @@ log = logging.getLogger(__name__)
@pytest.fixture(scope="session")
def docker_client():
if docker is None:
pytest.skip("The docker python library is not available")
if salt.utils.path.which("docker") is None:
pytest.skip("The docker binary is not available")
try:
client = docker.from_env()
connectable = Container.client_connectable(client)
if connectable is not True: # pragma: no cover
pytest.skip(connectable)
return client
except DockerException:
pytest.skip("Failed to get a connection to docker running on the system")
def docker_network_name():
return random_string("salt-perf-", uppercase=False)
@pytest.fixture(scope="session")
def network():
return "salt-performance"
@pytest.fixture(scope="session")
def host_docker_network_ip_address(network):
def host_docker_network_ip_address(docker_network_name):
sminion = create_sminion()
network_name = network
network_subnet = "10.0.21.0/24"
network_gateway = "10.0.21.1"
try:
ret = sminion.states.docker_network.present(
network_name,
docker_network_name,
driver="bridge",
ipam_pools=[{"subnet": network_subnet, "gateway": network_gateway}],
)
@ -66,7 +41,7 @@ def host_docker_network_ip_address(network):
pytest.skip(f"Failed to create docker network: {ret}")
yield network_gateway
finally:
sminion.states.docker_network.absent(network_name)
sminion.states.docker_network.absent(docker_network_name)
@pytest.fixture(scope="session")

View file

@ -1,40 +1,22 @@
import logging
import os
import shutil
import time
import sys
import pytest
from pytestshellutils.utils import ports
from saltfactories.daemons import master
from saltfactories.daemons.container import SaltDaemon, SaltMinion
from saltfactories.daemons.container import SaltMaster, SaltMinion
from saltfactories.utils import random_string
from salt.version import SaltVersionsInfo, __version__
from salt.version import SaltVersionsInfo
from tests.conftest import CODE_DIR
pytestmark = [pytest.mark.skip_if_binaries_missing("docker")]
log = logging.getLogger(__name__)
class ContainerMaster(SaltDaemon, master.SaltMaster):
"""
Containerized salt master that has no check events
"""
def get_display_name(self):
return master.SaltMaster.get_display_name(self)
def get_check_events(self):
return []
class ContainerMinion(SaltMinion):
"""
Containerized salt minion that has no check events
"""
def get_check_events(self):
return []
# ---------------------- Previous Version Setup ----------------------
pytestmark = [
pytest.mark.skip_on_photonos,
pytest.mark.skip_if_binaries_missing("docker"),
]
@pytest.fixture
@ -42,14 +24,32 @@ def prev_version():
return str(SaltVersionsInfo.previous_release().info[0])
@pytest.fixture
def prev_container_image(shell, prev_version):
container = f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}"
ret = shell.run("docker", "pull", container, check=False)
if ret.returncode:
pytest.skip(f"Failed to pull docker image '{container}':\n{ret}")
return container
@pytest.fixture
def curr_version():
return str(SaltVersionsInfo.current_release().info[0])
@pytest.fixture
def curr_container_image(shell):
container = "ghcr.io/saltstack/salt-ci-containers/salt:latest"
ret = shell.run("docker", "pull", container, check=False)
if ret.returncode:
pytest.skip(f"Failed to pull docker image '{container}':\n{ret}")
return container
@pytest.fixture
def prev_master_id():
return random_string("master-performance-prev-", uppercase=False)
return random_string("master-perf-prev-", uppercase=False)
@pytest.fixture
@ -57,10 +57,10 @@ def prev_master(
request,
salt_factories,
host_docker_network_ip_address,
network,
docker_network_name,
prev_version,
docker_client,
prev_master_id,
prev_container_image,
):
root_dir = salt_factories.get_root_dir_for_daemon(prev_master_id)
conf_dir = root_dir / "conf"
@ -69,35 +69,36 @@ def prev_master(
config_defaults = {
"root_dir": str(root_dir),
"transport": request.config.getoption("--transport"),
"user": False,
"user": "root",
}
publish_port = ports.get_unused_localhost_port()
ret_port = ports.get_unused_localhost_port()
config_overrides = {
"open_mode": True,
"interface": "0.0.0.0",
"publish_port": publish_port,
"ret_port": ret_port,
"publish_port": ports.get_unused_localhost_port(),
"ret_port": ports.get_unused_localhost_port(),
"log_level_logfile": "quiet",
"pytest-master": {
"log": {"host": host_docker_network_ip_address},
"returner_address": {"host": host_docker_network_ip_address},
},
}
factory = salt_factories.salt_master_daemon(
prev_master_id,
name=prev_master_id,
defaults=config_defaults,
overrides=config_overrides,
factory_class=ContainerMaster,
image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version),
factory_class=SaltMaster,
base_script_args=["--log-level=debug"],
image=prev_container_image,
container_run_kwargs={
"network": network,
"network": docker_network_name,
"hostname": prev_master_id,
},
docker_client=docker_client,
name=prev_master_id,
start_timeout=120,
max_start_attempts=1,
max_start_attempts=3,
pull_before_start=False,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
)
with factory.started():
@ -122,7 +123,7 @@ def prev_salt_run_cli(prev_master):
@pytest.fixture
def prev_minion_id():
return random_string(
"minion-performance-prev-",
"minion-perf-prev-",
uppercase=False,
)
@ -131,34 +132,38 @@ def prev_minion_id():
def prev_minion(
prev_minion_id,
prev_master,
docker_client,
prev_version,
host_docker_network_ip_address,
network,
prev_master_id,
docker_network_name,
prev_container_image,
):
config_overrides = {
"master": prev_master_id,
"user": False,
"pytest-minion": {"log": {"host": host_docker_network_ip_address}},
"master": prev_master.id,
"open_mode": True,
"user": "root",
"pytest-minion": {
"log": {"host": host_docker_network_ip_address},
"returner_address": {"host": host_docker_network_ip_address},
},
}
factory = prev_master.salt_minion_daemon(
prev_minion_id,
overrides=config_overrides,
factory_class=ContainerMinion,
# SaltMinion kwargs
name=prev_minion_id,
image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version),
docker_client=docker_client,
start_timeout=120,
pull_before_start=False,
skip_if_docker_client_not_connectable=True,
overrides=config_overrides,
factory_class=SaltMinion,
base_script_args=["--log-level=debug"],
image=prev_container_image,
container_run_kwargs={
"network": network,
"network": docker_network_name,
"hostname": prev_minion_id,
},
max_start_attempts=1,
start_timeout=120,
max_start_attempts=3,
pull_before_start=False,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
)
factory.python_executable = "python3"
factory.after_terminate(
pytest.helpers.remove_stale_minion_key, prev_master, factory.id
)
@ -172,21 +177,38 @@ def prev_sls(sls_contents, state_tree, tmp_path):
location = tmp_path / "prev" / "testfile"
location.parent.mkdir()
with pytest.helpers.temp_file(
"{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree
f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree
):
yield sls_name
# ---------------------- Current Version Setup ----------------------
def _install_salt_in_container(container):
ret = container.run(
"python3",
"-c",
"import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))",
)
assert ret.returncode == 0
if not ret.stdout:
requirements_py_version = "{}.{}".format(*sys.version_info)
else:
requirements_py_version = ret.stdout.strip()
def _install_local_salt(factory):
factory.run("pip install /saltcode")
ret = container.run(
"python3",
"-m",
"pip",
"install",
f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt",
"/salt",
)
log.debug("Install Salt in the container: %s", ret)
assert ret.returncode == 0
@pytest.fixture
def curr_master_id():
return random_string("master-performance-", uppercase=False)
return random_string("master-perf-curr-", uppercase=False)
@pytest.fixture
@ -194,9 +216,9 @@ def curr_master(
request,
salt_factories,
host_docker_network_ip_address,
network,
docker_client,
docker_network_name,
curr_master_id,
curr_container_image,
):
root_dir = salt_factories.get_root_dir_for_daemon(curr_master_id)
conf_dir = root_dir / "conf"
@ -205,43 +227,46 @@ def curr_master(
config_defaults = {
"root_dir": str(root_dir),
"transport": request.config.getoption("--transport"),
"user": False,
"user": "root",
}
publish_port = ports.get_unused_localhost_port()
ret_port = ports.get_unused_localhost_port()
config_overrides = {
"open_mode": True,
"interface": "0.0.0.0",
"publish_port": publish_port,
"ret_port": ret_port,
"log_level_logfile": "quiet",
"pytest-master": {
"log": {"host": host_docker_network_ip_address},
"returner_address": {"host": host_docker_network_ip_address},
},
}
factory = salt_factories.salt_master_daemon(
curr_master_id,
name=curr_master_id,
defaults=config_defaults,
overrides=config_overrides,
factory_class=ContainerMaster,
image="ghcr.io/saltstack/salt-ci-containers/salt:current",
factory_class=SaltMaster,
base_script_args=["--log-level=debug"],
image=curr_container_image,
container_run_kwargs={
"network": network,
"network": docker_network_name,
"hostname": curr_master_id,
# Bind the current code to a directory for pip installing
"volumes": {
os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"}
str(CODE_DIR): {"bind": "/salt", "mode": "z"},
},
},
docker_client=docker_client,
name=curr_master_id,
start_timeout=120,
max_start_attempts=1,
max_start_attempts=3,
pull_before_start=False,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
)
factory.before_start(_install_local_salt, factory)
factory.before_start(_install_salt_in_container, factory)
with factory.started():
yield factory
@ -264,7 +289,7 @@ def curr_salt_key_cli(curr_master):
@pytest.fixture
def curr_minion_id():
return random_string(
"minion-performance-curr-",
"minion-perf-curr-",
uppercase=False,
)
@ -273,38 +298,41 @@ def curr_minion_id():
def curr_minion(
curr_minion_id,
curr_master,
docker_client,
host_docker_network_ip_address,
network,
curr_master_id,
docker_network_name,
curr_container_image,
):
config_overrides = {
"master": curr_master_id,
"user": False,
"pytest-minion": {"log": {"host": host_docker_network_ip_address}},
"master": curr_master.id,
"open_mode": True,
"user": "root",
"pytest-minion": {
"log": {"host": host_docker_network_ip_address},
"returner_address": {"host": host_docker_network_ip_address},
},
}
factory = curr_master.salt_minion_daemon(
curr_minion_id,
overrides=config_overrides,
factory_class=ContainerMinion,
# SaltMinion kwargs
name=curr_minion_id,
image="ghcr.io/saltstack/salt-ci-containers/salt:current",
docker_client=docker_client,
start_timeout=120,
pull_before_start=False,
skip_if_docker_client_not_connectable=True,
overrides=config_overrides,
factory_class=SaltMinion,
base_script_args=["--log-level=debug"],
image=curr_container_image,
container_run_kwargs={
"network": network,
"network": docker_network_name,
"hostname": curr_minion_id,
# Bind the current code to a directory for pip installing
"volumes": {
os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"}
str(CODE_DIR): {"bind": "/salt", "mode": "z"},
},
},
max_start_attempts=1,
start_timeout=120,
max_start_attempts=3,
pull_before_start=False,
skip_on_pull_failure=True,
skip_if_docker_client_not_connectable=True,
)
factory.before_start(_install_local_salt, factory)
factory.before_start(_install_salt_in_container, factory)
factory.after_terminate(
pytest.helpers.remove_stale_minion_key, curr_master, factory.id
)
@ -318,25 +346,25 @@ def curr_sls(sls_contents, state_tree, tmp_path):
location = tmp_path / "curr" / "testfile"
location.parent.mkdir()
with pytest.helpers.temp_file(
"{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree
f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree
):
yield sls_name
def _wait_for_stdout(expected, func, *args, timeout=120, **kwargs):
start = time.time()
while time.time() < start + timeout:
ret = func(*args, **kwargs)
if ret and ret.stdout and expected in ret.stdout:
break
time.sleep(1)
else:
pytest.skip(
f"Skipping test, one or more daemons failed to start: {expected} not found in {ret}"
)
@pytest.fixture
def perf_state_name(state_tree, curr_master, prev_master):
# Copy all of the needed files to both master file roots directories
subdir = random_string("perf-state-")
shutil.copytree(
state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir)
)
shutil.copytree(
state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir)
)
return subdir
@pytest.mark.flaky(max_runs=4)
def test_performance(
prev_salt_cli,
prev_minion,
@ -353,48 +381,8 @@ def test_performance(
prev_sls,
curr_sls,
curr_version,
perf_state_name,
):
# Copy all of the needed files to both master file roots directories
subdir = random_string("performance-")
shutil.copytree(
state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir)
)
shutil.copytree(
state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir)
)
# Wait for the old master and minion to start
_wait_for_stdout(
prev_version, prev_master.run, *prev_salt_run_cli.cmdline("--version")
)
salt_key_cmd = [
comp
for comp in prev_salt_key_cli.cmdline("-Ay")
if not comp.startswith("--log-level")
]
_wait_for_stdout(prev_minion.id, prev_master.run, *salt_key_cmd)
_wait_for_stdout(
"Salt: {}".format(prev_version),
prev_master.run,
*prev_salt_cli.cmdline("test.versions", minion_tgt=prev_minion.id),
)
# Wait for the new master and minion to start
_wait_for_stdout(
curr_version, curr_master.run, *curr_salt_run_cli.cmdline("--version")
)
curr_key_cmd = [
comp
for comp in curr_salt_key_cli.cmdline("-Ay")
if not comp.startswith("--log-level")
]
_wait_for_stdout(curr_minion.id, curr_master.run, *curr_key_cmd)
_wait_for_stdout(
"Salt: {}".format(curr_version),
curr_master.run,
*curr_salt_cli.cmdline("test.versions", minion_tgt=curr_minion.id),
)
# Let's now apply the states
applies = os.environ.get("SALT_PERFORMANCE_TEST_APPLIES", 3)
@ -423,7 +411,9 @@ def test_performance(
for _ in range(applies):
prev_state_ret = prev_master.run(
*prev_salt_cli.cmdline(
"state.apply", f"{subdir}.{prev_sls}", minion_tgt=prev_minion.id
"state.apply",
f"{perf_state_name}.{prev_sls}",
minion_tgt=prev_minion.id,
)
)
prev_duration += _gather_durations(prev_state_ret, prev_minion.id)
@ -431,7 +421,9 @@ def test_performance(
for _ in range(applies):
curr_state_ret = curr_master.run(
*curr_salt_cli.cmdline(
"state.apply", f"{subdir}.{curr_sls}", minion_tgt=curr_minion.id
"state.apply",
f"{perf_state_name}.{curr_sls}",
minion_tgt=curr_minion.id,
)
)
curr_duration += _gather_durations(curr_state_ret, curr_minion.id)

View file

@ -19,17 +19,13 @@ log = logging.getLogger(__name__)
@pytest.fixture
def opts(tmp_path):
return {
"argv": [
"ssh.set_auth_key",
"root",
"hobn+amNAXSBTiOXEqlBjGB...rsa root@master",
],
"__role": "master",
"cachedir": str(tmp_path),
"extension_modules": str(tmp_path / "extmods"),
}
def opts(master_opts):
master_opts["argv"] = [
"ssh.set_auth_key",
"root",
"hobn+amNAXSBTiOXEqlBjGB...rsa root@master",
]
return master_opts
@pytest.fixture
@ -411,6 +407,10 @@ def test_run_ssh_pre_flight_no_connect(opts, target, tmp_path, caplog):
with caplog.at_level(logging.TRACE):
with patch_send, patch_exec_cmd, patch_tmp:
ret = single.run_ssh_pre_flight()
# Flush the logging handler just to be sure
caplog.handler.flush()
assert "Copying the pre flight script" in caplog.text
assert "Could not copy the pre flight script to target" in caplog.text
assert ret == ret_send
@ -503,6 +503,9 @@ def test_run_ssh_pre_flight_connect(opts, target, tmp_path, caplog):
with patch_send, patch_exec_cmd, patch_tmp:
ret = single.run_ssh_pre_flight()
# Flush the logging handler just to be sure
caplog.handler.flush()
assert "Executing the pre flight script on target" in caplog.text
assert ret == ret_exec_cmd
assert send_mock.call_args_list[0][0][0] == tmp_file

View file

@ -0,0 +1,82 @@
import logging
import pytest
from saltfactories.utils.tempfiles import temp_directory
import salt.client.ssh.__init__ as dunder_ssh
from salt.exceptions import SaltClientError, SaltSystemExit
from tests.support.mock import MagicMock, patch
pytestmark = [pytest.mark.skip_unless_on_linux(reason="Test ssh only run on Linux")]
log = logging.getLogger(__name__)
def test_salt_refs():
    """salt_refs extracts only ``salt://`` URLs from a string or list input."""
    # A bare string without the salt:// protocol yields nothing.
    assert dunder_ssh.salt_refs("cats") == []
    # A single salt:// string comes back as a one-element list.
    assert dunder_ssh.salt_refs("salt://test_salt_ref") == ["salt://test_salt_ref"]
    # Lists are filtered: entries without the protocol are dropped.
    assert dunder_ssh.salt_refs(["cats"]) == []
    assert dunder_ssh.salt_refs(
        ["salt://test_salt_ref1", "salt://test_salt_ref2", "cats"]
    ) == ["salt://test_salt_ref1", "salt://test_salt_ref2"]
def test_convert_args():
    """_convert_args flattens a mixed arg list, expanding kwarg dicts to k=v."""
    mixed_args = [
        "arg1",
        {"key1": "value1", "key2": "value2", "__kwarg__": "kwords"},
        "dog1",
    ]
    converted = dunder_ssh._convert_args(mixed_args)
    # The __kwarg__ marker itself must not survive the conversion.
    assert converted == ["arg1", "key1=value1", "key2=value2", "dog1"]
def test_ssh_class():
    """SSH() must abort with a clear error when ssh keys cannot be generated.

    Covers two failure modes:
    - no ``ssh`` binary on the path -> SaltSystemExit
    - key generation raising OSError -> SaltClientError
    """
    with temp_directory() as temp_dir:
        assert temp_dir.is_dir()
        opts = {
            "sock_dir": temp_dir,
            "regen_thin": False,
            "__master_opts__": {"pki_dir": "pki"},
            "selected_target_option": None,
            "tgt": "*",
            "tgt_type": "glob",
            "fileserver_backend": ["roots"],
            "cachedir": "/tmp",
            "thin_extra_mods": "",
            "ssh_ext_alternatives": None,
        }

        # No ssh binary available at all.
        with patch("salt.utils.path.which", return_value=""), pytest.raises(
            SaltSystemExit
        ) as err:
            test_ssh = dunder_ssh.SSH(opts)
        assert (
            "salt-ssh could not be run because it could not generate keys."
            in str(err.value)
        )

        # ssh exists, but generating the key fails.
        with patch("salt.utils.path.which", return_value="/usr/bin/ssh"), patch(
            "os.path.isfile", return_value=False
        ), patch(
            "salt.client.ssh.shell.gen_key", MagicMock(side_effect=OSError())
        ), pytest.raises(
            SaltClientError
        ) as err:
            test_ssh = dunder_ssh.SSH(opts)
        # Fixed: the original asserted ``... in err.value`` — exceptions are
        # not containers, so that raised TypeError instead of checking the
        # message. Compare against str(err.value), matching the first check.
        assert (
            "salt-ssh could not be run because it could not generate keys."
            in str(err.value)
        )

View file

@ -0,0 +1,275 @@
import pytest
import salt.client
from salt.exceptions import SaltInvocationError
@pytest.fixture
def local_client():
    """Provide a LocalClient built from the default master config."""
    client = salt.client.get_local_client()
    return client
def test_get_local_client(local_client):
    """
    Test that a local client is created
    """
    # get_local_client() with no args must hand back a LocalClient instance.
    assert isinstance(local_client, salt.client.LocalClient)
def test_get_local_client_mopts(master_opts):
    """Passing mopts= must seed the client's opts with that dict verbatim."""
    master_opts["rest_cherrypy"] = {"port": 8000}
    client = salt.client.get_local_client(mopts=master_opts)
    assert isinstance(client, salt.client.LocalClient)
    assert client.opts == master_opts
@pytest.mark.parametrize(
    "val, expected",
    ((None, 5), (7, 7), ("9", 9), ("eleven", 5), (["13"], 5)),
)
def test_local_client_get_timeout(local_client, val, expected):
    """Numeric-ish timeouts are coerced; anything unparsable falls back to 5."""
    result = local_client._get_timeout(timeout=val)
    assert result == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        ("group1", ["L@spongebob,patrick"]),
        ("group2", ["G@os:squidward"]),
        ("group3", ["(", "G@os:plankton", "and", "(", "L@spongebob,patrick", ")", ")"]),
    ),
)
def test_resolve_nodegroup(master_opts, val, expected):
    """Nodegroups resolve to compound-matcher tokens, expanding N@ references."""
    master_opts["nodegroups"] = {
        "group1": "L@spongebob,patrick",
        "group2": "G@os:squidward",
        "group3": "G@os:plankton and N@group1",
    }
    client = salt.client.get_local_client(mopts=master_opts)
    resolved = client._resolve_nodegroup(val)
    assert resolved == expected
def test_resolve_nodegroup_error(master_opts):
    """Resolving a nodegroup that is not defined raises SaltInvocationError."""
    master_opts["nodegroups"] = {
        "group1": "L@spongebob,patrick",
        "group2": "G@os:squidward",
        "group3": "G@os:plankton and N@group1",
    }
    client = salt.client.get_local_client(mopts=master_opts)
    with pytest.raises(SaltInvocationError):
        client._resolve_nodegroup("missing")
def test_prep_pub(local_client):
    """_prep_pub builds the baseline publish payload for a glob target."""
    expected = {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "",
        "tgt": "*",
        "tgt_type": "glob",
        "user": local_client.salt_user,
    }
    payload = local_client._prep_pub(
        tgt="*",
        fun="test.ping",
        arg="",
        tgt_type="glob",
        ret="",
        jid="123",
        timeout=7,
    )
    assert payload == expected
def test_prep_pub_kwargs(local_client):
    """Extra keyword args are tucked under a nested "kwargs" key."""
    payload = local_client._prep_pub(
        tgt="*",
        fun="test.ping",
        arg="",
        tgt_type="glob",
        ret="",
        jid="123",
        timeout=7,
        some_kwarg="spongebob",
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "",
        "tgt": "*",
        "tgt_type": "glob",
        "user": local_client.salt_user,
        "kwargs": {
            "some_kwarg": "spongebob",
        },
    }
def test_prep_pub_order_masters(master_opts):
    """With order_masters set, the payload gains a "to" field carrying the timeout."""
    master_opts["order_masters"] = True
    client = salt.client.get_local_client(mopts=master_opts)
    payload = client._prep_pub(
        tgt="*",
        fun="test.ping",
        arg="",
        tgt_type="glob",
        ret="",
        jid="123",
        timeout=7,
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "",
        "tgt": "*",
        "tgt_type": "glob",
        "to": 7,
        "user": client.salt_user,
    }
def test_prep_pub_nodegroup(master_opts):
    """A nodegroup target is resolved to its compound expression before publish."""
    master_opts["nodegroups"] = {
        "group1": "L@spongebob,patrick",
        "group2": "G@os:squidward",
        "group3": "G@os:plankton and N@group1",
    }
    client = salt.client.get_local_client(mopts=master_opts)
    payload = client._prep_pub(
        tgt="group1",
        fun="test.ping",
        arg="",
        tgt_type="nodegroup",
        ret="",
        jid="123",
        timeout=7,
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "",
        "tgt": "L@spongebob,patrick",
        "tgt_type": "compound",
        "user": client.salt_user,
    }
def test_prep_pub_compound(local_client):
    """A compound target string passes through to the payload untouched."""
    payload = local_client._prep_pub(
        tgt="spongebob,patrick",
        fun="test.ping",
        arg="",
        tgt_type="compound",
        ret="",
        jid="123",
        timeout=7,
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "",
        "tgt": "spongebob,patrick",
        "tgt_type": "compound",
        "user": local_client.salt_user,
    }
def test_prep_pub_compound_nodegroup(master_opts):
    """An N@ reference inside a compound target is expanded to the group expr."""
    master_opts["nodegroups"] = {
        "group1": "L@spongebob,patrick",
        "group2": "G@os:squidward",
        "group3": "G@os:plankton and N@group1",
    }
    client = salt.client.get_local_client(mopts=master_opts)
    payload = client._prep_pub(
        tgt="N@group1",
        fun="test.ping",
        arg="",
        tgt_type="compound",
        ret="",
        jid="123",
        timeout=7,
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "",
        "tgt": "L@spongebob,patrick",
        "tgt_type": "compound",
        "user": client.salt_user,
    }
def test_prep_pub_ext_job_cache(master_opts):
    """With ext_job_cache configured and no returner given, it becomes "ret"."""
    master_opts["ext_job_cache"] = "mysql"
    client = salt.client.get_local_client(mopts=master_opts)
    payload = client._prep_pub(
        tgt="spongebob,patrick",
        fun="test.ping",
        arg="",
        tgt_type="glob",
        ret="",
        jid="123",
        timeout=7,
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "mysql",
        "tgt": "spongebob,patrick",
        "tgt_type": "glob",
        "user": client.salt_user,
    }
def test_prep_pub_ext_job_cache_existing(master_opts):
    """An explicit returner is kept and the ext_job_cache is appended to it."""
    master_opts["ext_job_cache"] = "mysql"
    client = salt.client.get_local_client(mopts=master_opts)
    payload = client._prep_pub(
        tgt="spongebob,patrick",
        fun="test.ping",
        arg="",
        tgt_type="glob",
        ret="postgres",
        jid="123",
        timeout=7,
    )
    assert payload == {
        "arg": "",
        "cmd": "publish",
        "fun": "test.ping",
        "jid": "123",
        "key": "",
        "ret": "postgres,mysql",
        "tgt": "spongebob,patrick",
        "tgt_type": "glob",
        "user": client.salt_user,
    }

View file

@ -192,6 +192,7 @@ def test_vm_config_merger_nooverridevalue():
assert expected == vm
@pytest.mark.skip_on_fips_enabled_platform
def test_cloud_run_profile_create_returns_boolean(master_config):
master_config["profiles"] = {"test_profile": {"provider": "test_provider:saltify"}}

View file

@ -99,6 +99,8 @@ def salt_cloud_config_file(salt_master_factory):
return os.path.join(salt_master_factory.config_dir, "cloud")
# The cloud map merge uses python's multiprocessing manager which authenticates using HMAC and MD5
@pytest.mark.skip_on_fips_enabled_platform
def test_cloud_map_merge_conf(salt_cloud_config_file, grains):
"""
Ensure that nested values can be selectivly overridden in a map file

View file

@ -61,6 +61,7 @@ def test_base64_decodestring(the_string, the_string_base64):
assert hashutil.base64_decodestring(the_string_base64) == the_string
@pytest.mark.skip_on_fips_enabled_platform
def test_md5_digest(the_string, the_string_md5):
assert hashutil.md5_digest(the_string) == the_string_md5

File diff suppressed because it is too large Load diff

View file

@ -2,6 +2,7 @@ import datetime
import re
import pytest
from pytestskipmarkers.utils import platform
import salt.modules.config as configmod
import salt.modules.postgres as postgres
@ -117,6 +118,8 @@ def idfn(val):
ids=idfn,
)
def test_verify_password(role, password, verifier, method, result):
if platform.is_fips_enabled() and (method == "md5" or verifier == md5_pw):
pytest.skip("Test cannot run on a FIPS enabled platform")
assert postgres._verify_password(role, password, verifier, method) == result
@ -971,6 +974,7 @@ def test_user_update3():
)
@pytest.mark.skip_on_fips_enabled_platform
def test_user_update_encrypted_passwd():
with patch(
"salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0})
@ -1226,6 +1230,7 @@ def test_create_extension_newerthan():
assert not postgres.create_extension("foo", ext_version="a", schema="b")
@pytest.mark.skip_on_fips_enabled_platform
def test_encrypt_passwords():
assert postgres._maybe_encrypt_password("foo", "bar", False) == "bar"
assert (

View file

@ -1,3 +1,5 @@
import pathlib
import pytest
import salt.modules.saltutil as saltutil
@ -7,8 +9,14 @@ from tests.support.mock import sentinel as s
@pytest.fixture
def configure_loader_modules():
return {saltutil: {"__opts__": {"file_client": "local"}}}
def configure_loader_modules(minion_opts):
minion_opts["file_client"] = "local"
minion_opts["master_uri"] = "tcp://127.0.0.1:4505"
return {
saltutil: {
"__opts__": minion_opts,
}
}
def test_exec_kwargs():
@ -90,12 +98,24 @@ def test_refresh_grains_default_clean_pillar_cache():
refresh_pillar.assert_called_with(clean_cache=False)
def test_refresh_grains_default_clean_pillar_cache_with_refresh_false():
with patch("salt.modules.saltutil.refresh_modules") as refresh_modules:
saltutil.refresh_grains(refresh_pillar=False)
refresh_modules.assert_called()
def test_refresh_grains_clean_pillar_cache():
with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar:
saltutil.refresh_grains(clean_pillar_cache=True)
refresh_pillar.assert_called_with(clean_cache=True)
def test_refresh_grains_clean_pillar_cache_with_refresh_false():
with patch("salt.modules.saltutil.refresh_modules") as refresh_modules:
saltutil.refresh_grains(clean_pillar_cache=True, refresh_pillar=False)
refresh_modules.assert_called()
def test_sync_grains_default_clean_pillar_cache():
with patch("salt.modules.saltutil._sync"):
with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar:
@ -136,3 +156,42 @@ def test_sync_all_clean_pillar_cache():
with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar:
saltutil.sync_all(clean_pillar_cache=True)
refresh_pillar.assert_called_with(clean_cache=True)
def test_list_extmods(salt_call_cli, minion_opts):
pathlib.Path(minion_opts["cachedir"], "extmods", "dummydir").mkdir(
parents=True, exist_ok=True
)
ret = saltutil.list_extmods()
assert "dummydir" in ret
assert ret["dummydir"] == []
def test_refresh_beacons():
ret = saltutil.refresh_beacons()
assert ret is False
def test_refresh_matchers():
ret = saltutil.refresh_matchers()
assert ret is False
def test_refresh_modules_async_false():
kwargs = {"async": False}
ret = saltutil.refresh_modules(**kwargs)
assert ret is False
def test_clear_job_cache(salt_call_cli, minion_opts):
pathlib.Path(minion_opts["cachedir"], "minion_jobs", "dummydir").mkdir(
parents=True, exist_ok=True
)
ret = saltutil.clear_job_cache(hours=1)
assert ret is True
@pytest.mark.destructive_test
def test_regen_keys(salt_call_cli, minion_opts):
pathlib.Path(minion_opts["pki_dir"], "dummydir").mkdir(parents=True, exist_ok=True)
saltutil.regen_keys()

View file

@ -1,3 +1,5 @@
import re
import pytest
import salt.modules.selinux as selinux
@ -376,3 +378,35 @@ SELINUXTYPE=targeted
for line in writes:
if line.startswith("SELINUX="):
assert line == "SELINUX=disabled"
@pytest.mark.parametrize(
"name,sel_type",
(
("/srv/ssl/ldap/.*[.]key", "slapd_cert_t"),
("/srv/ssl/ldap(/.*[.](pem|crt))?", "cert_t"),
),
)
def test_selinux_add_policy_regex(name, sel_type):
"""
Test adding policy with regex components parsing the stdout response of restorecon used in fcontext_policy_applied, new style.
"""
mock_cmd_shell = MagicMock(return_value={"retcode": 0})
mock_cmd_run_all = MagicMock(return_value={"retcode": 0})
with patch.dict(selinux.__salt__, {"cmd.shell": mock_cmd_shell}), patch.dict(
selinux.__salt__, {"cmd.run_all": mock_cmd_run_all}
):
selinux.fcontext_add_policy(name, sel_type=sel_type)
filespec = re.escape(name)
expected_cmd_shell = f"semanage fcontext -l | egrep '{filespec}'"
mock_cmd_shell.assert_called_once_with(
expected_cmd_shell,
ignore_retcode=True,
)
expected_cmd_run_all = (
f"semanage fcontext --modify --type {sel_type} {filespec}"
)
mock_cmd_run_all.assert_called_once_with(
expected_cmd_run_all,
)

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,410 @@
import socket
import pytest
import salt.modules.cmdmod
import salt.modules.win_file
import salt.modules.win_lgpo as win_lgpo
from salt.exceptions import CommandExecutionError
from tests.support.mock import patch
try:
import win32security as ws
HAS_WIN32 = True
except ImportError:
HAS_WIN32 = False
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
pytest.mark.slow_test,
pytest.mark.skipif(not HAS_WIN32, reason="Failed to import win32security"),
]
@pytest.fixture
def configure_loader_modules():
    """Wire up the minimal real __salt__ functions win_lgpo needs here."""
    return {
        win_lgpo: {
            "__salt__": {
                "cmd.run": salt.modules.cmdmod.run,
                "file.file_exists": salt.modules.win_file.file_exists,
                "file.remove": salt.modules.win_file.remove,
            },
        },
    }
@pytest.fixture(scope="module")
def pol_info():
    """Module-scoped _policy_info instance shared by the conversion tests."""
    return win_lgpo._policy_info()
@pytest.mark.parametrize(
    "val, expected",
    (
        (0, False),
        (1, True),
        ("", False),
        ("text", True),
        ([], False),
        ([1, 2, 3], True),
    ),
)
def test_notEmpty(pol_info, val, expected):
    """_notEmpty mirrors Python truthiness for policy values."""
    assert pol_info._notEmpty(val) is expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        (0, 0),
        (86400, 1),
    ),
)
def test_seconds_to_days(pol_info, val, expected):
    """Seconds convert to whole days; None maps to "Not Defined"."""
    assert pol_info._seconds_to_days(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        (0, 0),
        (1, 86400),
    ),
)
def test_days_to_seconds(pol_info, val, expected):
    """Days convert to seconds; None maps to "Not Defined"."""
    assert pol_info._days_to_seconds(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        (0, 0),
        (60, 1),
    ),
)
def test_seconds_to_minutes(pol_info, val, expected):
    """Seconds convert to whole minutes; None maps to "Not Defined"."""
    assert pol_info._seconds_to_minutes(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        (0, 0),
        (1, 60),
    ),
)
def test_minutes_to_seconds(pol_info, val, expected):
    """Minutes convert to seconds; None maps to "Not Defined"."""
    assert pol_info._minutes_to_seconds(val) == expected
def test_strip_quotes(pol_info):
    """Surrounding double quotes are removed from the value."""
    assert pol_info._strip_quotes('"spongebob"') == "spongebob"
def test_add_quotes(pol_info):
    """The value is wrapped in double quotes."""
    assert pol_info._add_quotes("squarepants") == '"squarepants"'
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        (chr(0), "Disabled"),
        (chr(1), "Enabled"),
        (chr(2), "Invalid Value: {!r}".format(chr(2))),
        ("patrick", "Invalid Value"),
    ),
)
def test_binary_enable_zero_disable_one_conversion(pol_info, val, expected):
    """Binary \\x00/\\x01 registry values map to Disabled/Enabled labels."""
    assert pol_info._binary_enable_zero_disable_one_conversion(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, None),
        ("Disabled", chr(0)),
        ("Enabled", chr(1)),
        ("Junk", None),
    ),
)
def test_binary_enable_zero_disable_one_reverse_conversion(pol_info, val, expected):
    """Disabled/Enabled labels map back to binary \\x00/\\x01; junk -> None."""
    assert pol_info._binary_enable_zero_disable_one_reverse_conversion(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        ("0", "Administrators"),
        (0, "Administrators"),
        ("", "Administrators"),
        ("1", "Administrators and Power Users"),
        (1, "Administrators and Power Users"),
        ("2", "Administrators and Interactive Users"),
        (2, "Administrators and Interactive Users"),
        (3, "Not Defined"),
    ),
)
def test_dasd_conversion(pol_info, val, expected):
    """Removable-media access codes map to their descriptive group labels."""
    assert pol_info._dasd_conversion(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        ("Administrators", "0"),
        ("Administrators and Power Users", "1"),
        ("Administrators and Interactive Users", "2"),
        ("Not Defined", "9999"),
        ("Plankton", "Invalid Value"),
    ),
)
def test_dasd_reverse_conversion(pol_info, val, expected):
    """Group labels map back to removable-media access code strings."""
    assert pol_info._dasd_reverse_conversion(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        ("Not Defined", True),
        (None, False),
        (1, True),
        (3, False),
        ("spongebob", False),
    ),
)
def test_in_range_inclusive(pol_info, val, expected):
    """Range check accepts "Not Defined" and in-range ints; rejects the rest."""
    assert pol_info._in_range_inclusive(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        ("3,1,2", "Not Defined"),
        ("3,0", "Silently Succeed"),
        ("3,1", "Warn but allow installation"),
        ("3,2", "Do not allow installation"),
        ("3,Not Defined", "Not Defined"),
        ("3,spongebob", "Invalid Value"),
    ),
)
def test_driver_signing_reg_conversion(pol_info, val, expected):
    """"3,<code>" driver-signing registry values map to their policy labels."""
    assert pol_info._driver_signing_reg_conversion(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Defined"),
        ("Silently Succeed", "3,0"),
        ("Warn but allow installation", f"3,{chr(1)}"),
        ("Do not allow installation", f"3,{chr(2)}"),
        ("spongebob", "Invalid Value"),
    ),
)
def test_driver_signing_reg_reverse_conversion(pol_info, val, expected):
    """Policy labels map back to "3,<code>" driver-signing registry values."""
    assert pol_info._driver_signing_reg_reverse_conversion(val) == expected
# For the next 3 tests we can't use the parametrized decorator because the
# decorator is evaluated before the imports happen, so the HAS_WIN32 is ignored
# and the decorator tries to evaluate the win32security library on systems
# without pyWin32
def test_sidConversion_no_conversion(pol_info):
    """A SID with no friendly account name comes back as its string form."""
    val = ws.ConvertStringSidToSid("S-1-5-0")
    expected = ["S-1-5-0"]
    assert pol_info._sidConversion([val]) == expected
def test_sidConversion_everyone(pol_info):
    """The well-known S-1-1-0 SID resolves to "Everyone"."""
    val = ws.ConvertStringSidToSid("S-1-1-0")
    expected = ["Everyone"]
    assert pol_info._sidConversion([val]) == expected
def test_sidConversion_administrator(pol_info):
    """A local account SID resolves to HOSTNAME\\AccountName form."""
    val = ws.LookupAccountName("", "Administrator")[0]
    expected = [f"{socket.gethostname()}\\Administrator"]
    assert pol_info._sidConversion([val]) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, None),
        ("", ""),
    ),
)
def test_usernamesToSidObjects_empty_value(pol_info, val, expected):
    """Empty/None input passes through unchanged."""
    assert pol_info._usernamesToSidObjects(val) == expected
def test_usernamesToSidObjects_string_list(pol_info):
    """A comma-separated username string resolves to a list of SID objects."""
    val = "Administrator,Guest"
    admin_sid = ws.LookupAccountName("", "Administrator")[0]
    guest_sid = ws.LookupAccountName("", "Guest")[0]
    expected = [admin_sid, guest_sid]
    assert pol_info._usernamesToSidObjects(val) == expected
def test_usernamesToSidObjects_string_list_error(pol_info):
    """Unknown account names raise CommandExecutionError."""
    val = "spongebob,squarepants"
    with pytest.raises(CommandExecutionError):
        pol_info._usernamesToSidObjects(val)
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, "Not Configured"),
        ("None", "Not Configured"),
        ("true", "Run Windows PowerShell scripts first"),
        ("false", "Run Windows PowerShell scripts last"),
        ("spongebob", "Invalid Value"),
    ),
)
def test_powershell_script_order_conversion(pol_info, val, expected):
    """"true"/"false" registry strings map to script-ordering labels."""
    assert pol_info._powershell_script_order_conversion(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        ("Not Configured", None),
        ("Run Windows PowerShell scripts first", "true"),
        ("Run Windows PowerShell scripts last", "false"),
        ("spongebob", "Invalid Value"),
    ),
)
def test_powershell_script_order_reverse_conversion(pol_info, val, expected):
    """Script-ordering labels map back to "true"/"false" registry strings."""
    assert pol_info._powershell_script_order_reverse_conversion(val) == expected
def test_dict_lookup(pol_info):
    """_dict_lookup maps keys to values (or back) and flags unknown input."""
    lookup = {
        "spongebob": "squarepants",
        "patrick": "squidward",
        "plankton": "mr.crabs",
    }
    # Forward lookup by key.
    assert pol_info._dict_lookup("spongebob", lookup=lookup) == "squarepants"
    # Reverse lookup by value.
    reverse = pol_info._dict_lookup("squarepants", lookup=lookup, value_lookup=True)
    assert reverse == "spongebob"
    # Unknown key, unknown value, and a missing lookup table all fail alike.
    assert pol_info._dict_lookup("homer", lookup=lookup) == "Invalid Value"
    assert (
        pol_info._dict_lookup("homer", lookup=lookup, value_lookup=True)
        == "Invalid Value"
    )
    assert pol_info._dict_lookup("homer") == "Invalid Value"
def test_dict_lookup_bitwise_add(pol_info):
    """Bitwise-add lookup: int values decompose into flag-name lists and back."""
    lookup = {
        0: "spongebob",
        1: "squarepants",
        2: "patrick",
    }
    # "Not Defined" is passed through as None.
    assert pol_info._dict_lookup_bitwise_add("Not Defined") is None
    # Invalid input shapes produce descriptive error strings.
    assert (
        pol_info._dict_lookup_bitwise_add("not a list", value_lookup=True)
        == "Invalid Value: Not a list"
    )
    assert (
        pol_info._dict_lookup_bitwise_add([], value_lookup=True)
        == "Invalid Value: No lookup passed"
    )
    assert (
        pol_info._dict_lookup_bitwise_add("not an int") == "Invalid Value: Not an int"
    )
    # Zero yields no flags unless test_zero is set (see last assertion).
    assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == []
    # Reverse lookup ORs the matching bit values together.
    assert (
        pol_info._dict_lookup_bitwise_add(
            ["spongebob", "squarepants"], lookup=lookup, value_lookup=True
        )
        == 1
    )
    assert pol_info._dict_lookup_bitwise_add(1, lookup=lookup) == ["squarepants"]
    # NOTE(review): duplicate of the 0-lookup assertion above.
    assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == []
    assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup, test_zero=True) == [
        "spongebob"
    ]
@pytest.mark.parametrize(
    "val, expected",
    (
        (["list", "of", "items"], ["list", "of", "items"]),
        ("Not Defined", None),
        ("list,of,items", ["list", "of", "items"]),
        (7, "Invalid Value"),
    ),
)
def test_multi_string_put_transform(pol_info, val, expected):
    """Put-transform accepts lists and comma strings; rejects non-strings."""
    assert pol_info._multi_string_put_transform(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        (["list", "of", "items"], ["list", "of", "items"]),
        (None, "Not Defined"),
        ("list,of,items", "Invalid Value"),
        (7, "Invalid Value"),
    ),
)
def test_multi_string_get_transform(pol_info, val, expected):
    """Get-transform only accepts lists (or None); strings/ints are invalid."""
    assert pol_info._multi_string_get_transform(val) == expected
@pytest.mark.parametrize(
    "val, expected",
    (
        ("String Item", "String Item"),
        ("Not Defined", None),
        (7, None),
    ),
)
def test_string_put_transform(pol_info, val, expected):
    """String put-transform passes strings through; everything else -> None."""
    assert pol_info._string_put_transform(val) == expected
def test__virtual__(pol_info):
    """__virtual__ loads as "lgpo" only on Windows with its modules present."""
    # Happy path: Windows with all required modules loadable.
    assert win_lgpo.__virtual__() == "lgpo"
    # Non-Windows platforms are rejected with a reason.
    with patch("salt.utils.platform.is_windows", return_value=False):
        assert win_lgpo.__virtual__() == (
            False,
            "win_lgpo: Not a Windows System",
        )
    # Windows, but the pywin32-backed modules failed to import.
    with patch.object(win_lgpo, "HAS_WINDOWS_MODULES", False):
        assert win_lgpo.__virtual__() == (
            False,
            "win_lgpo: Required modules failed to load",
        )
@pytest.mark.parametrize(
    "val, expected",
    (
        (None, b"\x00\x00"),
        ("spongebob", b"s\x00p\x00o\x00n\x00g\x00e\x00b\x00o\x00b\x00\x00\x00"),
    ),
)
def test_encode_string(val, expected):
    """Strings encode to null-terminated UTF-16-LE bytes; None -> terminator only."""
    assert win_lgpo._encode_string(val) == expected
def test_encode_string_error():
    """Non-string input to _encode_string raises TypeError."""
    with pytest.raises(TypeError):
        win_lgpo._encode_string(1)

View file

@ -0,0 +1,168 @@
import pytest
import salt.modules.win_file as win_file
import salt.modules.win_lgpo as win_lgpo
import salt.utils.win_dacl as win_dacl
import salt.utils.win_lgpo_auditpol as auditpol
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
pytest.mark.destructive_test,
pytest.mark.slow_test,
]
@pytest.fixture
def configure_loader_modules(tmp_path):
    """Wire win_lgpo/auditpol/win_file dunders with a per-test cachedir."""
    # A dedicated cache dir so ADMX policy caching does not leak across tests.
    cachedir = tmp_path / "__test_admx_policy_cache_dir"
    cachedir.mkdir(parents=True, exist_ok=True)
    return {
        win_lgpo: {
            "__opts__": {"cachedir": cachedir},
            "__salt__": {
                "file.copy": win_file.copy,
                "file.file_exists": win_file.file_exists,
                "file.makedirs": win_file.makedirs_,
                "file.remove": win_file.remove,
                "file.write": win_file.write,
            },
            "__utils__": {
                "auditpol.get_auditpol_dump": auditpol.get_auditpol_dump,
                "auditpol.set_setting": auditpol.set_setting,
            },
        },
        auditpol: {
            "__context__": {},
        },
        win_file: {
            "__utils__": {
                "dacl.set_perms": win_dacl.set_perms,
            },
        },
    }
@pytest.fixture(scope="module")
def disable_legacy_auditing():
    """Enable advanced audit policy for the module and restore settings after.

    Saves the pre-test values of both policies, enables
    SceNoApplyLegacyAuditPolicy for the duration, and restores both in the
    finally block regardless of test outcome.
    """
    # To test and use these policy settings we have to set one of the policies to Enabled
    # Location: Windows Settings -> Security Settings -> Local Policies -> Security Options
    # Policy: "Audit: Force audit policy subcategory settings..."
    # Short Name: SceNoApplyLegacyAuditPolicy
    from tests.support.sminion import create_sminion

    salt_minion = create_sminion()
    test_setting = "Enabled"
    pre_security_setting = salt_minion.functions.lgpo.get_policy(
        policy_name="SceNoApplyLegacyAuditPolicy", policy_class="machine"
    )
    pre_audit_setting = salt_minion.functions.lgpo.get_policy(
        policy_name="Audit User Account Management", policy_class="machine"
    )
    try:
        if pre_security_setting != test_setting:
            salt_minion.functions.lgpo.set_computer_policy(
                name="SceNoApplyLegacyAuditPolicy", setting=test_setting
            )
            assert (
                salt_minion.functions.lgpo.get_policy(
                    policy_name="SceNoApplyLegacyAuditPolicy", policy_class="machine"
                )
                == test_setting
            )
        yield
    finally:
        # Restore both policies to their pre-test values.
        salt_minion.functions.lgpo.set_computer_policy(
            name="SceNoApplyLegacyAuditPolicy", setting=pre_security_setting
        )
        salt_minion.functions.lgpo.set_computer_policy(
            name="Audit User Account Management", setting=pre_audit_setting
        )
@pytest.fixture
def clear_policy():
    """Reset "Audit User Account Management" to "No Auditing" before a test."""
    # Ensure the policy is not set
    test_setting = "No Auditing"
    win_lgpo.set_computer_policy(
        name="Audit User Account Management", setting=test_setting
    )
    assert (
        win_lgpo.get_policy(
            policy_name="Audit User Account Management", policy_class="machine"
        )
        == test_setting
    )
@pytest.fixture
def set_policy():
    """Set "Audit User Account Management" to "Success" before a test."""
    # Ensure the policy is set
    test_setting = "Success"
    win_lgpo.set_computer_policy(
        name="Audit User Account Management", setting=test_setting
    )
    assert (
        win_lgpo.get_policy(
            policy_name="Audit User Account Management", policy_class="machine"
        )
        == test_setting
    )
@pytest.mark.parametrize(
    "setting, expected",
    [
        ("No Auditing", "0"),
        ("Success", "1"),
        ("Failure", "2"),
        ("Success and Failure", "3"),
    ],
)
def test_get_value(setting, expected):
    """
    Set each audit level via lgpo and verify _get_advaudit_value reads back
    the matching numeric code from the machine (refresh=True bypasses the
    cached context).
    """
    win_lgpo.set_computer_policy(name="Audit User Account Management", setting=setting)
    # Clear the context so we're getting the actual settings from the machine
    result = win_lgpo._get_advaudit_value("Audit User Account Management", refresh=True)
    assert result == expected
def test_get_defaults():
    """_get_advaudit_defaults parses auditpol output and honors cached context."""
    patch_context = patch.dict(win_lgpo.__context__, {})
    patch_salt = patch.dict(
        win_lgpo.__utils__, {"auditpol.get_auditpol_dump": auditpol.get_auditpol_dump}
    )
    # With an empty context, defaults are read from a fresh auditpol dump.
    with patch_context, patch_salt:
        assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames")

    # With a pre-populated context, the cached value is returned verbatim.
    audit_defaults = {"junk": "defaults"}
    patch_context = patch.dict(
        win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults}
    )
    with patch_context, patch_salt:
        assert win_lgpo._get_advaudit_defaults() == audit_defaults
def test_set_value_error():
    """A failed audit-file write surfaces as CommandExecutionError."""
    mock_set_file_data = MagicMock(return_value=False)
    with patch.object(win_lgpo, "_set_advaudit_file_data", mock_set_file_data):
        with pytest.raises(CommandExecutionError):
            win_lgpo._set_advaudit_value("Audit User Account Management", "None")
def test_set_value_log_messages(caplog):
    """Failures to apply/remove audit data are logged, not raised."""
    mock_set_file_data = MagicMock(return_value=True)
    # pol-data write fails -> "Failed to apply audit setting:" is logged.
    mock_set_pol_data = MagicMock(return_value=False)
    # A None value triggers removal of the cached context entry, logged too.
    mock_context = {"lgpo.adv_audit_data": {"test_option": "test_value"}}
    with patch.object(
        win_lgpo, "_set_advaudit_file_data", mock_set_file_data
    ), patch.object(win_lgpo, "_set_advaudit_pol_data", mock_set_pol_data), patch.dict(
        win_lgpo.__context__, mock_context
    ):
        win_lgpo._set_advaudit_value("test_option", None)
    assert "Failed to apply audit setting:" in caplog.text
    assert "LGPO: Removing Advanced Audit data:" in caplog.text

View file

@ -0,0 +1,135 @@
import pytest
import salt.modules.win_lgpo as win_lgpo
from tests.support.mock import MagicMock, patch
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
pytest.mark.destructive_test,
pytest.mark.slow_test,
]
@pytest.fixture
def configure_loader_modules():
    """Load win_lgpo with empty dunders; these tests patch what they need."""
    return {win_lgpo: {}}
def test_get_netsh_value():
    """_get_netsh_value reads from the context cache when present."""
    # Setting a value primes (then invalidates) the per-profile cache; with an
    # empty context the value is re-read from the machine.
    with patch.dict(win_lgpo.__context__, {"lgpo.netsh_data": {"domain": {}}}):
        win_lgpo._set_netsh_value("domain", "state", "State", "NotConfigured")
    with patch.dict(win_lgpo.__context__, {}):
        assert win_lgpo._get_netsh_value("domain", "State") == "NotConfigured"

    # With a populated context, the cached value wins.
    context = {
        "lgpo.netsh_data": {
            "domain": {
                "State": "ONContext",
                "Inbound": "NotConfigured",
                "Outbound": "NotConfigured",
                "LocalFirewallRules": "NotConfigured",
            },
        },
    }
    with patch.dict(win_lgpo.__context__, context):
        assert win_lgpo._get_netsh_value("domain", "State") == "ONContext"
def test_set_value_error():
    """An unknown netsh section name raises ValueError."""
    with pytest.raises(ValueError):
        win_lgpo._set_netsh_value("domain", "bad_section", "junk", "junk")
def test_set_value_firewall():
    """
    firewallpolicy options are forwarded to set_firewall_settings with the
    option mapped onto the matching keyword argument.
    """
    patch_ctx = patch.dict(
        win_lgpo.__context__, {"lgpo.netsh_data": {"domain": "junk"}}
    )
    patch_netsh = patch("salt.utils.win_lgpo_netsh.set_firewall_settings", MagicMock())
    with patch_netsh as mock, patch_ctx:
        win_lgpo._set_netsh_value(
            profile="domain",
            section="firewallpolicy",
            option="Inbound",
            value="spongebob",
        )
    mock.assert_called_once_with(
        profile="domain",
        inbound="spongebob",
        outbound=None,
        store="lgpo",
    )
def test_set_value_settings():
    """
    settings-section options are passed through to set_settings by name.
    """
    patch_ctx = patch.dict(
        win_lgpo.__context__, {"lgpo.netsh_data": {"domain": "junk"}}
    )
    patch_netsh = patch("salt.utils.win_lgpo_netsh.set_settings", MagicMock())
    with patch_netsh as mock, patch_ctx:
        win_lgpo._set_netsh_value(
            profile="domain",
            section="settings",
            option="spongebob",
            value="squarepants",
        )
    mock.assert_called_once_with(
        profile="domain",
        setting="spongebob",
        value="squarepants",
        store="lgpo",
    )
def test_set_value_state():
    """
    state-section values are forwarded to set_state; the option name is
    ignored for this section.
    """
    patch_ctx = patch.dict(
        win_lgpo.__context__, {"lgpo.netsh_data": {"domain": "junk"}}
    )
    patch_netsh = patch("salt.utils.win_lgpo_netsh.set_state", MagicMock())
    with patch_netsh as mock, patch_ctx:
        win_lgpo._set_netsh_value(
            profile="domain",
            section="state",
            option="junk",
            value="spongebob",
        )
    mock.assert_called_once_with(
        profile="domain",
        state="spongebob",
        store="lgpo",
    )
def test_set_value_logging_filename():
    """
    Logging FileName values of "Not configured" are normalized to the
    lowercase "notconfigured" token before calling set_logging_settings.
    """
    patch_ctx = patch.dict(
        win_lgpo.__context__, {"lgpo.netsh_data": {"domain": "junk"}}
    )
    patch_netsh = patch("salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock())
    with patch_netsh as mock, patch_ctx:
        win_lgpo._set_netsh_value(
            profile="domain",
            section="logging",
            option="FileName",
            value="Not configured",
        )
    mock.assert_called_once_with(
        profile="domain",
        setting="FileName",
        value="notconfigured",
        store="lgpo",
    )
def test_set_value_logging_log():
    """
    Logging options that start with "Log" have that prefix stripped from
    the setting name passed to set_logging_settings.
    """
    patch_ctx = patch.dict(
        win_lgpo.__context__, {"lgpo.netsh_data": {"domain": "junk"}}
    )
    patch_netsh = patch("salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock())
    with patch_netsh as mock, patch_ctx:
        win_lgpo._set_netsh_value(
            profile="domain",
            section="logging",
            option="LogSpongebob",
            value="Junk",
        )
    mock.assert_called_once_with(
        profile="domain",
        setting="Spongebob",
        value="Junk",
        store="lgpo",
    )

View file

@ -5,6 +5,7 @@ import pytest
import salt.modules.win_file as win_file
import salt.modules.win_lgpo as win_lgpo
from tests.support.mock import MagicMock, patch
pytestmark = [
pytest.mark.windows_whitelisted,
@ -42,6 +43,18 @@ def test_get_policy_name():
assert result == expected
def test_get_adml_display_name_bad_name():
    """A display-name string without the $(...) form yields None."""
    assert win_lgpo._getAdmlDisplayName("junk", "spongbob") is None
def test_get_adml_display_name_no_results():
    """None is returned when the ADML XPath lookup finds nothing."""
    with patch.object(win_lgpo, "ADML_DISPLAY_NAME_XPATH", return_value=[]):
        result = win_lgpo._getAdmlDisplayName("junk", "$(spongbob.squarepants)")
    assert result is None
def test_get_policy_id():
result = win_lgpo.get_policy(
policy_name="WfwPublicSettingsNotification",
@ -156,3 +169,78 @@ def test_get_policy_id_full_return_full_names_hierarchical():
}
}
assert result == expected
def test_transform_value_missing_type():
    """
    When the policy's Transform dict lacks the requested transform type,
    the value passes through unchanged.
    """
    result = win_lgpo._transform_value(
        value="spongebob",
        policy={"Transform": {"some_type": "junk"}},
        transform_type="different_type",
    )
    assert result == "spongebob"
def test_transform_value_registry():
    """A Registry policy without a transform returns the value as-is."""
    result = win_lgpo._transform_value(
        value="spongebob",
        policy={"Registry": {}},
        transform_type="different_type",
    )
    assert result == "spongebob"
def test_transform_value_registry_not_set():
    """
    The registry sentinel "(value not set)" maps to "Not Defined" for
    Registry policies.
    """
    result = win_lgpo._transform_value(
        value="(value not set)",
        policy={"Registry": {}},
        transform_type="different_type",
    )
    assert result == "Not Defined"
def test_validate_setting_not_in_list():
    """A value absent from the Settings list fails validation."""
    assert not win_lgpo._validateSetting(
        value="spongebob", policy={"Settings": ["junk"]}
    )
def test_validate_setting_in_list():
    """A value present in the Settings list passes validation."""
    assert win_lgpo._validateSetting(
        value="spongebob", policy={"Settings": ["spongebob"]}
    )
def test_validate_setting_not_list_or_dict():
    """When Settings is neither a list nor a dict, any value validates."""
    assert win_lgpo._validateSetting(
        value="spongebob", policy={"Settings": "spongebob"}
    )
def test_add_account_rights_error():
    """_addAccountRights returns False when LsaOpenPolicy raises."""
    with patch("win32security.LsaOpenPolicy", MagicMock(side_effect=Exception)):
        assert win_lgpo._addAccountRights("spongebob", "junk") is False
def test_del_account_rights_error():
    """_delAccountRights returns False when LsaOpenPolicy raises."""
    with patch("win32security.LsaOpenPolicy", MagicMock(side_effect=Exception)):
        assert win_lgpo._delAccountRights("spongebob", "junk") is False
def test_validate_setting_no_function():
    """A non-numeric value fails the _in_range_inclusive settings check."""
    range_policy = {
        "Settings": {
            "Function": "_in_range_inclusive",
            "Args": {"min": 0, "max": 24},
        },
    }
    assert not win_lgpo._validateSetting(value="spongebob", policy=range_policy)

View file

@ -0,0 +1,53 @@
"""
:codeauthor: Shane Lee <slee@saltstack.com>
"""
import pytest
import salt.modules.win_lgpo as win_lgpo
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
pytest.mark.slow_test,
]
@pytest.fixture
def reg_pol_dword():
    """
    Registry.pol blob containing a single REG_DWORD value whose data bytes
    are all zero (the commented-out line shows what a value of 1 would be).
    Format: [key;value;type;size;data] in UTF-16-LE.
    """
    data = (
        b"PReg\x01\x00\x00\x00"  # Header
        b"[\x00"  # Opening list of policies
        b"S\x00o\x00m\x00e\x00\\\x00K\x00e\x00y\x00\x00\x00;\x00"  # Key
        b"V\x00a\x00l\x00u\x00e\x00N\x00a\x00m\x00e\x00\x00\x00;\x00"  # Value
        b"\x04\x00\x00\x00;\x00"  # Reg DWord Type
        b"\x04\x00\x00\x00;\x00"  # Size
        # b"\x01\x00\x00\x00"  # Reg Dword Data
        b"\x00\x00\x00\x00"  # No Data
        b"]\x00"  # Closing list of policies
    )
    yield data
def test_get_data_from_reg_pol_data(reg_pol_dword):
    """
    A DWORD entry whose data bytes are all zero decodes to the integer 0
    keyed by its value name.
    """
    # Search key layout: ;ValueName\0;type\0;size\0 — all UTF-16-LE.
    semicolon = ";".encode("utf-16-le")
    null_char = chr(0).encode("utf-16-le")
    dword_type = chr(4).encode("utf-16-le")
    dword_size = chr(4).encode("utf-16-le")
    search_string = (
        semicolon
        + "ValueName".encode("utf-16-le")
        + null_char
        + semicolon
        + dword_type
        + null_char
        + semicolon
        + dword_size
        + null_char
    )
    result = win_lgpo._getDataFromRegPolData(
        search_string, reg_pol_dword, return_value_name=True
    )
    assert result == {"ValueName": 0}

View file

@ -0,0 +1,83 @@
import pytest
import salt.modules.cmdmod as cmd
import salt.modules.win_file as win_file
import salt.modules.win_lgpo as win_lgpo
from tests.support.mock import MagicMock, patch
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
pytest.mark.destructive_test,
pytest.mark.slow_test,
]
@pytest.fixture
def configure_loader_modules(tmp_path):
    """Wire win_lgpo to real cmd/file helpers and a temporary cache dir."""
    cache_dir = tmp_path / "__test_admx_policy_cache_dir"
    cache_dir.mkdir(parents=True, exist_ok=True)
    loader_globals = {
        "__salt__": {
            "cmd.run": cmd.run,
            "file.file_exists": win_file.file_exists,
            "file.remove": win_file.remove,
        },
        "__opts__": {
            "cachedir": str(cache_dir),
        },
    }
    return {win_lgpo: loader_globals}
def test_load_secedit_data():
    """_load_secedit_data returns the exported secedit INI sections."""
    lines = [line.strip() for line in win_lgpo._load_secedit_data()]
    assert "[Unicode]" in lines
    assert "[System Access]" in lines
def test_get_secedit_data():
    """With an empty context, _get_secedit_data loads fresh secedit data."""
    with patch.dict(win_lgpo.__context__, {}):
        lines = [line.strip() for line in win_lgpo._get_secedit_data()]
    assert "[Unicode]" in lines
    assert "[System Access]" in lines
def test_get_secedit_data_existing_context():
    """Cached context data is returned instead of reloading secedit."""
    cached = {"lgpo.secedit_data": ["spongebob", "squarepants"]}
    with patch.dict(win_lgpo.__context__, cached):
        lines = [line.strip() for line in win_lgpo._get_secedit_data()]
    assert "spongebob" in lines
    assert "squarepants" in lines
def test_get_secedit_value():
    """A known secedit option resolves to its configured value."""
    assert win_lgpo._get_secedit_value("AuditDSAccess") == "0"
def test_get_secedit_value_not_defined():
    """An unknown secedit option resolves to "Not Defined"."""
    assert win_lgpo._get_secedit_value("Spongebob") == "Not Defined"
def test_write_secedit_data_import_fail(caplog):
    """A non-zero secedit import exit code fails and logs the error."""
    mock_retcode = MagicMock(return_value=1)
    with patch.dict(win_lgpo.__salt__, {"cmd.retcode": mock_retcode}):
        assert win_lgpo._write_secedit_data("spongebob") is False
    assert "Secedit failed to import template data" in caplog.text
def test_write_secedit_data_configure_fail(caplog):
    """
    If the import succeeds (retcode 0) but the configure step fails
    (retcode 1), the write fails and the configure error is logged.
    """
    mock_retcode = MagicMock(side_effect=[0, 1])
    with patch.dict(win_lgpo.__salt__, {"cmd.retcode": mock_retcode}):
        assert win_lgpo._write_secedit_data("spongebob") is False
    assert "Secedit failed to apply security database" in caplog.text

View file

@ -1,4 +1,5 @@
import pytest
from pytestskipmarkers.utils import platform
import salt.modules.postgres as postgres
import salt.states.postgres_group as postgres_group
@ -19,6 +20,8 @@ def fixture_db_args():
@pytest.fixture(name="md5_pw")
def fixture_md5_pw():
    """
    Expected postgres MD5 password hash for user "password" / group
    "groupname". Skipped on FIPS hosts, where MD5 is unavailable.
    """
    if platform.is_fips_enabled():
        pytest.skip("Test cannot run on a FIPS enabled platform")
    # 'md5' + md5('password' + 'groupname')
    return "md58b14c378fab8ef0dc227f4e6d6787a87"
@ -79,6 +82,7 @@ def configure_loader_modules(mocks):
# ==========
@pytest.mark.skip_on_fips_enabled_platform
def test_present_create_basic(mocks, db_args):
assert postgres_group.present("groupname") == {
"name": "groupname",
@ -343,6 +347,7 @@ def test_present_update_md5_password(mocks, existing_group, md5_pw, db_args):
)
@pytest.mark.skip_on_fips_enabled_platform
def test_present_update_error(mocks, existing_group):
existing_group["password"] = "md500000000000000000000000000000000"
mocks["postgres.role_get"].return_value = existing_group

View file

@ -1,4 +1,5 @@
import pytest
from pytestskipmarkers.utils import platform
import salt.modules.postgres as postgres
import salt.states.postgres_user as postgres_user
@ -25,6 +26,8 @@ def fixture_db_args():
@pytest.fixture(name="md5_pw")
def fixture_md5_pw():
    # Expected postgres MD5 password hash for 'password' + 'username'.
    # Skipped on FIPS hosts, where MD5 is unavailable.
    # 'md5' + md5('password' + 'username')
    if platform.is_fips_enabled():
        pytest.skip("Test cannot run on a FIPS enabled platform")
    return "md55a231fcdb710d73268c4f44283487ba2"

View file

@ -17,6 +17,7 @@ log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_on_fips_enabled_platform,
]

View file

@ -18,6 +18,7 @@ log = logging.getLogger(__name__)
pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_on_fips_enabled_platform,
]

View file

@ -42,26 +42,124 @@ def test_query():
with patch.dict(http.__salt__, {"http.query": mock}):
assert http.query("salt", "Dude", "stack") == ret[1]
with patch.dict(http.__opts__, {"test": False}):
mock = MagicMock(return_value={"body": "http body", "status": 200})
expected = {
"name": "http://example.com/",
"result": True,
"comment": "Status 200 was found.",
"changes": {},
"data": {"body": "http body", "status": 200},
}
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(name="http://example.com/", status=200, decode=False)
== expected
)
with patch.dict(http.__opts__, {"test": False}):
mock = MagicMock(return_value={"body": "http body", "status": 200})
expected = {
"name": "http://example.com/",
"result": True,
"comment": "Status 200 was found.",
"changes": {},
"data": {"body": "http body", "status": 200},
}
with patch.dict(http.__salt__, {"http.wait_for_successful_query": mock}):
assert (
http.query(name="http://example.com/", status=200, wait_for=300)
== expected
)
with patch.dict(http.__opts__, {"test": True}):
mock = MagicMock(return_value={"body": "http body", "status": 200})
expected = {
"name": "http://example.com/",
"result": None,
"comment": "Status 200 was found. (TEST MODE, TEST URL WAS: http://status.example.com)",
"changes": {},
"data": {"body": "http body", "status": 200},
}
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(
name="http://example.com/",
status=200,
test_url="http://status.example.com",
)
== expected
)
def test_query_pcre_statustype():
"""
Test to perform an HTTP query with a regex used to match the status code and statefully return the result
"""
testurl = "salturl"
http_result = {"text": "This page returned a 201 status code", "status": "201"}
state_return = {
"changes": {},
"comment": (
'Match text "This page returned" was found. Status pattern "200|201" was'
" found."
),
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
with patch.dict(http.__opts__, {"test": False}):
http_result = {"text": "This page returned a 201 status code", "status": "201"}
mock = MagicMock(return_value=http_result)
state_return = {
"changes": {},
"comment": (
'Match text "This page returned" was found. Status pattern "200|201" was'
" found."
),
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(
testurl,
match="This page returned",
status="200|201",
status_type="pcre",
)
== state_return
)
with patch.dict(http.__opts__, {"test": False}):
http_result = {"text": "This page returned a 201 status code", "status": "201"}
mock = MagicMock(return_value=http_result)
state_return = {
"changes": {},
"comment": ('Status pattern "200|201" was found.'),
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(
testurl,
status="200|201",
status_type="pcre",
)
== state_return
)
http_result = {"text": "This page returned a 403 status code", "status": "403"}
mock = MagicMock(return_value=http_result)
state_return = {
"name": "salturl",
"result": False,
"comment": 'Match text "This page returned" was found. Status pattern "200|201" was not found.',
"changes": {},
"data": {"text": "This page returned a 403 status code", "status": "403"},
}
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(
@ -74,23 +172,109 @@ def test_query_pcre_statustype():
)
def test_query_pcre_matchtype():
    """
    Test to perform an HTTP query with a regex used to match the returned text and statefully return the result
    """
    testurl = "salturl"
    with patch.dict(http.__opts__, {"test": False}):
        # Body text matching the pattern -> success.
        query_mock = MagicMock(
            return_value={
                "text": "This page returned a 201 status code",
                "status": "201",
            }
        )
        expected = {
            "changes": {},
            "comment": 'Match pattern "This page returned" was found.',
            "data": {"status": "201", "text": "This page returned a 201 status code"},
            "name": testurl,
            "result": True,
        }
        with patch.dict(http.__salt__, {"http.query": query_mock}):
            result = http.query(
                testurl,
                match="This page returned",
                match_type="pcre",
            )
        assert result == expected

        # Body text not matching the pattern -> failure.
        query_mock = MagicMock(
            return_value={
                "text": "This page did not return a 201 status code",
                "status": "403",
            }
        )
        expected = {
            "changes": {},
            "comment": 'Match pattern "This page returned" was not found.',
            "data": {
                "status": "403",
                "text": "This page did not return a 201 status code",
            },
            "name": testurl,
            "result": False,
        }
        with patch.dict(http.__salt__, {"http.query": query_mock}):
            result = http.query(
                testurl,
                match="This page returned",
                match_type="pcre",
            )
        assert result == expected
def test_query_stringstatustype():
"""
Test to perform an HTTP query with a string status code and statefully return the result
"""
testurl = "salturl"
http_result = {"text": "This page returned a 201 status code", "status": "201"}
state_return = {
"changes": {},
"comment": 'Match text "This page returned" was found. Status 201 was found.',
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
with patch.dict(http.__opts__, {"test": False}):
http_result = {"text": "This page returned a 201 status code", "status": "201"}
mock = MagicMock(return_value=http_result)
with patch.dict(http.__salt__, {"http.query": mock}):
state_return = {
"changes": {},
"comment": 'Match text "This page returned" was found. Status 201 was found.',
"data": {
"status": "201",
"text": "This page returned a 201 status code",
},
"name": testurl,
"result": True,
}
assert (
http.query(
testurl,
match="This page returned",
status="201",
status_type="string",
)
== state_return
)
http_result = {"text": "This page returned a 403 status code", "status": "403"}
mock = MagicMock(return_value=http_result)
with patch.dict(http.__salt__, {"http.query": mock}):
state_return = {
"name": "salturl",
"result": False,
"comment": 'Match text "This page returned" was found. Status 201 was not found.',
"changes": {},
"data": {
"text": "This page returned a 403 status code",
"status": "403",
},
}
assert (
http.query(
testurl,
@ -102,21 +286,54 @@ def test_query_stringstatustype():
)
def test_query_invalidstatustype():
    """
    Test to perform an HTTP query with a string status code and statefully return the result
    """
    testurl = "salturl"
    with patch.dict(http.__opts__, {"test": False}):
        query_mock = MagicMock(
            return_value={
                "text": "This page returned a 201 status code",
                "status": "201",
            }
        )
        # An unknown status_type means no status check runs: result is None.
        expected = {
            "name": "salturl",
            "result": None,
            "comment": "",
            "changes": {},
            "data": {
                "text": "This page returned a 201 status code",
                "status": "201",
            },
        }
        with patch.dict(http.__salt__, {"http.query": query_mock}):
            result = http.query(
                testurl,
                status="201",
                status_type="invalid",
            )
        assert result == expected
def test_query_liststatustype():
"""
Test to perform an HTTP query with a list of status codes and statefully return the result
"""
testurl = "salturl"
http_result = {"text": "This page returned a 201 status code", "status": "201"}
state_return = {
"changes": {},
"comment": 'Match text "This page returned" was found. Status 201 was found.',
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
with patch.dict(http.__opts__, {"test": False}):
http_result = {"text": "This page returned a 201 status code", "status": "201"}
state_return = {
"changes": {},
"comment": 'Match text "This page returned" was found. Status 201 was found.',
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
mock = MagicMock(return_value=http_result)
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
@ -129,6 +346,48 @@ def test_query_liststatustype():
== state_return
)
with patch.dict(http.__opts__, {"test": False}):
http_result = {"text": "This page returned a 201 status code", "status": "201"}
state_return = {
"changes": {},
"comment": "Status 201 was found.",
"data": {"status": "201", "text": "This page returned a 201 status code"},
"name": testurl,
"result": True,
}
mock = MagicMock(return_value=http_result)
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(
testurl,
status=["200", "201"],
status_type="list",
)
== state_return
)
http_result = {"text": "This page returned a 403 status code", "status": "403"}
state_return = {
"name": "salturl",
"result": False,
"comment": "Match text \"This page returned a 200\" was not found. Statuses ['200', '201'] were not found.",
"changes": {},
"data": {"text": "This page returned a 403 status code", "status": "403"},
}
mock = MagicMock(return_value=http_result)
with patch.dict(http.__salt__, {"http.query": mock}):
assert (
http.query(
testurl,
match="This page returned a 200",
status=["200", "201"],
status_type="list",
)
== state_return
)
def test_wait_for_with_interval():
"""
@ -156,3 +415,22 @@ def test_wait_for_without_interval():
with patch("time.sleep", MagicMock()) as sleep_mock:
assert http.wait_for_successful_query("url", status=200) == {"result": True}
sleep_mock.assert_not_called()
query_mock = MagicMock(return_value={"result": False})
with patch.object(http, "query", query_mock):
with patch(
"time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958])
):
assert http.wait_for_successful_query("url", status=200) == {
"result": False
}
query_mock = MagicMock(side_effect=Exception())
with patch.object(http, "query", query_mock):
with patch(
"time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958])
):
with pytest.raises(Exception):
http.wait_for_successful_query("url", status=200)

View file

@ -0,0 +1,21 @@
"""
Unit tests for salt.transport.base.
"""
import pytest
import salt.transport.base
pytestmark = [
pytest.mark.core_test,
]
def test_unclosed_warning():
    """A Transport that was connected but never closed warns on deletion."""
    transport = salt.transport.base.Transport()
    assert transport._closing is False
    assert transport._connect_called is False
    transport.connect()
    assert transport._connect_called is True
    # `del` drops the only reference; relies on CPython refcounting to run
    # __del__ inside the warns block.
    with pytest.warns(salt.transport.base.TransportWarning):
        del transport

View file

@ -53,3 +53,33 @@ def test_pub_client_init(minion_opts, io_loop):
minion_opts, io_loop, host=minion_opts["master_ip"], port=121212
) as client:
client.send(b"asf")
async def test_unclosed_request_client(minion_opts, io_loop):
    # A connected-but-unclosed RequestClient must emit TransportWarning.
    minion_opts["master_uri"] = "tcp://127.0.0.1:4506"
    client = salt.transport.zeromq.RequestClient(minion_opts, io_loop)
    await client.connect()
    try:
        assert client._closing is False
        # Call __del__ directly so the warning fires deterministically
        # instead of depending on garbage collection timing.
        with pytest.warns(salt.transport.base.TransportWarning):
            client.__del__()
    finally:
        # Always close to avoid leaking the socket into other tests.
        client.close()
async def test_unclosed_publish_client(minion_opts, io_loop):
    # A connected-but-unclosed PublishClient must emit TransportWarning.
    minion_opts["id"] = "minion"
    minion_opts["__role"] = "minion"
    minion_opts["master_ip"] = "127.0.0.1"
    minion_opts["zmq_filtering"] = True
    minion_opts["zmq_monitor"] = True
    client = salt.transport.zeromq.PublishClient(
        minion_opts, io_loop, host=minion_opts["master_ip"], port=121212
    )
    await client.connect()
    try:
        assert client._closing is False
        # Call __del__ directly so the warning fires deterministically
        # instead of depending on garbage collection timing.
        with pytest.warns(salt.transport.base.TransportWarning):
            client.__del__()
    finally:
        # Always close to avoid leaking the socket into other tests.
        client.close()

View file

@ -46,7 +46,6 @@ def minion_opts(tmp_path, minion_opts):
"file_roots": {"test": [str(tmp_path / "templates")]},
"pillar_roots": {"test": [str(tmp_path / "templates")]},
"fileserver_backend": ["roots"],
"hash_type": "md5",
"extension_modules": os.path.join(
os.path.dirname(os.path.abspath(__file__)), "extmods"
),
@ -1041,6 +1040,7 @@ def test_method_call(minion_opts, local_salt):
assert rendered == "None"
@pytest.mark.skip_on_fips_enabled_platform
def test_md5(minion_opts, local_salt):
"""
Test the `md5` Jinja filter.

View file

@ -61,7 +61,6 @@ def minion_opts(tmp_path, minion_opts):
"file_roots": {"test": [str(tmp_path / "files" / "test")]},
"pillar_roots": {"test": [str(tmp_path / "files" / "test")]},
"fileserver_backend": ["roots"],
"hash_type": "md5",
"extension_modules": os.path.join(
os.path.dirname(os.path.abspath(__file__)), "extmods"
),

View file

@ -2,11 +2,10 @@
Tests for salt.utils.jinja
"""
import copy
import os
import pytest
from jinja2 import Environment, exceptions
from jinja2 import Environment, TemplateNotFound, exceptions
# dateutils is needed so that the strftime jinja filter is loaded
import salt.utils.dateutils # pylint: disable=unused-import
@ -15,7 +14,7 @@ import salt.utils.json # pylint: disable=unused-import
import salt.utils.stringutils # pylint: disable=unused-import
import salt.utils.yaml # pylint: disable=unused-import
from salt.utils.jinja import SaltCacheLoader
from tests.support.mock import Mock, call, patch
from tests.support.mock import MagicMock, call, patch
@pytest.fixture
@ -25,7 +24,7 @@ def minion_opts(tmp_path, minion_opts):
"file_buffer_size": 1048576,
"cachedir": str(tmp_path),
"file_roots": {"test": [str(tmp_path / "files" / "test")]},
"pillar_roots": {"test": [str(tmp_path / "files" / "test")]},
"pillar_roots": {"test": [str(tmp_path / "pillar" / "test")]},
"extension_modules": os.path.join(
os.path.dirname(os.path.abspath(__file__)), "extmods"
),
@ -108,7 +107,7 @@ def get_loader(mock_file_client, minion_opts):
if opts is None:
opts = minion_opts
mock_file_client.opts = opts
loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client)
loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client, **kwargs)
# Create a mock file client and attach it to the loader
return loader
@ -128,10 +127,27 @@ def test_searchpath(minion_opts, get_loader, tmp_path):
"""
The searchpath is based on the cachedir option and the saltenv parameter
"""
opts = copy.deepcopy(minion_opts)
opts.update({"cachedir": str(tmp_path)})
loader = get_loader(opts=minion_opts, saltenv="test")
assert loader.searchpath == [str(tmp_path / "files" / "test")]
saltenv = "test"
loader = get_loader(opts=minion_opts, saltenv=saltenv)
assert loader.searchpath == minion_opts["file_roots"][saltenv]
def test_searchpath_pillar_rend(minion_opts, get_loader):
    """
    The searchpath is based on the pillar_rend if it is True
    """
    loader = get_loader(opts=minion_opts, saltenv="test", pillar_rend=True)
    assert loader.searchpath == minion_opts["pillar_roots"]["test"]
def test_searchpath_bad_pillar_rend(minion_opts, get_loader):
    """
    An unknown saltenv with pillar_rend=True yields an empty searchpath.
    """
    loader = get_loader(opts=minion_opts, saltenv="bad_env", pillar_rend=True)
    assert loader.searchpath == []
def test_mockclient(minion_opts, template_dir, hello_simple, get_loader):
@ -208,7 +224,7 @@ def test_cached_file_client(get_loader, minion_opts):
"""
Multiple instantiations of SaltCacheLoader use the cached file client
"""
with patch("salt.channel.client.ReqChannel.factory", Mock()):
with patch("salt.channel.client.ReqChannel.factory", MagicMock()):
loader_a = SaltCacheLoader(minion_opts)
loader_b = SaltCacheLoader(minion_opts)
assert loader_a._file_client is loader_b._file_client
@ -230,7 +246,7 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
file_client does not have a destroy method
"""
# Test SaltCacheLoader creating and destroying the file client created
file_client = Mock()
file_client = MagicMock()
with patch("salt.fileclient.get_file_client", return_value=file_client):
loader = SaltCacheLoader(minion_opts)
assert loader._file_client is None
@ -240,9 +256,9 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
assert file_client.mock_calls == [call.destroy()]
# Test SaltCacheLoader reusing the file client passed
file_client = Mock()
file_client = MagicMock()
file_client.opts = {"file_roots": minion_opts["file_roots"]}
with patch("salt.fileclient.get_file_client", return_value=Mock()):
with patch("salt.fileclient.get_file_client", return_value=MagicMock()):
loader = SaltCacheLoader(minion_opts, _file_client=file_client)
assert loader._file_client is file_client
with loader:
@ -254,9 +270,9 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
# passed because the "file_roots" option is different, and, as such,
# the destroy method on the new file client is called, but not on the
# file client passed in.
file_client = Mock()
file_client = MagicMock()
file_client.opts = {"file_roots": ""}
new_file_client = Mock()
new_file_client = MagicMock()
with patch("salt.fileclient.get_file_client", return_value=new_file_client):
loader = SaltCacheLoader(minion_opts, _file_client=file_client)
assert loader._file_client is file_client
@ -266,3 +282,65 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
assert loader._file_client is None
assert file_client.mock_calls == []
assert new_file_client.mock_calls == [call.destroy()]
def test_check_cache_miss(get_loader, minion_opts, hello_simple):
    """check_cache fetches the template when it is not yet cached."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    with patch.object(loader, "cached", []), patch.object(
        loader, "cache_file"
    ) as cache_mock:
        loader.check_cache(str(hello_simple))
        cache_mock.assert_called_once()
def test_check_cache_hit(get_loader, minion_opts, hello_simple):
    """check_cache skips fetching when the template is already cached."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    with patch.object(loader, "cached", [str(hello_simple)]), patch.object(
        loader, "cache_file"
    ) as cache_mock:
        loader.check_cache(str(hello_simple))
        cache_mock.assert_not_called()
def test_get_source_no_environment(
    get_loader, minion_opts, relative_rhello, relative_dir
):
    """A relative template path without an environment is not found."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    relative_template = str(".." / relative_rhello.relative_to(relative_dir))
    with pytest.raises(TemplateNotFound):
        loader.get_source(None, relative_template)
def test_get_source_relative_no_tpldir(
    get_loader, minion_opts, relative_rhello, relative_dir
):
    """A relative path fails when the environment lacks a tpldir global."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    relative_template = str(".." / relative_rhello.relative_to(relative_dir))
    with pytest.raises(TemplateNotFound):
        loader.get_source(MagicMock(globals={}), relative_template)
def test_get_source_template_doesnt_exist(get_loader, minion_opts):
    """Requesting a nonexistent template raises TemplateNotFound."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    with pytest.raises(TemplateNotFound):
        loader.get_source(None, "fake_path")
def test_get_source_template_removed(get_loader, minion_opts, hello_simple):
    """The uptodate callback reports False after the template is deleted."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    _, _, uptodate = loader.get_source(None, str(hello_simple))
    hello_simple.unlink()
    assert uptodate() is False
def test_no_destroy_method_on_file_client(get_loader, minion_opts):
    """destroy() tolerates a file client that has no destroy method."""
    loader = get_loader(opts=minion_opts, saltenv="test")
    loader._close_file_client = True
    # Must not raise even though the mock client lacks destroy().
    loader.destroy()

View file

@ -0,0 +1,78 @@
"""
Tests the PIDfile deletion in the DaemonMixIn.
"""
import logging
import pytest
import salt.utils.parsers
from tests.support.mock import ANY, MagicMock, patch
@pytest.fixture
def daemon_mixin():
    """A DaemonMixIn configured with a fake PID-file path."""
    daemon = salt.utils.parsers.DaemonMixIn()
    daemon.config = {"pidfile": "/some/fake.pid"}
    return daemon
def test_pid_file_deletion(daemon_mixin):
    """
    PIDfile deletion without exception.
    """
    patch_unlink = patch("os.unlink", MagicMock())
    patch_isfile = patch("os.path.isfile", MagicMock(return_value=True))
    patch_log = patch("salt.utils.parsers.log", MagicMock())
    with patch_unlink as unlink_mock, patch_isfile, patch_log as log_mock:
        daemon_mixin._mixin_before_exit()
        unlink_mock.assert_called_once()
        # A clean deletion must not log anything.
        log_mock.info.assert_not_called()
        log_mock.debug.assert_not_called()
def test_pid_deleted_oserror_as_root(daemon_mixin):
    """
    PIDfile deletion with exception, running as root.
    """
    with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock:
        with patch("os.path.isfile", MagicMock(return_value=True)):
            with patch("salt.utils.parsers.log", MagicMock()) as log_mock:
                # Pretend to be privileged: admin on Windows, uid 0 elsewhere.
                if salt.utils.platform.is_windows():
                    patch_args = (
                        "salt.utils.win_functions.is_admin",
                        MagicMock(return_value=True),
                    )
                else:
                    patch_args = ("os.getuid", MagicMock(return_value=0))
                with patch(*patch_args):
                    daemon_mixin._mixin_before_exit()
                assert unlink_mock.call_count == 1
                # Privileged users get an informational log entry when the
                # PID file cannot be removed.
                log_mock.info.assert_called_with(
                    "PIDfile(%s) could not be deleted: %s",
                    format(daemon_mixin.config["pidfile"], ""),
                    ANY,
                    exc_info_on_loglevel=logging.DEBUG,
                )
def test_pid_deleted_oserror_as_non_root(daemon_mixin):
    """
    PIDfile deletion with exception, running as non-root.
    """
    with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock:
        with patch("os.path.isfile", MagicMock(return_value=True)):
            with patch("salt.utils.parsers.log", MagicMock()) as log_mock:
                # Pretend to be unprivileged: non-admin on Windows,
                # uid 1000 elsewhere.
                if salt.utils.platform.is_windows():
                    patch_args = (
                        "salt.utils.win_functions.is_admin",
                        MagicMock(return_value=False),
                    )
                else:
                    patch_args = ("os.getuid", MagicMock(return_value=1000))
                with patch(*patch_args):
                    daemon_mixin._mixin_before_exit()
                assert unlink_mock.call_count == 1
                # Unprivileged users failing to delete the PID file is
                # expected, so nothing is logged.
                log_mock.info.assert_not_called()
                log_mock.debug.assert_not_called()

View file

@ -0,0 +1,784 @@
"""
:codeauthor: Denys Havrysh <denys.gavrysh@gmail.com>
"""
import logging
import os
import pprint
import pytest
import salt._logging
import salt.config
import salt.syspaths
import salt.utils.jid
import salt.utils.parsers
import salt.utils.platform
from tests.support.mock import MagicMock, patch
log = logging.getLogger(__name__)
class LogImplMock:
    """
    In-memory stand-in for the ``salt._logging`` module.

    Records what the option parsers configure (console/file log levels, log
    file path, handler state) so tests can assert on it, while still
    delegating the options-dict handling and the actual logfile handler
    creation to the real implementations.
    """
    def __init__(self):
        """
        init
        """
        # Values recorded by the mocked setup_* methods below.
        self.log_level_console = None
        self.log_file = None
        self.log_level_logfile = None
        self.config = self.original_config = None
        logging_options = salt._logging.get_logging_options_dict()
        if logging_options:
            # Keep a pristine copy so _destroy() can restore the real
            # module state once the test is done.
            self.config = logging_options.copy()
            self.original_config = self.config.copy()
        self.temp_log_level = None
        # Handler-state flags toggled by the setup_/shutdown_ pairs.
        self._console_handler_configured = False
        self._extended_logging_configured = False
        self._logfile_handler_configured = False
        # References to the real implementations that are still invoked.
        self._real_set_logging_options_dict = salt._logging.set_logging_options_dict
        self._real_get_logging_options_dict = salt._logging.get_logging_options_dict
        self._real_setup_logfile_handler = salt._logging.setup_logfile_handler
    def _destroy(self):
        # Restore the untouched options dict and close the logfile handler.
        salt._logging.set_logging_options_dict.__options_dict__ = self.original_config
        salt._logging.shutdown_logfile_handler()
    def setup_temp_handler(self, log_level=None):
        """
        Set temp handler loglevel
        """
        log.debug("Setting temp handler log level to: %s", log_level)
        self.temp_log_level = log_level
    def is_console_handler_configured(self):
        log.debug("Calling is_console_handler_configured")
        return self._console_handler_configured
    def setup_console_handler(
        self, log_level="error", **kwargs
    ):  # pylint: disable=unused-argument
        """
        Set console loglevel
        """
        log.debug("Setting console handler log level to: %s", log_level)
        self.log_level_console = log_level
        self._console_handler_configured = True
    def shutdown_console_handler(self):
        log.debug("Calling shutdown_console_handler")
        self._console_handler_configured = False
    def is_extended_logging_configured(self):
        log.debug("Calling is_extended_logging_configured")
        return self._extended_logging_configured
    def setup_extended_logging(self, opts):
        """
        Set opts
        """
        log.debug("Calling setup_extended_logging")
        self._extended_logging_configured = True
    def shutdown_extended_logging(self):
        log.debug("Calling shutdown_extended_logging")
        self._extended_logging_configured = False
    def is_logfile_handler_configured(self):
        log.debug("Calling is_logfile_handler_configured")
        return self._logfile_handler_configured
    def setup_logfile_handler(
        self, log_path, log_level=None, **kwargs
    ):  # pylint: disable=unused-argument
        """
        Set logfile and loglevel
        """
        log.debug("Setting log file handler path to: %s", log_path)
        log.debug("Setting log file handler log level to: %s", log_level)
        self.log_file = log_path
        self.log_level_logfile = log_level
        # Delegate to the real implementation so the log file actually gets
        # created on disk (test_log_created relies on this).
        self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs)
        self._logfile_handler_configured = True
    def shutdown_logfile_handler(self):
        log.debug("Calling shutdown_logfile_handler")
        self._logfile_handler_configured = False
    def get_logging_options_dict(self):
        log.debug("Calling get_logging_options_dict")
        return self.config
    def set_logging_options_dict(self, opts):
        log.debug("Calling set_logging_options_dict")
        self._real_set_logging_options_dict(opts)
        self.config = self._real_get_logging_options_dict()
        log.debug("Logging options dict:\n%s", pprint.pformat(self.config))
    def setup_log_granular_levels(self, opts):
        # Intentionally a no-op; only the call matters for these tests.
        log.debug("Calling setup_log_granular_levels")
    def setup_logging(self):
        log.debug("Mocked setup_logging called")
        # Whether daemonizing or not, either on the main process or on a separate process
        # The log file is going to be configured.
        # The console is the only handler not configured if daemonizing
        # These routines are what happens on salt._logging.setup_logging
        opts = self.get_logging_options_dict()
        if (
            opts.get("configure_console_logger", True)
            and not self.is_console_handler_configured()
        ):
            self.setup_console_handler(
                log_level=opts["log_level"],
                log_format=opts["log_fmt_console"],
                date_format=opts["log_datefmt"],
            )
        if (
            opts.get("configure_file_logger", True)
            and not self.is_logfile_handler_configured()
        ):
            # File handler falls back to the console level when no explicit
            # logfile level is configured; "quiet" disables it entirely.
            log_file_level = opts["log_level_logfile"] or opts["log_level"]
            if log_file_level != "quiet":
                self.setup_logfile_handler(
                    log_path=opts[opts["log_file_key"]],
                    log_level=log_file_level,
                    log_format=opts["log_fmt_logfile"],
                    date_format=opts["log_datefmt_logfile"],
                    max_bytes=opts["log_rotate_max_bytes"],
                    backup_count=opts["log_rotate_backup_count"],
                    user=opts["user"],
                )
        if not self.is_extended_logging_configured():
            self.setup_extended_logging(opts)
        self.setup_log_granular_levels(opts["log_granular_levels"])
    def __enter__(self):
        return self
    def __exit__(self, *_):
        self._destroy()
# <----------- START TESTS ----------->
@pytest.fixture
def root_dir(tmp_path):
    # Dedicated per-test root_dir so parser runs never touch the real salt tree.
    yield tmp_path / "parsers_tests_root_dir"
@pytest.fixture(
    params=[
        "master",
        "minion",
        "proxyminion",
        "syndic",
        "saltcmd",
        "saltcp",
        "saltkey",
        "saltcall",
        "saltrun",
        "saltssh",
        "saltcloud",
        "spm",
        "saltapi",
    ]
)
def log_cli_parser(request):
    # Parametrizes every test in this module over all salt CLI parsers.
    return request.param
@pytest.fixture
def default_config(log_cli_parser):
    """
    Return the default configuration dict matching the parser under test.

    Parsers built on minion config get minion defaults, the rest get master
    defaults; proxy/cloud/spm/api parsers merge their extra defaults on top.
    """
    if log_cli_parser in ("minion", "saltcall"):
        return salt.config.DEFAULT_MINION_OPTS.copy()
    if log_cli_parser == "proxyminion":
        return {
            **salt.config.DEFAULT_MINION_OPTS.copy(),
            **salt.config.DEFAULT_PROXY_MINION_OPTS.copy(),
        }
    if log_cli_parser == "saltcloud":
        return {
            **salt.config.DEFAULT_MASTER_OPTS.copy(),
            **salt.config.DEFAULT_CLOUD_OPTS.copy(),
        }
    if log_cli_parser == "spm":
        return {
            **salt.config.DEFAULT_MASTER_OPTS.copy(),
            **salt.config.DEFAULT_SPM_OPTS.copy(),
        }
    if log_cli_parser == "saltapi":
        return {
            **salt.config.DEFAULT_MASTER_OPTS.copy(),
            **salt.config.DEFAULT_API_OPTS.copy(),
        }
    if log_cli_parser in (
        "master",
        "syndic",
        "saltcmd",
        "saltcp",
        "saltkey",
        "saltrun",
        "saltssh",
    ):
        return salt.config.DEFAULT_MASTER_OPTS.copy()
@pytest.fixture
def parser(log_cli_parser):
    """
    Map the parametrized parser name to its ``salt.utils.parsers`` class.
    """
    param_map = {
        "master": salt.utils.parsers.MasterOptionParser,
        "minion": salt.utils.parsers.MinionOptionParser,
        "proxyminion": salt.utils.parsers.ProxyMinionOptionParser,
        "syndic": salt.utils.parsers.SyndicOptionParser,
        "saltcmd": salt.utils.parsers.SaltCMDOptionParser,
        "saltcp": salt.utils.parsers.SaltCPOptionParser,
        "saltkey": salt.utils.parsers.SaltKeyOptionParser,
        "saltcall": salt.utils.parsers.SaltCallOptionParser,
        "saltrun": salt.utils.parsers.SaltRunOptionParser,
        "saltssh": salt.utils.parsers.SaltSSHOptionParser,
        "saltcloud": salt.utils.parsers.SaltCloudParser,
        "spm": salt.utils.parsers.SPMParser,
        "saltapi": salt.utils.parsers.SaltAPIParser,
    }
    return param_map[log_cli_parser]
@pytest.fixture
def config_func(log_cli_parser):
    """
    Dotted path of the config-loading function each parser calls; this is
    what the tests patch to inject ``testing_config``.
    """
    param_map = {
        "master": "salt.config.master_config",
        "minion": "salt.config.minion_config",
        "proxyminion": "salt.config.proxy_config",
        "syndic": "salt.config.syndic_config",
        "saltcmd": "salt.config.client_config",
        "saltcp": "salt.config.master_config",
        "saltkey": "salt.config.client_config",
        "saltcall": "salt.config.minion_config",
        "saltrun": "salt.config.master_config",
        "saltssh": "salt.config.master_config",
        "saltcloud": "salt.config.cloud_config",
        "spm": "salt.config.spm_config",
        "saltapi": "salt.config.api_config",
    }
    return param_map[log_cli_parser]
@pytest.fixture
def log_file(tmp_path, logfile_config_setting_name):
    # Log file path whose basename equals the config setting it is bound to.
    return str(tmp_path / logfile_config_setting_name)
@pytest.fixture
def args(log_cli_parser):
    """
    Extra positional CLI arguments each parser needs to parse successfully.
    """
    extra_args = {
        "saltcmd": ["foo", "bar.baz"],
        "saltssh": ["foo", "bar.baz"],
        "saltcp": ["foo", "bar", "baz"],
        "saltcall": ["foo.bar"],
        "saltrun": ["foo.bar"],
        "saltcloud": ["-p", "foo", "bar"],
        "spm": ["foo", "bar"],
    }
    return extra_args.get(log_cli_parser, [])
@pytest.fixture
def loglevel_config_setting_name():
    # Console log level option name; identical for every parser.
    return "log_level"
@pytest.fixture
def logfile_config_setting_name(log_cli_parser):
    """
    Name of the config option holding the log file path for this parser.
    Most parsers share ``log_file``; a few have a dedicated option.
    """
    special_cases = {
        "syndic": "syndic_log_file",
        "saltkey": "key_logfile",
        "saltssh": "ssh_log_file",
        "spm": "spm_logfile",
        "saltapi": "api_logfile",
    }
    return special_cases.get(log_cli_parser, "log_file")
@pytest.fixture
def logfile_loglevel_config_setting_name():
    # Log file level option name; identical for every parser.
    return "log_level_logfile"
@pytest.fixture
def testing_config(default_config, root_dir, logfile_config_setting_name, log_file):
    """
    Default config for the parser under test, redirected into per-test
    temporary paths.
    """
    _testing_config = default_config.copy()
    _testing_config["root_dir"] = root_dir
    for name in ("pki_dir", "cachedir"):
        _testing_config[name] = name
    _testing_config[logfile_config_setting_name] = log_file
    return _testing_config
@pytest.fixture(autouse=True)
def log_impl():
    """
    Mock logger functions
    """
    with LogImplMock() as _log_impl:
        # Collect every public callable of the mock and patch the matching
        # attribute on salt._logging for the duration of the test.
        mocked_functions = {}
        for name in dir(_log_impl):
            if name.startswith("_"):
                continue
            func = getattr(_log_impl, name)
            if not callable(func):
                continue
            mocked_functions[name] = func
        patcher = patch.multiple(salt._logging, **mocked_functions)
        with patcher:
            yield _log_impl
def test_get_log_level_cli(
    testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl
):
    """
    Tests that the log level matches the command-line specified value
    """
    # Set defaults
    default_log_level = testing_config[loglevel_config_setting_name]
    # Set log level in CLI
    log_level = "critical"
    args = ["--log-level", log_level] + args
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    console_log_level = getattr(instance.options, loglevel_config_setting_name)
    # Check console log level setting
    assert console_log_level == log_level
    # Check console logger log level
    assert log_impl.log_level_console == log_level
    assert log_impl.config[loglevel_config_setting_name] == log_level
    # CLI-set level is applied to the temp handler as well
    assert log_impl.temp_log_level == log_level
    # Check log file logger log level (unchanged from the default)
    assert log_impl.log_level_logfile == default_log_level
def test_get_log_level_config(
    testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl
):
    """
    Tests that the log level matches the configured value
    """
    # Set log level in config
    log_level = "info"
    testing_config.update({loglevel_config_setting_name: log_level})
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    console_log_level = getattr(instance.options, loglevel_config_setting_name)
    # Check console log level setting
    assert console_log_level == log_level
    # Check console logger log level
    assert log_impl.log_level_console == log_level
    assert log_impl.config[loglevel_config_setting_name] == log_level
    # Temp handler keeps its default level when the CLI does not override it
    assert log_impl.temp_log_level == "error"
    # Check log file logger log level
    assert log_impl.log_level_logfile == log_level
def test_get_log_level_default(
    testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl
):
    """
    Tests that the log level matches the default value
    """
    # Set defaults
    log_level = default_log_level = testing_config[loglevel_config_setting_name]
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    console_log_level = getattr(instance.options, loglevel_config_setting_name)
    # Check log level setting
    assert console_log_level == log_level
    # Check console logger log level
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.temp_log_level == "error"
    # Check log file logger
    assert log_impl.log_level_logfile == default_log_level
    # Check that the default is advertised in the option's help message
    assert (
        "Default: '{}'.".format(default_log_level)
        in instance.get_option("--log-level").help
    )
# log file configuration tests
def test_get_log_file_cli(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    log_file,
    logfile_config_setting_name,
):
    """
    Tests that the log file matches the command-line specified value
    """
    # Set defaults
    log_level = testing_config[loglevel_config_setting_name]
    # Set log file in CLI
    log_file = "{}_cli.log".format(log_file)
    args = ["--log-file", log_file] + args
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_file_option = getattr(instance.options, logfile_config_setting_name)
    # Check console logger
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.config[logfile_config_setting_name] == log_file
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file setting
    assert log_file_option == log_file
    # Check log file logger
    assert log_impl.log_file == log_file
def test_get_log_file_config(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    logfile_config_setting_name,
    log_file,
):
    """
    Tests that the log file matches the configured value
    """
    # Set defaults
    log_level = testing_config[loglevel_config_setting_name]
    # Set log file in config
    log_file = "{}_config.log".format(log_file)
    testing_config.update({logfile_config_setting_name: log_file})
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_file_option = getattr(instance.options, logfile_config_setting_name)
    # Check console logger
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.config[logfile_config_setting_name] == log_file
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file setting
    assert log_file_option == log_file
    # Check log file logger
    assert log_impl.log_file == log_file
def test_get_log_file_default(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    logfile_config_setting_name,
    default_config,
):
    """
    Tests that the log file matches the default value
    """
    # Set defaults
    log_level = testing_config[loglevel_config_setting_name]
    log_file = testing_config[logfile_config_setting_name]
    default_log_file = default_config[logfile_config_setting_name]
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_file_option = getattr(instance.options, logfile_config_setting_name)
    # Check console logger
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.config[logfile_config_setting_name] == log_file
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file setting
    assert log_file_option == log_file
    # Check log file logger
    assert log_impl.log_file == log_file
    # Check that the default is advertised in the option's help message
    assert (
        "Default: '{}'.".format(default_log_file)
        in instance.get_option("--log-file").help
    )
# log file log level configuration tests
def test_get_log_file_level_cli(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    logfile_loglevel_config_setting_name,
):
    """
    Tests that the file log level matches the command-line specified value
    """
    # Set defaults
    default_log_level = testing_config[loglevel_config_setting_name]
    # Set log file level in CLI
    log_level_logfile = "error"
    args = ["--log-file-level", log_level_logfile] + args
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_level_logfile_option = getattr(
        instance.options, logfile_loglevel_config_setting_name
    )
    # Check console logger (unchanged from the default)
    assert log_impl.log_level_console == default_log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == default_log_level
    assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file level setting
    assert log_level_logfile_option == log_level_logfile
    # Check log file logger
    assert log_impl.log_level_logfile == log_level_logfile
def test_get_log_file_level_config(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    logfile_loglevel_config_setting_name,
):
    """
    Tests that the log file level matches the configured value
    """
    # Set defaults
    log_level = testing_config[loglevel_config_setting_name]
    # Set log file level in config
    log_level_logfile = "info"
    testing_config.update({logfile_loglevel_config_setting_name: log_level_logfile})
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_level_logfile_option = getattr(
        instance.options, logfile_loglevel_config_setting_name
    )
    # Check console logger
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file level setting
    assert log_level_logfile_option == log_level_logfile
    # Check log file logger
    assert log_impl.log_level_logfile == log_level_logfile
def test_get_log_file_level_default(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    logfile_loglevel_config_setting_name,
):
    """
    Tests that the log file level matches the default value
    """
    # Set defaults
    default_log_level = testing_config[loglevel_config_setting_name]
    log_level = default_log_level
    log_level_logfile = default_log_level
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_level_logfile_option = getattr(
        instance.options, logfile_loglevel_config_setting_name
    )
    # Check console logger
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file level setting
    assert log_level_logfile_option == log_level_logfile
    # Check log file logger
    assert log_impl.log_level_logfile == log_level_logfile
    # Check that the default is advertised in the option's help message
    assert (
        "Default: '{}'.".format(default_log_level)
        in instance.get_option("--log-file-level").help
    )
def test_get_console_log_level_with_file_log_level(
    testing_config,
    loglevel_config_setting_name,
    args,
    parser,
    config_func,
    log_impl,
    logfile_loglevel_config_setting_name,
):  # pylint: disable=invalid-name
    """
    Tests that both console log level and log file level setting are working together
    """
    # Console level from config, file level from the CLI
    log_level = "critical"
    log_level_logfile = "debug"
    args = ["--log-file-level", log_level_logfile] + args
    testing_config.update({loglevel_config_setting_name: log_level})
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    log_level_logfile_option = getattr(
        instance.options, logfile_loglevel_config_setting_name
    )
    # Check console logger
    assert log_impl.log_level_console == log_level
    # Check extended logger
    assert log_impl.config[loglevel_config_setting_name] == log_level
    assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile
    # Check temp logger
    assert log_impl.temp_log_level == "error"
    # Check log file level setting
    assert log_level_logfile_option == log_level_logfile
    # Check log file logger
    assert log_impl.log_level_logfile == log_level_logfile
def test_log_created(
    testing_config, args, parser, config_func, logfile_config_setting_name, log_file
):
    """
    Tests that the log file is created on disk by ``parse_args()``.
    """
    testing_config.update({"log_file": str(log_file)})
    log_file_name = str(log_file)
    # The ``log_file`` fixture names the file after the parser's logfile
    # setting, so when this parser uses a dedicated option (e.g.
    # ``key_logfile``) set that option too. The previous code keyed the
    # update by the file *path* (``{log_file_name: ...}``), a config key no
    # parser ever reads.
    if os.path.basename(log_file_name) != "log_file":
        testing_config.update({logfile_config_setting_name: str(log_file)})
    instance = parser()
    with patch(config_func, MagicMock(return_value=testing_config)):
        instance.parse_args(args)
    assert os.path.exists(log_file_name)
def test_callbacks_uniqueness(parser):
    """
    Test that the callbacks are only added once, no matter
    how many instances of the parser we create
    """
    mixin_container_names = (
        "_mixin_setup_funcs",
        "_mixin_process_funcs",
        "_mixin_after_parsed_funcs",
        "_mixin_before_exit_funcs",
    )

    def _callback_counts():
        # Instantiate a fresh parser and count the entries of every
        # mixin callback container.
        instance = parser()
        return {
            name: len(getattr(instance, name)) for name in mixin_container_names
        }

    # Creating the parser a second time must not grow any container.
    assert _callback_counts() == _callback_counts()
def test_verify_log_warning_logged(args, config_func, testing_config, parser, caplog):
    """
    Running with ``--log-level debug`` must emit the insecure-logging warning.
    """
    args = ["--log-level", "debug"] + args
    with caplog.at_level(logging.DEBUG):
        instance = parser()
        with patch(config_func, MagicMock(return_value=testing_config)):
            instance.parse_args(args)
        assert (
            "Insecure logging configuration detected! Sensitive data may be logged."
            in caplog.messages
        )

View file

@ -0,0 +1,216 @@
"""
Tests the SaltfileMixIn.
"""
import optparse
import shutil
import pytest
import salt.exceptions
import salt.utils.parsers
from tests.support.helpers import patched_environ
from tests.support.mock import patch
class MockSaltfileParser(
    salt.utils.parsers.OptionParser,
    salt.utils.parsers.SaltfileMixIn,
    metaclass=salt.utils.parsers.OptionParserMeta,
):
    """
    Minimal option parser mixing in ``SaltfileMixIn`` so Saltfile handling
    can be exercised without a full salt CLI parser.
    """
    def __init__(self, *args, **kwargs):
        salt.utils.parsers.OptionParser.__init__(self, *args, **kwargs)
        self.config = {}
    def _mixin_setup(self):
        """
        Register a few representative options (with and without ``dest``)
        that Saltfile values can be mapped onto.
        """
        self.add_option(
            "-l",
            "--log-level",
            dest="log_level",
            default="warning",
            help="The log level for salt.",
        )
        group = self.output_options_group = optparse.OptionGroup(
            self, "Output Options", "Configure your preferred output format."
        )
        self.add_option_group(group)
        group.add_option(
            "--out",
            "--output",
            dest="output",
            help=(
                "Print the output from the '{}' command using the "
                "specified outputter.".format(
                    self.get_prog_name(),
                )
            ),
        )
        group.add_option(
            "--out-file",
            "--output-file",
            dest="output_file",
            default=None,
            help="Write the output to the specified file.",
        )
        # Deliberately has no ``dest`` to cover that code path in the mixin.
        group.add_option(
            "--version-arg",
            action="version",
            help="Option to test no dest",
        )
@pytest.fixture
def parser():
    # Fresh mock Saltfile parser per test.
    return MockSaltfileParser()
@pytest.fixture
def saltfile(tmp_path):
    # Empty (but existing) Saltfile; tests write contents as needed.
    fp = tmp_path / "Saltfile"
    fp.touch()
    return fp
def test_saltfile_in_environment(parser, saltfile):
    """
    Test that the Saltfile is picked up from the SALT_SALTFILE environment
    variable
    """
    with patched_environ(SALT_SALTFILE=str(saltfile)):
        parser.parse_args([])
        assert parser.options.saltfile == str(saltfile)
def test_saltfile_option(parser, saltfile):
    """
    Test setting the Saltfile via the ``--saltfile`` CLI flag
    """
    parser.parse_args(["--saltfile", str(saltfile)])
    assert parser.options.saltfile == str(saltfile)
def test_bad_saltfile_option(parser, saltfile, tmp_path):
    """
    Test that pointing ``--saltfile`` at a non-existing path exits with an
    error
    """
    with pytest.raises(SystemExit):
        parser.parse_args(["--saltfile", str(tmp_path / "fake_dir")])
def test_saltfile_cwd(parser, saltfile, tmp_path):
    """
    Test that a Saltfile in the current working directory is auto-detected
    """
    with patch("os.getcwd", return_value=str(tmp_path)) as cwd_mock:
        parser.parse_args([])
        assert parser.options.saltfile == str(saltfile)
        cwd_mock.assert_called_once()
def test_saltfile_cwd_doesnt_exist(parser, saltfile, tmp_path):
    """
    Test that a non-existing current working directory leaves no Saltfile set
    """
    with patch("os.getcwd", return_value=str(tmp_path / "fake_dir")) as cwd_mock:
        parser.parse_args([])
        assert parser.options.saltfile is None
def test_saltfile_user_home(parser, saltfile, tmp_path):
    """
    Test that a Saltfile in ``~/.salt/`` is used when the cwd has none
    """
    # cwd deliberately contains no Saltfile, forcing the home-dir fallback.
    fake_dir = tmp_path / "fake_dir"
    fake_dir.mkdir()
    with patch("os.getcwd", return_value=str(fake_dir)) as cwd_mock:
        with patch("os.path.expanduser", return_value=str(tmp_path)) as eu_mock:
            salt_subdir = tmp_path / ".salt"
            salt_subdir.mkdir()
            dest = str(salt_subdir / "Saltfile")
            shutil.copy(str(saltfile), dest)
            parser.parse_args([])
            assert parser.options.saltfile == dest
            cwd_mock.assert_called_once()
            eu_mock.assert_called_with("~")
def test_bad_saltfile(parser, saltfile):
    """
    Test a Saltfile containing invalid YAML
    """
    contents = """
    bad "yaml":
      - this is: bad yaml
      - bad yaml=data:
        - {"bad": yaml, "data": "yaml"}
    """
    saltfile.write_text(contents)
    # It raises two errors, let's catch them both
    with pytest.raises(SystemExit):
        with pytest.raises(salt.exceptions.SaltConfigurationError):
            parser.parse_args(["--saltfile", str(saltfile)])
def test_saltfile_without_prog_name(parser, saltfile):
    """
    Test a Saltfile with valid YAML but without the program name in it
    """
    contents = "good: yaml"
    saltfile.write_text(contents)
    # This should just run cleanly
    parser.parse_args(["--saltfile", str(saltfile)])
def test_saltfile(parser, saltfile):
    """
    Test that a valid Saltfile applies its values to the parser options.
    """
    contents = """
    __main__.py:
      log_level: debug
      output: json
    """
    saltfile.write_text(contents)
    parser.parse_args(["--saltfile", str(saltfile)])
    # NOTE: a leftover debug ``print(parser.option_list)`` was removed here;
    # it only polluted test output.
    assert parser.options.log_level == "debug"
    assert parser.options.output == "json"
def test_saltfile_unusual_option(parser, saltfile):
    """
    Test a Saltfile setting an option the parser does not define
    """
    contents = """
    __main__.py:
      go: birds
    """
    saltfile.write_text(contents)
    parser.parse_args(["--saltfile", str(saltfile)])
    # The mixin still exposes the unknown key on the options namespace.
    assert parser.options.go == "birds"
def test_saltfile_cli_override(parser, saltfile):
    """
    Test that options passed on the CLI take precedence over Saltfile values
    """
    contents = """
    __main__.py:
      log_level: debug
      output: json
      output_file: /fake/file
    """
    saltfile.write_text(contents)
    parser.parse_args(
        [
            "--saltfile",
            str(saltfile),
            "--log-level",
            "info",
            "--out-file",
            "/still/fake/file",
        ]
    )
    # CLI wins where given; Saltfile fills in the rest.
    assert parser.options.log_level == "info"
    assert parser.options.output == "json"
    assert parser.options.output_file == "/still/fake/file"

View file

@ -13,6 +13,13 @@ import tempfile
import pytest
try:
from smbprotocol.exceptions import CannotDelete
HAS_PSEXEC = True
except ImportError:
HAS_PSEXEC = False
import salt.utils.cloud as cloud
from salt.exceptions import SaltCloudException
from salt.utils.cloud import __ssh_gateway_arguments as ssh_gateway_arguments
@ -208,7 +215,8 @@ def test_deploy_windows_custom_port():
mock.assert_called_once_with("test", "Administrator", None, 1234)
def test_run_psexec_command_cleanup_lingering_paexec():
@pytest.mark.skipif(not HAS_PSEXEC, reason="Missing SMB Protocol Library")
def test_run_psexec_command_cleanup_lingering_paexec(caplog):
pytest.importorskip("pypsexec.client", reason="Requires PyPsExec")
mock_psexec = patch("salt.utils.cloud.PsExecClient", autospec=True)
mock_scmr = patch("salt.utils.cloud.ScmrService", autospec=True)
@ -232,11 +240,32 @@ def test_run_psexec_command_cleanup_lingering_paexec():
)
mock_client.return_value.cleanup.assert_called_once()
# Testing handling an error when it can't delete the PAexec binary
with mock_scmr, mock_rm_svc, mock_psexec as mock_client:
mock_client.return_value.session = MagicMock(username="Gary")
mock_client.return_value.connection = MagicMock(server_name="Krabbs")
mock_client.return_value.run_executable.return_value = (
"Sandy",
"MermaidMan",
"BarnicleBoy",
)
mock_client.return_value.cleanup = MagicMock(side_effect=CannotDelete())
cloud.run_psexec_command(
"spongebob",
"squarepants",
"patrick",
"squidward",
"plankton",
)
assert "Exception cleaning up PAexec:" in caplog.text
mock_client.return_value.disconnect.assert_called_once()
@pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.")
def test_deploy_windows_programdata():
"""
Test deploy_windows with a custom port
Test deploy_windows to ProgramData
"""
mock_true = MagicMock(return_value=True)
mock_tuple = MagicMock(return_value=(0, 0, 0))

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,4 @@
import logging
import os
import signal
@ -43,10 +44,13 @@ def test_log_sanitize(test_cmd, caplog):
cmd,
log_stdout=True,
log_stderr=True,
log_stdout_level="debug",
log_stderr_level="debug",
log_sanitize=password,
stream_stdout=False,
stream_stderr=False,
)
ret = term.recv()
with caplog.at_level(logging.DEBUG):
ret = term.recv()
assert password not in caplog.text
assert "******" in caplog.text

View file

@ -34,6 +34,7 @@ import types
import attr
import pytest
import pytestskipmarkers.utils.platform
import tornado.ioloop
import tornado.web
from pytestshellutils.exceptions import ProcessFailed
@ -1644,6 +1645,10 @@ class VirtualEnv:
return pathlib.Path(self.venv_python).parent
def __enter__(self):
if pytestskipmarkers.utils.platform.is_fips_enabled():
pytest.skip(
"Test cannot currently create virtual environments on a FIPS enabled platform"
)
try:
self._create_virtualenv()
except subprocess.CalledProcessError:

View file

@ -3,6 +3,7 @@ import time
import attr
import pytest
from pytestskipmarkers.utils import platform
from saltfactories.utils import random_string
# This `pytest.importorskip` here actually works because this module
@ -102,6 +103,10 @@ def mysql_image(request):
@pytest.fixture(scope="module")
def create_mysql_combo(mysql_image):
if platform.is_fips_enabled():
if mysql_image.name in ("mysql-server", "percona") and mysql_image.tag == "8.0":
pytest.skip(f"These tests fail on {mysql_image.name}:{mysql_image.tag}")
return MySQLCombo(
mysql_name=mysql_image.name,
mysql_version=mysql_image.tag,

View file

@ -1,4 +1,5 @@
import logging
import sys
import time
import uuid
@ -11,11 +12,6 @@ from tests.conftest import CODE_DIR
log = logging.getLogger(__name__)
def _install_salt_in_container(container):
ret = container.run("python3", "-m", "pip", "install", "/salt")
log.debug("Install Salt in the container: %s", ret)
@attr.s(kw_only=True, slots=True)
class SaltVirtMinionContainerFactory(SaltMinion):
@ -72,7 +68,7 @@ class SaltVirtMinionContainerFactory(SaltMinion):
self.container_start_check(self._check_script_path_exists)
for port in (self.sshd_port, self.libvirt_tcp_port, self.libvirt_tls_port):
self.check_ports[port] = port
self.before_start(_install_salt_in_container, self, on_container=False)
self.before_start(self._install_salt_in_container, on_container=False)
def _check_script_path_exists(self, timeout_at):
while time.time() <= timeout_at:
@ -85,3 +81,37 @@ class SaltVirtMinionContainerFactory(SaltMinion):
else:
return False
return True
def _install_salt_in_container(self):
ret = self.run("bash", "-c", "echo $SALT_PY_VERSION")
assert ret.returncode == 0
if not ret.stdout:
log.warning(
"The 'SALT_PY_VERSION' environment variable is not set on the container"
)
salt_py_version = 3
ret = self.run(
"python3",
"-c",
"import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))",
)
assert ret.returncode == 0
if not ret.stdout:
requirements_py_version = "{}.{}".format(*sys.version_info)
else:
requirements_py_version = ret.stdout.strip()
else:
salt_py_version = requirements_py_version = ret.stdout.strip()
self.python_executable = f"python{salt_py_version}"
ret = self.run(
self.python_executable,
"-m",
"pip",
"install",
f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt",
"/salt",
)
log.debug("Install Salt in the container: %s", ret)
assert ret.returncode == 0

View file

@ -28,6 +28,10 @@ except ImportError:
# https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12
REQUIRED_BOTO3_VERSION = "1.2.1"
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
def __virtual__():
"""

View file

@ -25,6 +25,10 @@ except ImportError:
# https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12
REQUIRED_BOTO3_VERSION = "1.2.1"
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
def __virtual__():
"""

View file

@ -23,6 +23,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module

View file

@ -22,6 +22,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module,unused-import
# the boto_cloudtrail module relies on the connect_to_region() method

View file

@ -22,6 +22,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module,unused-import
log = logging.getLogger(__name__)

View file

@ -21,6 +21,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module

View file

@ -21,6 +21,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module

View file

@ -23,6 +23,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module,unused-import
# the boto_iot module relies on the connect_to_region() method

View file

@ -26,6 +26,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module
# the boto_lambda module relies on the connect_to_region() method

View file

@ -22,6 +22,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module,unused-import
# the boto_s3_bucket module relies on the connect_to_region() method

View file

@ -2,32 +2,27 @@
virt execution module unit tests
"""
# pylint: disable=3rd-party-module-not-gated
import datetime
import os
import shutil
import tempfile
import xml.etree.ElementTree as ET
import pytest
import salt.config
import salt.modules.config as config
import salt.modules.virt as virt
import salt.syspaths
import salt.utils.yaml
from salt.exceptions import CommandExecutionError, SaltInvocationError
# pylint: disable=import-error
from tests.support.helpers import dedent
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase
# pylint: disable=invalid-name,protected-access,attribute-defined-outside-init,too-many-public-methods,unused-argument
class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors
class LibvirtMock(MagicMock):
"""
Libvirt library mock
"""
@ -1882,6 +1877,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
],
)
@pytest.mark.skip_on_fips_enabled_platform
def test_init(self):
"""
Test init() function

View file

@ -20,12 +20,13 @@ from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.skip_on_windows(
reason=(
"Special steps are required for proper SSL validation because "
"`easy_install` is too old(and deprecated)."
)
)
),
]
KNOWN_VIRTUALENV_BINARY_NAMES = (

View file

@ -28,6 +28,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module

View file

@ -25,6 +25,10 @@ try:
except ImportError:
HAS_BOTO = False
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
# pylint: enable=import-error,no-name-in-module

View file

@ -11,12 +11,13 @@ from tests.support.runtests import RUNTIME_VARS
from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
pytest.mark.skip_on_windows(
reason=(
"Special steps are required for proper SSL validation because "
"`easy_install` is too old(and deprecated)."
)
)
),
]

View file

@ -24,6 +24,10 @@ except ImportError:
REQUIRED_BOTO3_VERSION = "1.2.1"
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
@pytest.mark.skipif(HAS_BOTO3 is False, reason="The boto module must be installed.")
@pytest.mark.skipif(

View file

@ -53,6 +53,11 @@ except ImportError:
return stub_function
pytestmark = [
pytest.mark.skip_on_fips_enabled_platform,
]
required_boto_version = "2.0.0"
required_boto3_version = "1.2.1"
region = "us-east-1"

View file

@ -332,6 +332,7 @@ class TestPrintOption(TestCase):
option = salt.utils.find.PrintOption("print", "path user")
self.assertEqual(option.requires(), salt.utils.find._REQUIRES_STAT)
@pytest.mark.skip_on_fips_enabled_platform
def test_print_option_execute(self):
hello_file = os.path.join(self.tmpdir, "hello.txt")
with salt.utils.files.fopen(hello_file, "w") as fp_:

View file

@ -1,3 +1,5 @@
import pytest
import salt.utils.hashutils
from tests.support.unit import TestCase
@ -87,6 +89,7 @@ class HashutilsTestCase(TestCase):
self.bytes,
)
@pytest.mark.skip_on_fips_enabled_platform
def test_md5_digest(self):
"""
Ensure that this function converts the value passed to bytes before