mirror of
https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00
Merge 3006.x into 3007.x
This commit is contained in:
commit
7d8338b85f
72 changed files with 14521 additions and 178 deletions
|
@ -3,26 +3,15 @@ Module for OpenSCAP Management
|
|||
|
||||
"""
|
||||
|
||||
|
||||
import argparse
|
||||
import os.path
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from subprocess import PIPE, Popen
|
||||
|
||||
import salt.utils.versions
|
||||
|
||||
ArgumentParser = object
|
||||
|
||||
try:
|
||||
import argparse # pylint: disable=minimum-python-version
|
||||
|
||||
ArgumentParser = argparse.ArgumentParser
|
||||
HAS_ARGPARSE = True
|
||||
except ImportError: # python 2.6
|
||||
HAS_ARGPARSE = False
|
||||
|
||||
|
||||
_XCCDF_MAP = {
|
||||
"eval": {
|
||||
"parser_arguments": [(("--profile",), {"required": True})],
|
||||
|
@ -35,11 +24,7 @@ _XCCDF_MAP = {
|
|||
}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
return HAS_ARGPARSE, "argparse module is required."
|
||||
|
||||
|
||||
class _ArgumentParser(ArgumentParser):
|
||||
class _ArgumentParser(argparse.ArgumentParser):
|
||||
def __init__(self, action=None, *args, **kwargs):
|
||||
super().__init__(*args, prog="oscap", **kwargs)
|
||||
self.add_argument("action", choices=["eval"])
|
||||
|
@ -47,7 +32,7 @@ class _ArgumentParser(ArgumentParser):
|
|||
for params, kwparams in _XCCDF_MAP["eval"]["parser_arguments"]:
|
||||
self.add_argument(*params, **kwparams)
|
||||
|
||||
def error(self, message, *args, **kwargs):
|
||||
def error(self, message, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
raise Exception(message)
|
||||
|
||||
|
||||
|
@ -168,7 +153,9 @@ def xccdf_eval(
|
|||
|
||||
if success:
|
||||
tempdir = tempfile.mkdtemp()
|
||||
proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
|
||||
proc = subprocess.Popen(
|
||||
cmd_opts, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=tempdir
|
||||
)
|
||||
(stdoutdata, error) = proc.communicate()
|
||||
success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
|
||||
if proc.returncode < 0:
|
||||
|
@ -225,12 +212,18 @@ def xccdf(params):
|
|||
if success:
|
||||
cmd = _XCCDF_MAP[action]["cmd_pattern"].format(args.profile, policy)
|
||||
tempdir = tempfile.mkdtemp()
|
||||
proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir)
|
||||
proc = subprocess.Popen(
|
||||
shlex.split(cmd),
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
cwd=tempdir,
|
||||
)
|
||||
(stdoutdata, error) = proc.communicate()
|
||||
success = _OSCAP_EXIT_CODES_MAP.get(proc.returncode, False)
|
||||
if proc.returncode < 0:
|
||||
error += f"\nKilled by signal {proc.returncode}\n".encode("ascii")
|
||||
returncode = proc.returncode
|
||||
success = _OSCAP_EXIT_CODES_MAP.get(returncode, False)
|
||||
if success:
|
||||
if not __salt__["cp.push_dir"](tempdir):
|
||||
success = False
|
||||
|
|
105
tests/pytests/functional/modules/test_mac_assistive.py
Normal file
105
tests/pytests/functional/modules/test_mac_assistive.py
Normal file
|
@ -0,0 +1,105 @@
|
|||
"""
|
||||
:codeauthor: Nicole Thomas <nicole@saltstack.com>
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def assistive(modules):
|
||||
return modules.assistive
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def osa_script(assistive):
|
||||
osa_script_path = "/usr/bin/osascript"
|
||||
try:
|
||||
ret = assistive.install(osa_script_path, True)
|
||||
yield osa_script_path
|
||||
except CommandExecutionError as exc:
|
||||
pytest.skip(f"Unable to install {osa_script}: {exc}")
|
||||
finally:
|
||||
osa_script_ret = assistive.installed(osa_script_path)
|
||||
if osa_script_ret:
|
||||
assistive.remove(osa_script_path)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def install_remove_pkg_name(assistive):
|
||||
smile_bundle = "com.smileonmymac.textexpander"
|
||||
try:
|
||||
yield smile_bundle
|
||||
finally:
|
||||
smile_bundle_present = assistive.installed(smile_bundle)
|
||||
if smile_bundle_present:
|
||||
assistive.remove(smile_bundle)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_install_and_remove(assistive, install_remove_pkg_name):
|
||||
"""
|
||||
Tests installing and removing a bundled ID or command to use assistive access.
|
||||
"""
|
||||
ret = assistive.install(install_remove_pkg_name)
|
||||
assert ret
|
||||
ret = assistive.remove(install_remove_pkg_name)
|
||||
assert ret
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_installed(assistive, osa_script):
|
||||
"""
|
||||
Tests the True and False return of assistive.installed.
|
||||
"""
|
||||
# OSA script should have been installed in _setup_teardown_vars function
|
||||
ret = assistive.installed(osa_script)
|
||||
assert ret
|
||||
# Clean up install
|
||||
assistive.remove(osa_script)
|
||||
# Installed should now return False
|
||||
ret = assistive.installed(osa_script)
|
||||
assert not ret
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_enable(assistive, osa_script):
|
||||
"""
|
||||
Tests setting the enabled status of a bundled ID or command.
|
||||
"""
|
||||
# OSA script should have been installed and enabled in _setup_teardown_vars function
|
||||
# Now let's disable it, which should return True.
|
||||
ret = assistive.enable(osa_script, False)
|
||||
assert ret
|
||||
# Double check the script was disabled, as intended.
|
||||
ret = assistive.enabled(osa_script)
|
||||
assert not ret
|
||||
# Now re-enable
|
||||
ret = assistive.enable(osa_script)
|
||||
assert ret
|
||||
# Double check the script was enabled, as intended.
|
||||
ret = assistive.enabled(osa_script)
|
||||
assert ret
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_enabled(assistive, osa_script):
|
||||
"""
|
||||
Tests if a bundled ID or command is listed in assistive access returns True.
|
||||
"""
|
||||
# OSA script should have been installed in _setup_teardown_vars function, which sets
|
||||
# enabled to True by default.
|
||||
ret = assistive.enabled(osa_script)
|
||||
assert ret
|
||||
# Disable OSA Script
|
||||
assistive.enable(osa_script, False)
|
||||
# Assert against new disabled status
|
||||
ret = assistive.enabled(osa_script)
|
||||
assert not ret
|
132
tests/pytests/functional/modules/test_mac_brew_pkg.py
Normal file
132
tests/pytests/functional/modules/test_mac_brew_pkg.py
Normal file
|
@ -0,0 +1,132 @@
|
|||
"""
|
||||
:codeauthor: Nicole Thomas <nicole@saltstack.com>
|
||||
:codeauthor: Gareth J. Greenaway <greenaway@vmware.com>
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
pytest.mark.skip_if_binaries_missing("brew"),
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def pkg(modules):
|
||||
return modules.pkg
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pkg_1_name(pkg):
|
||||
pkg_name = "algol68g"
|
||||
try:
|
||||
yield pkg_name
|
||||
finally:
|
||||
pkg_list = pkg.list_pkgs()
|
||||
|
||||
# Remove package if installed
|
||||
if pkg_name in pkg_list:
|
||||
pkg.remove(pkg_name)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pkg_2_name(pkg):
|
||||
pkg_name = "acme"
|
||||
try:
|
||||
pkg.install(pkg_name)
|
||||
pkg_list = pkg.list_pkgs()
|
||||
if pkg_name not in pkg_list:
|
||||
pytest.skip(f"Failed to install the '{pkg_name}' package to delete")
|
||||
yield pkg_name
|
||||
finally:
|
||||
pkg_list = pkg.list_pkgs()
|
||||
|
||||
# Remove package if still installed
|
||||
if pkg_name in pkg_list:
|
||||
pkg.remove(pkg_name)
|
||||
|
||||
|
||||
def test_brew_install(pkg, pkg_1_name):
|
||||
"""
|
||||
Tests the installation of packages
|
||||
"""
|
||||
pkg.install(pkg_1_name)
|
||||
pkg_list = pkg.list_pkgs()
|
||||
assert pkg_1_name in pkg_list
|
||||
|
||||
|
||||
def test_remove(pkg, pkg_2_name):
|
||||
"""
|
||||
Tests the removal of packages
|
||||
"""
|
||||
pkg.remove(pkg_2_name)
|
||||
pkg_list = pkg.list_pkgs()
|
||||
assert pkg_2_name not in pkg_list
|
||||
|
||||
|
||||
def test_version(pkg, pkg_1_name):
|
||||
"""
|
||||
Test pkg.version for mac. Installs a package and then checks we can get
|
||||
a version for the installed package.
|
||||
"""
|
||||
pkg.install(pkg_1_name)
|
||||
pkg_list = pkg.list_pkgs()
|
||||
version = pkg.version(pkg_1_name)
|
||||
assert version
|
||||
assert pkg_1_name in pkg_list
|
||||
# make sure the version is accurate and is listed in the pkg_list
|
||||
assert version in str(pkg_list[pkg_1_name])
|
||||
|
||||
|
||||
def test_latest_version(pkg, pkg_1_name):
|
||||
"""
|
||||
Test pkg.latest_version:
|
||||
- get the latest version available
|
||||
- install the package
|
||||
- get the latest version available
|
||||
- check that the latest version is empty after installing it
|
||||
"""
|
||||
pkg.remove(pkg_1_name)
|
||||
uninstalled_latest = pkg.latest_version(pkg_1_name)
|
||||
|
||||
pkg.install(pkg_1_name)
|
||||
installed_latest = pkg.latest_version(pkg_1_name)
|
||||
version = pkg.version(pkg_1_name)
|
||||
assert isinstance(uninstalled_latest, str)
|
||||
assert installed_latest == version
|
||||
|
||||
|
||||
def test_refresh_db(pkg):
|
||||
"""
|
||||
Integration test to ensure pkg.refresh_db works with brew
|
||||
"""
|
||||
refresh_brew = pkg.refresh_db()
|
||||
assert refresh_brew
|
||||
|
||||
|
||||
def test_list_upgrades(pkg, pkg_1_name):
|
||||
"""
|
||||
Test pkg.list_upgrades: data is in the form {'name1': 'version1', 'name2': 'version2', ... }
|
||||
"""
|
||||
upgrades = pkg.list_upgrades()
|
||||
assert isinstance(upgrades, dict)
|
||||
if upgrades:
|
||||
for name in upgrades:
|
||||
assert isinstance(name, str)
|
||||
assert isinstance(upgrades[name], str)
|
||||
|
||||
|
||||
def test_info_installed(pkg, pkg_1_name):
|
||||
"""
|
||||
Test pkg.info_installed: info returned has certain fields used by
|
||||
mac_brew.latest_version
|
||||
"""
|
||||
pkg.install(pkg_1_name)
|
||||
info = pkg.info_installed(pkg_1_name)
|
||||
assert pkg_1_name in info
|
||||
assert "versions" in info[pkg_1_name]
|
||||
assert "revision" in info[pkg_1_name]
|
||||
assert "stable" in info[pkg_1_name]["versions"]
|
74
tests/pytests/functional/modules/test_mac_desktop.py
Normal file
74
tests/pytests/functional/modules/test_mac_desktop.py
Normal file
|
@ -0,0 +1,74 @@
|
|||
"""
|
||||
Integration tests for the mac_desktop execution module.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def desktop(modules):
|
||||
return modules.desktop
|
||||
|
||||
|
||||
def test_get_output_volume(desktop):
|
||||
"""
|
||||
Tests the return of get_output_volume.
|
||||
"""
|
||||
ret = desktop.get_output_volume()
|
||||
assert ret is not None
|
||||
|
||||
|
||||
def test_set_output_volume(desktop):
|
||||
"""
|
||||
Tests the return of set_output_volume.
|
||||
"""
|
||||
current_vol = desktop.get_output_volume()
|
||||
try:
|
||||
to_set = 10
|
||||
if current_vol == str(to_set):
|
||||
to_set += 2
|
||||
new_vol = desktop.set_output_volume(str(to_set))
|
||||
check_vol = desktop.get_output_volume()
|
||||
assert new_vol == check_vol
|
||||
finally:
|
||||
# Set volume back to what it was before
|
||||
desktop.set_output_volume(current_vol)
|
||||
|
||||
|
||||
def test_screensaver(desktop):
|
||||
"""
|
||||
Tests the return of the screensaver function.
|
||||
"""
|
||||
try:
|
||||
ret = desktop.screensaver()
|
||||
except CommandExecutionError as exc:
|
||||
pytest.skip("Skipping. Screensaver unavailable.")
|
||||
assert ret
|
||||
|
||||
|
||||
def test_lock(desktop):
|
||||
"""
|
||||
Tests the return of the lock function.
|
||||
"""
|
||||
try:
|
||||
ret = desktop.lock()
|
||||
except CommandExecutionError as exc:
|
||||
pytest.skip("Skipping. Unable to lock screen.")
|
||||
assert ret
|
||||
|
||||
|
||||
def test_say(desktop):
|
||||
"""
|
||||
Tests the return of the say function.
|
||||
"""
|
||||
ret = desktop.say("hello", "world")
|
||||
assert ret
|
176
tests/pytests/functional/modules/test_mac_group.py
Normal file
176
tests/pytests/functional/modules/test_mac_group.py
Normal file
|
@ -0,0 +1,176 @@
|
|||
"""
|
||||
:codeauthor: Nicole Thomas <nicole@saltstack.com>
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from saltfactories.utils import random_string
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def group(modules):
|
||||
return modules.group
|
||||
|
||||
|
||||
# Create group name strings for tests
|
||||
@pytest.fixture(scope="module")
|
||||
def non_existing_group_name(group):
|
||||
group_name = random_string("group-", lowercase=False)
|
||||
try:
|
||||
yield group_name
|
||||
finally:
|
||||
# Delete the added group
|
||||
group_info = group.info(group_name)
|
||||
if group_info:
|
||||
group.delete(group_name)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def existing_group_name(group):
|
||||
group_name = random_string("group-", lowercase=False)
|
||||
try:
|
||||
ret = group.add(group_name, 4567)
|
||||
if ret is not True:
|
||||
pytest.skip("Failed to create a group to delete")
|
||||
yield group_name
|
||||
finally:
|
||||
# Delete the added group
|
||||
group_info = group.info(group_name)
|
||||
if group_info:
|
||||
group.delete(group_name)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def non_existing_user(group):
|
||||
group_name = random_string("user-", lowercase=False)
|
||||
try:
|
||||
yield group_name
|
||||
finally:
|
||||
# Delete the added group
|
||||
group_info = group.info(group_name)
|
||||
if group_info:
|
||||
group.delete(group_name)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def existing_user(group, existing_group_name):
|
||||
group_name = random_string("user-", lowercase=False)
|
||||
try:
|
||||
ret = group.adduser(existing_group_name, group_name)
|
||||
if ret is not True:
|
||||
pytest.skip("Failed to create an existing group member")
|
||||
yield group_name
|
||||
finally:
|
||||
# Delete the added group
|
||||
group_info = group.info(group_name)
|
||||
if group_info:
|
||||
group.delete(group_name)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def rep_user_group():
|
||||
yield random_string("RS-", lowercase=False)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def non_existing_group_member(group):
|
||||
group_name = random_string("user-", lowercase=False)
|
||||
try:
|
||||
yield group_name
|
||||
finally:
|
||||
# Delete the added group
|
||||
group_info = group.info(group_name)
|
||||
if group_info:
|
||||
group.delete(group_name)
|
||||
|
||||
|
||||
def test_mac_group_add(group, non_existing_group_name):
|
||||
"""
|
||||
Tests the add group function
|
||||
"""
|
||||
group.add(non_existing_group_name, 3456)
|
||||
group_info = group.info(non_existing_group_name)
|
||||
assert group_info["name"] == non_existing_group_name
|
||||
|
||||
|
||||
def test_mac_group_delete(group, existing_group_name):
|
||||
"""
|
||||
Tests the delete group function
|
||||
"""
|
||||
ret = group.delete(existing_group_name)
|
||||
assert ret
|
||||
|
||||
|
||||
def test_mac_group_chgid(group, existing_group_name):
|
||||
"""
|
||||
Tests changing the group id
|
||||
"""
|
||||
gid = 6789
|
||||
group_info = group.info(existing_group_name)
|
||||
assert group_info["gid"] != gid
|
||||
group.chgid(existing_group_name, gid)
|
||||
group_info = group.info(existing_group_name)
|
||||
assert group_info["gid"] == gid
|
||||
|
||||
|
||||
def test_mac_adduser(group, non_existing_group_name, non_existing_user):
|
||||
"""
|
||||
Tests adding user to the group
|
||||
"""
|
||||
# Create a group to use for test - If unsuccessful, skip the test
|
||||
ret = group.add(non_existing_group_name, 5678)
|
||||
if ret is not True:
|
||||
group.delete(non_existing_group_name)
|
||||
pytest.skip("Failed to create a group to manipulate")
|
||||
|
||||
group.adduser(non_existing_group_name, non_existing_user)
|
||||
group_info = group.info(non_existing_group_name)
|
||||
assert non_existing_user in group_info["members"]
|
||||
assert group_info["members"] == [non_existing_user]
|
||||
|
||||
|
||||
def test_mac_deluser(group, existing_group_name, existing_user):
|
||||
"""
|
||||
Test deleting user from a group
|
||||
"""
|
||||
delusr = group.deluser(existing_group_name, existing_user)
|
||||
assert delusr
|
||||
|
||||
group_info = group.info(existing_group_name)
|
||||
assert existing_user not in group_info["members"]
|
||||
|
||||
|
||||
def test_mac_members(
|
||||
group, existing_group_name, existing_user, non_existing_group_member
|
||||
):
|
||||
"""
|
||||
Test replacing members of a group
|
||||
"""
|
||||
group_info = group.info(existing_group_name)
|
||||
assert non_existing_group_member not in group_info["members"]
|
||||
assert non_existing_user in group_info["members"]
|
||||
|
||||
# Replace group members
|
||||
rep_group_mem = group.members(existing_group_name, non_existing_group_member)
|
||||
assert rep_group_mem
|
||||
|
||||
# ensure new user is added to group and previous user is removed
|
||||
group_info = group.info(existing_group_name)
|
||||
assert non_existing_group_member in group_info["members"]
|
||||
assert non_existing_user not in group_info["members"]
|
||||
|
||||
|
||||
def test_mac_getent(group, existing_user, existing_group_name):
|
||||
"""
|
||||
Test returning info on all groups
|
||||
"""
|
||||
getinfo = group.getent()
|
||||
assert getinfo
|
||||
assert existing_group_name in str(getinfo)
|
||||
assert existing_user in str(getinfo)
|
129
tests/pytests/functional/modules/test_mac_keychain.py
Normal file
129
tests/pytests/functional/modules/test_mac_keychain.py
Normal file
|
@ -0,0 +1,129 @@
|
|||
"""
|
||||
Validate the mac-keychain module
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.utils.versions
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def cmd(modules):
|
||||
return modules.cmd
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def keychain(modules):
|
||||
return modules.keychain
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def setup_teardown_vars(keychain, base_env_state_tree_root_dir):
|
||||
cert = os.path.join(RUNTIME_VARS.FILES, "file", "base", "certs", "salttest.p12")
|
||||
cert_alias = "Salt Test"
|
||||
passwd = "salttest"
|
||||
|
||||
try:
|
||||
yield cert, cert_alias, passwd
|
||||
finally:
|
||||
certs_list = keychain.list_certs()
|
||||
if cert_alias in certs_list:
|
||||
keychain.uninstall(cert_alias)
|
||||
|
||||
|
||||
def test_mac_keychain_install(keychain, setup_teardown_vars):
|
||||
"""
|
||||
Tests that attempts to install a certificate
|
||||
"""
|
||||
|
||||
cert = setup_teardown_vars[0]
|
||||
cert_alias = setup_teardown_vars[1]
|
||||
passwd = setup_teardown_vars[2]
|
||||
|
||||
install_cert = keychain.install(cert, passwd)
|
||||
assert install_cert
|
||||
assert install_cert == "1 identity imported."
|
||||
|
||||
# check to ensure the cert was installed
|
||||
certs_list = keychain.list_certs()
|
||||
assert cert_alias in certs_list
|
||||
|
||||
|
||||
def test_mac_keychain_uninstall(keychain, setup_teardown_vars):
|
||||
"""
|
||||
Tests that attempts to uninstall a certificate
|
||||
"""
|
||||
|
||||
cert = setup_teardown_vars[0]
|
||||
cert_alias = setup_teardown_vars[1]
|
||||
passwd = setup_teardown_vars[2]
|
||||
|
||||
keychain.install(cert, passwd)
|
||||
certs_list = keychain.list_certs()
|
||||
|
||||
if cert_alias not in certs_list:
|
||||
keychain.uninstall(cert_alias)
|
||||
pytest.skip("Failed to install keychain")
|
||||
|
||||
# uninstall cert
|
||||
keychain.uninstall(cert_alias)
|
||||
certs_list = keychain.list_certs()
|
||||
|
||||
# check to ensure the cert was uninstalled
|
||||
assert cert_alias not in str(certs_list)
|
||||
|
||||
|
||||
@pytest.mark.skip_if_binaries_missing("openssl")
|
||||
def test_mac_keychain_get_friendly_name(keychain, shell, setup_teardown_vars):
|
||||
"""
|
||||
Test that attempts to get friendly name of a cert
|
||||
"""
|
||||
cert = setup_teardown_vars[0]
|
||||
cert_alias = setup_teardown_vars[1]
|
||||
passwd = setup_teardown_vars[2]
|
||||
|
||||
keychain.install(cert, passwd)
|
||||
certs_list = keychain.list_certs()
|
||||
if cert_alias not in certs_list:
|
||||
keychain.uninstall(cert_alias)
|
||||
pytest.skip("Failed to install keychain")
|
||||
|
||||
ret = shell.run("openssl", "version")
|
||||
assert ret.stdout
|
||||
openssl_version = ret.stdout.split()[1]
|
||||
|
||||
# openssl versions under 3.0.0 do not include legacy flag
|
||||
if salt.utils.versions.compare(ver1=openssl_version, oper="<", ver2="3.0.0"):
|
||||
get_name = keychain.get_friendly_name(cert, passwd, legacy=False)
|
||||
else:
|
||||
get_name = keychain.get_friendly_name(cert, passwd, legacy=True)
|
||||
|
||||
assert get_name == cert_alias
|
||||
|
||||
|
||||
def test_mac_keychain_get_default_keychain(keychain, cmd, setup_teardown_vars):
|
||||
"""
|
||||
Test that attempts to get the default keychain
|
||||
"""
|
||||
sys_get_keychain = keychain.get_default_keychain()
|
||||
salt_get_keychain = cmd.run("security default-keychain -d user")
|
||||
assert salt_get_keychain == sys_get_keychain
|
||||
|
||||
|
||||
def test_mac_keychain_list_certs(keychain, setup_teardown_vars):
|
||||
"""
|
||||
Test that attempts to list certs
|
||||
"""
|
||||
cert_default = "com.apple.systemdefault"
|
||||
certs = keychain.list_certs()
|
||||
assert cert_default in certs
|
118
tests/pytests/functional/modules/test_mac_portspkg.py
Normal file
118
tests/pytests/functional/modules/test_mac_portspkg.py
Normal file
|
@ -0,0 +1,118 @@
|
|||
"""
|
||||
integration tests for mac_ports
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
pytest.mark.skip_if_binaries_missing("port"),
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def pkg(modules):
|
||||
return modules.pkg
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def uninstalled_pkg_name(pkg):
|
||||
pkgname = installed_pkg_name
|
||||
try:
|
||||
pkg.refresh_db()
|
||||
yield pkgname
|
||||
finally:
|
||||
if pkgname in pkg.list_pkgs():
|
||||
pkg.remove(pkgname)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def installed_pkg_name(uninstalled_pkg_name):
|
||||
pkg.install(uninstalled_pkg_name)
|
||||
return uninstalled_pkg_name
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def _setup_teardown_vars(pkg):
|
||||
AGREE_INSTALLED = False
|
||||
try:
|
||||
ret = pkg.list_pkgs()
|
||||
AGREE_INSTALLED = installed_pkg_name in ret
|
||||
pkg.refresh_db()
|
||||
yield
|
||||
finally:
|
||||
if AGREE_INSTALLED:
|
||||
pkg.remove(installed_pkg_name)
|
||||
|
||||
|
||||
def test_list_pkgs(pkg, installed_pkg_name):
|
||||
"""
|
||||
Test pkg.list_pkgs
|
||||
"""
|
||||
pkg_list_ret = pkg.list_pkgs()
|
||||
assert isinstance(pkg_list_ret, dict)
|
||||
assert installed_pkg_name in pkg_list_ret
|
||||
|
||||
|
||||
def test_latest_version(pkg, installed_pkg_name):
|
||||
"""
|
||||
Test pkg.latest_version
|
||||
"""
|
||||
result = pkg.latest_version(installed_pkg_name, refresh=False)
|
||||
assert isinstance(result, dict)
|
||||
assert installed_pkg_name in result.data
|
||||
|
||||
|
||||
def test_remove(pkg, installed_pkg_name):
|
||||
"""
|
||||
Test pkg.remove
|
||||
"""
|
||||
ret = pkg.remove(installed_pkg_name)
|
||||
assert isinstance(ret, dict)
|
||||
assert installed_pkg_name in ret
|
||||
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test_install(pkg, uninstalled_pkg_name):
|
||||
"""
|
||||
Test pkg.install
|
||||
"""
|
||||
ret = pkg.install(uninstalled_pkg_name)
|
||||
assert isinstance(ret, dict)
|
||||
assert uninstalled_pkg_name in ret
|
||||
|
||||
|
||||
def test_list_upgrades_type(pkg):
|
||||
"""
|
||||
Test pkg.list_upgrades return type
|
||||
"""
|
||||
ret = pkg.list_upgrades(refresh=False)
|
||||
assert isinstance(ret, dict)
|
||||
|
||||
|
||||
def test_upgrade_available(pkg, installed_pkg_name):
|
||||
"""
|
||||
Test pkg.upgrade_available
|
||||
"""
|
||||
ret = pkg.upgrade_available(installed_pkg_name, refresh=False)
|
||||
assert not ret.data
|
||||
|
||||
|
||||
def test_refresh_db(pkg):
|
||||
"""
|
||||
Test pkg.refresh_db
|
||||
"""
|
||||
ret = pkg.refresh_db()
|
||||
assert ret
|
||||
|
||||
|
||||
def test_upgrade(pkg):
|
||||
"""
|
||||
Test pkg.upgrade
|
||||
"""
|
||||
ret = pkg.upgrade(refresh=False)
|
||||
assert isinstance(ret, dict)
|
||||
assert ret.data["result"]
|
339
tests/pytests/functional/modules/test_mac_power.py
Normal file
339
tests/pytests/functional/modules/test_mac_power.py
Normal file
|
@ -0,0 +1,339 @@
|
|||
"""
|
||||
integration tests for mac_power
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_if_binaries_missing("systemsetup"),
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def power(modules):
|
||||
return modules.power
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_computer_sleep(power):
|
||||
ret = power.get_computer_sleep()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
power.set_computer_sleep(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_display_sleep(power):
|
||||
ret = power.get_display_sleep()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
power.set_display_sleep(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_harddisk_sleep(power):
|
||||
ret = power.get_harddisk_sleep()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
power.set_harddisk_sleep(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_restart_power_failure(power):
|
||||
try:
|
||||
ret = power.get_restart_power_failure()
|
||||
if not isinstance(ret, bool):
|
||||
assert "Error" in ret
|
||||
pytest.skip(f"Error while calling `get_restart_power_failure()`: {ret}")
|
||||
except CommandExecutionError as exc:
|
||||
if "Not supported on this machine" in str(exc):
|
||||
pytest.skip("Restart After Power Failure: Not supported on this machine.")
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if isinstance(ret, bool):
|
||||
if ret:
|
||||
ret = power.set_restart_power_failure("On")
|
||||
assert ret
|
||||
else:
|
||||
ret = power.set_restart_power_failure("Off")
|
||||
assert ret
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_sleep_on_power_button(power):
|
||||
try:
|
||||
ret = power.get_sleep_on_power_button()
|
||||
if not isinstance(ret, bool):
|
||||
functionality_available = False
|
||||
else:
|
||||
functionality_available = True
|
||||
except CommandExecutionError as exc:
|
||||
functionality_available = False
|
||||
|
||||
if functionality_available is False:
|
||||
pytest.skip("Skipping. sleep_on_power_button unavailable.")
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
power.set_sleep_on_power_button(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_wake_on_modem(power):
|
||||
try:
|
||||
ret = power.get_wake_on_modem()
|
||||
if not isinstance(ret, bool):
|
||||
functionality_available = False
|
||||
else:
|
||||
functionality_available = True
|
||||
except CommandExecutionError as exc:
|
||||
functionality_available = False
|
||||
|
||||
if functionality_available is False:
|
||||
pytest.skip("Skipping. wake_on_modem unavailable.")
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
power.set_wake_on_modem(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_wake_on_network(power):
|
||||
try:
|
||||
ret = power.get_wake_on_network()
|
||||
if not isinstance(ret, bool):
|
||||
assert "Error" in ret
|
||||
pytest.skip(f"Error while calling `get_wake_on_network()`: {ret}")
|
||||
except CommandExecutionError as exc:
|
||||
if "Not supported on this machine" in str(exc):
|
||||
pytest.skip("Wake On Network Access: Not supported on this machine")
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if isinstance(ret, bool):
|
||||
ret = power.set_wake_on_network(ret)
|
||||
assert ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_computer_sleep")
|
||||
def test_computer_sleep(power):
|
||||
"""
|
||||
Test power.get_computer_sleep
|
||||
Test power.set_computer_sleep
|
||||
"""
|
||||
|
||||
# Normal Functionality
|
||||
ret = power.set_computer_sleep(90)
|
||||
assert ret
|
||||
|
||||
ret = power.get_computer_sleep()
|
||||
assert ret == "after 90 minutes"
|
||||
|
||||
ret = power.set_computer_sleep("Off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_computer_sleep()
|
||||
assert ret == "Never"
|
||||
|
||||
# Test invalid input
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_computer_sleep("spongebob")
|
||||
assert "Invalid String Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_computer_sleep(0)
|
||||
assert "Invalid Integer Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_computer_sleep(181)
|
||||
assert "Invalid Integer Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_computer_sleep(True)
|
||||
assert "Invalid Boolean Value for Minutes" in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_display_sleep")
|
||||
def test_display_sleep(power):
|
||||
"""
|
||||
Test power.get_display_sleep
|
||||
Test power.set_display_sleep
|
||||
"""
|
||||
|
||||
# Normal Functionality
|
||||
ret = power.set_display_sleep(90)
|
||||
assert ret
|
||||
|
||||
ret = power.get_display_sleep()
|
||||
assert ret == "after 90 minutes"
|
||||
|
||||
ret = power.set_display_sleep("Off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_display_sleep()
|
||||
assert ret == "Never"
|
||||
|
||||
# Test invalid input
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_display_sleep("spongebob")
|
||||
assert "Invalid String Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_display_sleep(0)
|
||||
assert "Invalid Integer Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_display_sleep(181)
|
||||
assert "Invalid Integer Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_display_sleep(True)
|
||||
assert "Invalid Boolean Value for Minutes" in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_harddisk_sleep")
|
||||
def test_harddisk_sleep(power):
|
||||
"""
|
||||
Test power.get_harddisk_sleep
|
||||
Test power.set_harddisk_sleep
|
||||
"""
|
||||
|
||||
# Normal Functionality
|
||||
ret = power.set_harddisk_sleep(90)
|
||||
assert ret
|
||||
|
||||
ret = power.get_harddisk_sleep()
|
||||
assert ret == "after 90 minutes"
|
||||
|
||||
ret = power.set_harddisk_sleep("Off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_harddisk_sleep()
|
||||
assert ret == "Never"
|
||||
|
||||
# Test invalid input
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_harddisk_sleep("spongebob")
|
||||
assert "Invalid String Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_harddisk_sleep(0)
|
||||
assert "Invalid Integer Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_harddisk_sleep(181)
|
||||
assert "Invalid Integer Value for Minutes" in str(exc.value)
|
||||
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
power.set_harddisk_sleep(True)
|
||||
assert "Invalid Boolean Value for Minutes" in str(exc.value)
|
||||
|
||||
|
||||
def test_restart_freeze(power):
|
||||
"""
|
||||
Test power.get_restart_freeze
|
||||
Test power.set_restart_freeze
|
||||
"""
|
||||
# Normal Functionality
|
||||
ret = power.set_restart_freeze("on")
|
||||
assert ret
|
||||
|
||||
ret = power.get_restart_freeze()
|
||||
assert ret
|
||||
|
||||
# This will return False because mac fails to actually make the change
|
||||
ret = power.set_restart_freeze("off")
|
||||
assert not ret
|
||||
|
||||
# Even setting to off returns true, it actually is never set
|
||||
# This is an apple bug
|
||||
ret = power.get_restart_freeze()
|
||||
assert ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_restart_power_failure")
|
||||
def test_restart_power_failure(power):
|
||||
"""
|
||||
Test power.get_restart_power_failure
|
||||
Test power.set_restart_power_failure
|
||||
"""
|
||||
ret = power.set_restart_power_failure("On")
|
||||
assert ret
|
||||
|
||||
ret = power.get_restart_power_failure()
|
||||
assert ret
|
||||
|
||||
ret = power.set_restart_power_failure("Off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_restart_power_failure()
|
||||
assert not ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_sleep_on_power_button")
|
||||
def test_sleep_on_power_button(power):
|
||||
"""
|
||||
Test power.get_sleep_on_power_button
|
||||
Test power.set_sleep_on_power_button
|
||||
"""
|
||||
ret = power.set_sleep_on_power_button("on")
|
||||
assert ret
|
||||
|
||||
ret = power.get_sleep_on_power_button()
|
||||
assert ret
|
||||
|
||||
ret = power.set_sleep_on_power_button("off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_sleep_on_power_button()
|
||||
assert not ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_wake_on_modem")
|
||||
def test_wake_on_modem(power):
|
||||
"""
|
||||
Test power.get_wake_on_modem
|
||||
Test power.set_wake_on_modem
|
||||
"""
|
||||
ret = power.set_wake_on_modem("on")
|
||||
assert ret
|
||||
|
||||
ret = power.get_wake_on_modem()
|
||||
assert ret
|
||||
|
||||
ret = power.set_wake_on_modem("off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_wake_on_modem()
|
||||
assert not ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_wake_on_network")
|
||||
def test_wake_on_network(power):
|
||||
"""
|
||||
Test power.get_wake_on_network
|
||||
Test power.set_wake_on_network
|
||||
"""
|
||||
ret = power.set_wake_on_network("on")
|
||||
assert ret
|
||||
|
||||
ret = power.get_wake_on_network()
|
||||
assert ret
|
||||
|
||||
ret = power.set_wake_on_network("off")
|
||||
assert ret
|
||||
|
||||
ret = power.get_wake_on_network()
|
||||
assert not ret
|
252
tests/pytests/functional/modules/test_mac_service.py
Normal file
252
tests/pytests/functional/modules/test_mac_service.py
Normal file
|
@ -0,0 +1,252 @@
|
|||
"""
|
||||
integration tests for mac_service
|
||||
"""
|
||||
|
||||
import plistlib
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.utils.files
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.skip_if_binaries_missing("launchctl", "plutil"),
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def service(modules):
|
||||
return modules.service
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def service_name(service):
|
||||
|
||||
service_name = "com.salt.integration.test"
|
||||
service_path = "/Library/LaunchDaemons/com.salt.integration.test.plist"
|
||||
|
||||
service_data = {
|
||||
"KeepAlive": True,
|
||||
"Label": service_name,
|
||||
"ProgramArguments": ["/bin/sleep", "1000"],
|
||||
"RunAtLoad": True,
|
||||
}
|
||||
with salt.utils.files.fopen(service_path, "wb") as fp:
|
||||
plistlib.dump(service_data, fp)
|
||||
service.enable(service_name)
|
||||
service.start(service_name)
|
||||
|
||||
try:
|
||||
yield service_name
|
||||
finally:
|
||||
# Try to stop the service if it's running
|
||||
try:
|
||||
service.stop(service_name)
|
||||
except CommandExecutionError:
|
||||
pass
|
||||
salt.utils.files.safe_rm(service_path)
|
||||
|
||||
|
||||
def test_show(service, service_name):
|
||||
"""
|
||||
Test service.show
|
||||
"""
|
||||
# Existing Service
|
||||
service_info = service.show(service_name)
|
||||
assert isinstance(service_info, dict)
|
||||
assert service_info["plist"]["Label"] == service_name
|
||||
|
||||
# Missing Service
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.show("spongebob")
|
||||
assert "Service not found" in str(exc.value)
|
||||
|
||||
|
||||
def test_launchctl(service, service_name):
|
||||
"""
|
||||
Test service.launchctl
|
||||
"""
|
||||
# Expected Functionality
|
||||
ret = service.launchctl("error", "bootstrap", 64)
|
||||
assert ret
|
||||
|
||||
ret = service.launchctl("error", "bootstrap", 64, return_stdout=True)
|
||||
assert ret == "64: unknown error code"
|
||||
|
||||
# Raise an error
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.launchctl("error", "bootstrap")
|
||||
assert "Failed to error service" in str(exc.value)
|
||||
|
||||
|
||||
def test_list(service, service_name):
|
||||
"""
|
||||
Test service.list
|
||||
"""
|
||||
# Expected Functionality
|
||||
ret = service.list()
|
||||
assert "PID" in ret
|
||||
ret = service.list(service_name)
|
||||
assert "{" in ret
|
||||
|
||||
# Service not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.list("spongebob")
|
||||
assert "Service not found" in str(exc.value)
|
||||
|
||||
|
||||
def test_enable(service, service_name):
|
||||
"""
|
||||
Test service.enable
|
||||
"""
|
||||
ret = service.enable(service_name)
|
||||
assert ret
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.enable("spongebob")
|
||||
assert "Service not found" in str(exc.value)
|
||||
|
||||
|
||||
def test_disable(service, service_name):
|
||||
"""
|
||||
Test service.disable
|
||||
"""
|
||||
ret = service.disable(service_name)
|
||||
assert ret
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.disable("spongebob")
|
||||
assert "Service not found" in str(exc.value)
|
||||
|
||||
|
||||
def test_start(service, service_name):
|
||||
"""
|
||||
Test service.start
|
||||
Test service.stop
|
||||
Test service.status
|
||||
"""
|
||||
service.stop(service_name)
|
||||
ret = service.start(service_name)
|
||||
assert ret
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.start("spongebob")
|
||||
assert "Service not found" in str(exc.value)
|
||||
|
||||
|
||||
def test_stop(service, service_name):
|
||||
"""
|
||||
Test service.stop
|
||||
"""
|
||||
ret = service.stop(service_name)
|
||||
assert ret
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.stop("spongebob")
|
||||
assert "Service not found" in str(exc.value)
|
||||
|
||||
service.start(service_name)
|
||||
|
||||
|
||||
def test_status(service, service_name):
|
||||
"""
|
||||
Test service.status
|
||||
"""
|
||||
# A running service
|
||||
ret = service.status(service_name)
|
||||
assert ret
|
||||
|
||||
# A stopped service
|
||||
service.stop(service_name)
|
||||
ret = service.status(service_name)
|
||||
assert not ret
|
||||
|
||||
# Service not found
|
||||
ret = service.status("spongebob")
|
||||
assert not ret
|
||||
|
||||
service.start(service_name)
|
||||
|
||||
|
||||
def test_available(service, service_name):
|
||||
"""
|
||||
Test service.available
|
||||
"""
|
||||
ret = service.available(service_name)
|
||||
assert ret
|
||||
|
||||
ret = service.available("spongebob")
|
||||
assert not ret
|
||||
|
||||
|
||||
def test_missing(service, service_name):
|
||||
"""
|
||||
Test service.missing
|
||||
"""
|
||||
ret = service.missing(service_name)
|
||||
assert not ret
|
||||
|
||||
ret = service.missing("spongebob")
|
||||
assert ret
|
||||
|
||||
|
||||
def test_enabled(service, service_name):
|
||||
"""
|
||||
Test service.enabled
|
||||
"""
|
||||
service.disabled(service_name)
|
||||
ret = service.enabled(service_name)
|
||||
assert ret
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.enabled("spongebob")
|
||||
assert "Service not found: spongebob" in str(exc.value)
|
||||
|
||||
|
||||
def test_disabled(service, service_name):
|
||||
"""
|
||||
Test service.disabled
|
||||
"""
|
||||
ret = service.disabled(service_name)
|
||||
assert not ret
|
||||
|
||||
ret = service.disable(service_name)
|
||||
assert ret
|
||||
|
||||
ret = service.disabled(service_name)
|
||||
assert ret
|
||||
|
||||
ret = service.enable(service_name)
|
||||
assert ret
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = service.disable("spongebob")
|
||||
assert "Service not found: spongebob" in str(exc.value)
|
||||
|
||||
|
||||
def test_get_all(service, service_name):
|
||||
"""
|
||||
Test service.get_all
|
||||
"""
|
||||
services = service.get_all()
|
||||
assert isinstance(services, list)
|
||||
assert service_name in services
|
||||
|
||||
|
||||
def test_get_enabled(service, service_name):
|
||||
"""
|
||||
Test service.get_enabled
|
||||
"""
|
||||
services = service.get_enabled()
|
||||
assert isinstance(services, list)
|
||||
assert service_name in services
|
||||
|
||||
|
||||
def test_service_laoded(service, service_name):
|
||||
"""
|
||||
Test service.get_enabled
|
||||
"""
|
||||
ret = service.loaded(service_name)
|
||||
assert ret
|
174
tests/pytests/functional/modules/test_mac_shadow.py
Normal file
174
tests/pytests/functional/modules/test_mac_shadow.py
Normal file
|
@ -0,0 +1,174 @@
|
|||
"""
|
||||
integration tests for mac_shadow
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import types
|
||||
|
||||
import pytest
|
||||
from saltfactories.utils import random_string
|
||||
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_if_binaries_missing("dscl", "pwpolicy"),
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def shadow(modules):
|
||||
return modules.shadow
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def accounts():
|
||||
with pytest.helpers.create_account(create_group=True) as _account:
|
||||
yield types.SimpleNamespace(
|
||||
existing=_account.username,
|
||||
non_existing=random_string("account-", lowercase=False),
|
||||
)
|
||||
|
||||
|
||||
def test_info(shadow, accounts):
|
||||
"""
|
||||
Test shadow.info
|
||||
"""
|
||||
# Correct Functionality
|
||||
ret = shadow.info(accounts.existing)
|
||||
assert ret["name"] == accounts.existing
|
||||
|
||||
# User does not exist
|
||||
ret = shadow.info(accounts.non_existing)
|
||||
assert ret["name"] == ""
|
||||
|
||||
|
||||
def test_get_last_change(shadow, accounts):
|
||||
"""
|
||||
Test shadow.get_last_change
|
||||
"""
|
||||
# Correct Functionality
|
||||
text_date = shadow.get_last_change(accounts.existing)
|
||||
assert text_date != "Invalid Timestamp"
|
||||
obj_date = datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S")
|
||||
assert isinstance(obj_date, datetime.date)
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.get_last_change(accounts.non_existing)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_get_login_failed_last(shadow, accounts):
|
||||
"""
|
||||
Test shadow.get_login_failed_last
|
||||
"""
|
||||
# Correct Functionality
|
||||
text_date = shadow.get_login_failed_last(accounts.existing)
|
||||
assert text_date != "Invalid Timestamp"
|
||||
obj_date = datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S")
|
||||
assert isinstance(obj_date, datetime.date)
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.get_login_failed_last(accounts)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_get_login_failed_count(shadow, accounts):
|
||||
"""
|
||||
Test shadow.get_login_failed_count
|
||||
"""
|
||||
# Correct Functionality
|
||||
assert shadow.get_login_failed_count(accounts.existing) == "0"
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.get_login_failed_count(accounts.non_existing)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_get_set_maxdays(shadow, accounts):
|
||||
"""
|
||||
Test shadow.get_maxdays
|
||||
Test shadow.set_maxdays
|
||||
"""
|
||||
# Correct Functionality
|
||||
assert shadow.set_maxdays(accounts.existing, 20)
|
||||
assert shadow.get_maxdays(accounts.existing) == 20
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.set_maxdays(accounts.non_existing, 7)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.get_maxdays(accounts.non_existing)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_get_set_change(shadow, accounts):
|
||||
"""
|
||||
Test shadow.get_change
|
||||
Test shadow.set_change
|
||||
"""
|
||||
# Correct Functionality
|
||||
assert shadow.set_change(accounts.existing, "02/11/2011")
|
||||
assert shadow.get_change(accounts.existing) == "02/11/2011"
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.set_change(accounts.non_existing, "02/11/2012")
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.get_change(accounts.non_existing)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_get_set_expire(shadow, accounts):
|
||||
"""
|
||||
Test shadow.get_expire
|
||||
Test shadow.set_expire
|
||||
"""
|
||||
# Correct Functionality
|
||||
assert shadow.set_expire(accounts.existing, "02/11/2011")
|
||||
assert shadow.get_expire(accounts.existing) == "02/11/2011"
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.set_expire(accounts.non_existing, "02/11/2012")
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.get_expire(accounts.non_existing)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_del_password(shadow, accounts):
|
||||
"""
|
||||
Test shadow.del_password
|
||||
"""
|
||||
# Correct Functionality
|
||||
assert shadow.del_password(accounts.existing)
|
||||
assert shadow.info(accounts.existing)["passwd"] == "*"
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.del_password(accounts.non_existing)
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
||||
|
||||
|
||||
def test_set_password(shadow, accounts):
|
||||
"""
|
||||
Test shadow.set_password
|
||||
"""
|
||||
# Correct Functionality
|
||||
assert shadow.set_password(accounts.existing, "Pa$$W0rd")
|
||||
|
||||
# User does not exist
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
shadow.set_password(accounts.non_existing, "P@SSw0rd")
|
||||
assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value)
|
193
tests/pytests/functional/modules/test_mac_softwareupdate.py
Normal file
193
tests/pytests/functional/modules/test_mac_softwareupdate.py
Normal file
|
@ -0,0 +1,193 @@
|
|||
"""
|
||||
integration tests for mac_softwareupdate
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.skip_if_binaries_missing("softwareupdate"),
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def softwareupdate(modules):
|
||||
return modules.softwareupdate
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_schedule_enabled(softwareupdate):
|
||||
ret = softwareupdate.schedule_enabled()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
softwareupdate.schedule_enable(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_catalog(softwareupdate):
|
||||
ret = softwareupdate.get_catalog()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if ret == "Default":
|
||||
softwareupdate.reset_catalog()
|
||||
else:
|
||||
softwareupdate.set_catalog(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_ignored(softwareupdate):
|
||||
ret = softwareupdate.list_ignored() or ()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
for item in ret:
|
||||
softwareupdate.ignore(item)
|
||||
|
||||
|
||||
def test_list_available(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.list_available
|
||||
"""
|
||||
# Can't predict what will be returned, so can only test that the return
|
||||
# is the correct type, dict
|
||||
ret = softwareupdate.list_available()
|
||||
assert isinstance(ret, dict)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_ignored")
|
||||
@pytest.mark.skip(reason="Ignore removed from latest OS X.")
|
||||
def test_ignore(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.ignore
|
||||
Test softwareupdate.list_ignored
|
||||
Test softwareupdate.reset_ignored
|
||||
"""
|
||||
# Test reset_ignored
|
||||
ret = softwareupdate.reset_ignored()
|
||||
assert ret
|
||||
|
||||
ret = softwareupdate.list_ignored()
|
||||
assert ret == []
|
||||
|
||||
# Test ignore
|
||||
ret = softwareupdate.ignore("spongebob")
|
||||
assert ret
|
||||
|
||||
ret = softwareupdate.ignore("squidward")
|
||||
assert ret
|
||||
|
||||
# Test list_ignored and verify ignore
|
||||
ret = softwareupdate.list_ignored()
|
||||
assert "spongebob" in ret
|
||||
|
||||
ret = softwareupdate.list_ignored()
|
||||
assert "squidward" in ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_schedule_enabled")
|
||||
@pytest.mark.skip(reason="Ignore schedule support removed from latest OS X.")
|
||||
def test_schedule(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.schedule_enable
|
||||
Test softwareupdate.schedule_enabled
|
||||
"""
|
||||
# Test enable
|
||||
ret = softwareupdate.schedule_enable(True)
|
||||
assert ret
|
||||
|
||||
ret = softwareupdate.schedule_enabled()
|
||||
assert ret
|
||||
|
||||
# Test disable in case it was already enabled
|
||||
ret = softwareupdate.schedule_enable(False)
|
||||
assert not ret
|
||||
|
||||
ret = softwareupdate.schedule_enabled()
|
||||
assert not ret
|
||||
|
||||
|
||||
def test_update(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.update_all
|
||||
Test softwareupdate.update
|
||||
Test softwareupdate.update_available
|
||||
|
||||
Need to know the names of updates that are available to properly test
|
||||
the update functions...
|
||||
"""
|
||||
# There's no way to know what the dictionary will contain, so all we can
|
||||
# check is that the return is a dictionary
|
||||
ret = softwareupdate.update_all()
|
||||
assert isinstance(ret, dict)
|
||||
|
||||
# Test update_available
|
||||
ret = softwareupdate.update_available("spongebob")
|
||||
assert not ret
|
||||
|
||||
# Test update not available
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
ret = softwareupdate.update("spongebob")
|
||||
assert "Update not available" in str(exc.value)
|
||||
|
||||
|
||||
def test_list_downloads(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.list_downloads
|
||||
"""
|
||||
ret = softwareupdate.list_downloads()
|
||||
assert isinstance(ret, list)
|
||||
|
||||
|
||||
def test_download(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.download
|
||||
|
||||
Need to know the names of updates that are available to properly test
|
||||
the download function
|
||||
"""
|
||||
# Test update not available
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
softwareupdate.download("spongebob")
|
||||
assert "Update not available" in str(exc.value)
|
||||
|
||||
|
||||
def test_download_all(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.download_all
|
||||
"""
|
||||
ret = softwareupdate.download_all()
|
||||
assert isinstance(ret, list)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_catalog")
|
||||
@pytest.mark.skip(reason="Ignore catalog support removed from latest OS X.")
|
||||
def test_get_set_reset_catalog(softwareupdate):
|
||||
"""
|
||||
Test softwareupdate.download_all
|
||||
"""
|
||||
# Reset the catalog
|
||||
ret = softwareupdate.reset_catalog()
|
||||
assert ret
|
||||
|
||||
ret = softwareupdate.get_catalog()
|
||||
assert ret == "Default"
|
||||
|
||||
# Test setting and getting the catalog
|
||||
ret = softwareupdate.set_catalog("spongebob")
|
||||
assert ret
|
||||
|
||||
ret = softwareupdate.get_catalog()
|
||||
assert ret == "spongebob"
|
||||
|
||||
# Test reset the catalog
|
||||
ret = softwareupdate.reset_catalog()
|
||||
assert ret
|
||||
|
||||
assert softwareupdate.get_catalog()
|
||||
assert ret == "Default"
|
|
@ -56,10 +56,10 @@ def _remote_events_cleanup(system, grains):
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def _subnet_cleanup(system):
|
||||
def subnet_name(system):
|
||||
subnet_name = system.get_subnet_name()
|
||||
try:
|
||||
yield
|
||||
yield random_string("subnet-", lowercase=False)
|
||||
finally:
|
||||
if system.get_subnet_name() != subnet_name:
|
||||
system.set_subnet_name(subnet_name)
|
||||
|
@ -76,26 +76,15 @@ def _keyboard_cleanup(system):
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def _computer_name_cleanup(system):
|
||||
def computer_name(system):
|
||||
computer_name = system.get_computer_name()
|
||||
try:
|
||||
yield
|
||||
yield random_string("cmptr-", lowercase=False)
|
||||
finally:
|
||||
if system.get_computer_name() != computer_name:
|
||||
system.set_computer_name(computer_name)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _setup_teardown_vars(service, system):
|
||||
atrun_enabled = service.enabled("com.apple.atrun")
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if not atrun_enabled:
|
||||
atrun = "/System/Library/LaunchDaemons/com.apple.atrun.plist"
|
||||
service.stop(atrun)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_remote_login_cleanup")
|
||||
def test_get_set_remote_login(system):
|
||||
"""
|
||||
|
@ -197,19 +186,16 @@ def test_get_set_remote_events(system):
|
|||
assert "Invalid String Value for Enabled" in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_subnet_cleanup")
|
||||
def test_get_set_subnet_name(system):
|
||||
def test_get_set_subnet_name(system, subnet_name):
|
||||
"""
|
||||
Test system.get_subnet_name
|
||||
Test system.set_subnet_name
|
||||
"""
|
||||
set_subnet_name = random_string("RS-", lowercase=False)
|
||||
|
||||
ret = system.set_subnet_name(set_subnet_name)
|
||||
ret = system.set_subnet_name(subnet_name)
|
||||
assert ret
|
||||
|
||||
ret = system.get_subnet_name()
|
||||
assert ret == set_subnet_name
|
||||
assert ret == subnet_name
|
||||
|
||||
|
||||
@pytest.mark.skip_initial_gh_actions_failure
|
||||
|
@ -336,21 +322,17 @@ def test_get_set_boot_arch(system):
|
|||
# investigate
|
||||
# @pytest.mark.skipif(salt.utils.platform.is_darwin() and six.PY3, reason='This test hangs on OS X on Py3. Skipping until #53566 is merged.')
|
||||
@pytest.mark.destructive_test
|
||||
@pytest.mark.usefixtures("_computer_name_cleanup")
|
||||
def test_get_set_computer_name(system):
|
||||
def test_get_set_computer_name(system, computer_name):
|
||||
"""
|
||||
Test system.get_computer_name
|
||||
Test system.set_computer_name
|
||||
"""
|
||||
set_computer_name = random_string("RS-", lowercase=False)
|
||||
current_computer_name = system.get_computer_name()
|
||||
assert current_computer_name
|
||||
assert current_computer_name != computer_name
|
||||
|
||||
computer_name = system.get_computer_name()
|
||||
|
||||
log.debug("set name is %s", set_computer_name)
|
||||
ret = system.set_computer_name(set_computer_name)
|
||||
ret = system.set_computer_name(computer_name)
|
||||
assert ret
|
||||
|
||||
ret = system.get_computer_name()
|
||||
assert ret == set_computer_name
|
||||
|
||||
system.set_computer_name(computer_name)
|
||||
assert ret == computer_name
|
||||
|
|
242
tests/pytests/functional/modules/test_mac_timezone.py
Normal file
242
tests/pytests/functional/modules/test_mac_timezone.py
Normal file
|
@ -0,0 +1,242 @@
|
|||
"""
|
||||
Integration tests for mac_timezone
|
||||
|
||||
If using parallels, make sure Time sync is turned off. Otherwise, parallels will
|
||||
keep changing your date/time settings while the tests are running. To turn off
|
||||
Time sync do the following:
|
||||
- Go to actions -> configure
|
||||
- Select options at the top and 'More Options' on the left
|
||||
- Set time to 'Do not sync'
|
||||
"""
|
||||
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_if_binaries_missing("systemsetup"),
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def timezone(modules):
|
||||
return modules.timezone
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_time_server(timezone):
|
||||
ret = timezone.get_time_server()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if timezone.get_time_server() != ret:
|
||||
timezone.set_time_server(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_using_network_time(timezone):
|
||||
ret = timezone.get_using_network_time()
|
||||
try:
|
||||
timezone.set_using_network_time(False)
|
||||
yield ret
|
||||
finally:
|
||||
timezone.set_using_network_time(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_time(timezone, _reset_using_network_time):
|
||||
ret = timezone.get_time()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if not _reset_using_network_time:
|
||||
timezone.set_time(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_date(timezone, _reset_using_network_time):
|
||||
ret = timezone.get_date()
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if not _reset_using_network_time:
|
||||
timezone.set_date(ret)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _reset_zone(timezone):
|
||||
ret = timezone.get_zone()
|
||||
try:
|
||||
timezone.set_zone("America/Denver")
|
||||
yield
|
||||
finally:
|
||||
timezone.set_zone(ret)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_date")
|
||||
def test_get_set_date(timezone):
|
||||
"""
|
||||
Test timezone.get_date
|
||||
Test timezone.set_date
|
||||
"""
|
||||
# Correct Functionality
|
||||
ret = timezone.set_date("2/20/2011")
|
||||
assert ret
|
||||
ret = timezone.get_date()
|
||||
assert ret == "2/20/2011"
|
||||
|
||||
# Test bad date format
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
ret = timezone.set_date("13/12/2014")
|
||||
assert (
|
||||
"ERROR executing 'timezone.set_date': Invalid Date/Time Format: 13/12/2014"
|
||||
in str(exc.value)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_get_time(timezone):
|
||||
"""
|
||||
Test timezone.get_time
|
||||
"""
|
||||
text_time = timezone.get_time()
|
||||
assert text_time != "Invalid Timestamp"
|
||||
obj_date = datetime.datetime.strptime(text_time, "%H:%M:%S")
|
||||
assert isinstance(obj_date, datetime.date)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_time")
|
||||
def test_set_time(timezone):
|
||||
"""
|
||||
Test timezone.set_time
|
||||
"""
|
||||
# Correct Functionality
|
||||
ret = timezone.set_time("3:14")
|
||||
assert ret
|
||||
|
||||
# Test bad time format
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
ret = timezone.set_time("3:71")
|
||||
assert (
|
||||
"ERROR executing 'timezone.set_time': Invalid Date/Time Format: 3:71"
|
||||
in str(exc.value)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_zone")
|
||||
def test_get_set_zone(timezone):
|
||||
"""
|
||||
Test timezone.get_zone
|
||||
Test timezone.set_zone
|
||||
"""
|
||||
# Correct Functionality
|
||||
ret = timezone.set_zone("Pacific/Wake")
|
||||
assert ret
|
||||
|
||||
ret = timezone.get_zone()
|
||||
assert ret == "Pacific/Wake"
|
||||
|
||||
# Test bad time zone
|
||||
with pytest.raises(SaltInvocationError) as exc:
|
||||
ret = timezone.set_zone("spongebob")
|
||||
assert (
|
||||
"ERROR executing 'timezone.set_zone': Invalid Timezone: spongebob"
|
||||
in str(exc.value)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_zone")
|
||||
def test_get_offset(timezone):
|
||||
"""
|
||||
Test timezone.get_offset
|
||||
"""
|
||||
ret = timezone.set_zone("Pacific/Wake")
|
||||
assert ret
|
||||
ret = timezone.get_offset()
|
||||
assert isinstance(ret, str)
|
||||
assert ret == "+1200"
|
||||
|
||||
ret = timezone.set_zone("America/Los_Angeles")
|
||||
assert ret
|
||||
ret = timezone.get_offset()
|
||||
assert isinstance(ret, str)
|
||||
assert ret == "-0800"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_zone")
|
||||
def test_get_set_zonecode(timezone):
|
||||
"""
|
||||
Test timezone.get_zonecode
|
||||
Test timezone.set_zonecode
|
||||
"""
|
||||
ret = timezone.set_zone("America/Los_Angeles")
|
||||
assert ret
|
||||
ret = timezone.get_zone()
|
||||
assert isinstance(ret, str)
|
||||
assert ret == "America/Los_Angeles"
|
||||
|
||||
ret = timezone.set_zone("Pacific/Wake")
|
||||
assert ret
|
||||
ret = timezone.get_zone()
|
||||
assert isinstance(ret, str)
|
||||
assert ret == "Pacific/Wake"
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_list_zones(timezone):
|
||||
"""
|
||||
Test timezone.list_zones
|
||||
"""
|
||||
zones = timezone.list_zones()
|
||||
assert isinstance(zones, list)
|
||||
assert "America/Denver" in zones
|
||||
assert "America/Los_Angeles" in zones
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_zone")
|
||||
def test_zone_compare(timezone):
|
||||
"""
|
||||
Test timezone.zone_compare
|
||||
"""
|
||||
ret = timezone.zone_compare("America/Denver")
|
||||
assert ret
|
||||
ret = timezone.zone_compare("Pacific/Wake")
|
||||
assert not ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_using_network_time")
|
||||
def test_get_set_using_network_time(timezone):
|
||||
"""
|
||||
Test timezone.get_using_network_time
|
||||
Test timezone.set_using_network_time
|
||||
"""
|
||||
ret = timezone.set_using_network_time(True)
|
||||
assert ret
|
||||
|
||||
ret = timezone.get_using_network_time()
|
||||
assert ret
|
||||
|
||||
ret = timezone.set_using_network_time(False)
|
||||
assert ret
|
||||
|
||||
ret = timezone.get_using_network_time()
|
||||
assert not ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_time_server")
|
||||
def test_get_set_time_server(timezone):
|
||||
"""
|
||||
Test timezone.get_time_server
|
||||
Test timezone.set_time_server
|
||||
"""
|
||||
ret = timezone.set_time_server("spongebob.com")
|
||||
assert ret
|
||||
|
||||
ret = timezone.get_time_server()
|
||||
assert ret == "spongebob.com"
|
189
tests/pytests/functional/modules/test_mac_user.py
Normal file
189
tests/pytests/functional/modules/test_mac_user.py
Normal file
|
@ -0,0 +1,189 @@
|
|||
"""
|
||||
:codeauthor: Nicole Thomas <nicole@saltstack.com>
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
from saltfactories.utils import random_string
|
||||
|
||||
import salt.utils.files
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def user(modules):
|
||||
return modules.user
|
||||
|
||||
|
||||
@pytest.fixture
def _reset_enable_auto_login(user):
    """Ensure auto login is off before the test and force it off afterwards."""
    # Bail out if something else already enabled auto login.
    if user.get_auto_login():
        pytest.skip("Auto login already enabled")

    try:
        yield
    finally:
        # Always switch auto login back off after the test.
        assert user.disable_auto_login()

        # Double-check that the toggle really took effect.
        if user.get_auto_login():
            pytest.fail("Failed to disable auto login")
|
||||
|
||||
|
||||
@pytest.fixture
def existing_user(user):
    """Create a throwaway macOS account for the test, removing it afterwards."""
    username = random_string("account-", uppercase=False)
    try:
        if user.add(username) is not True:
            pytest.skip("Failed to create an account to manipulate")
        yield username
    finally:
        # Only delete the account if it still exists.
        if user.info(username):
            user.delete(username)
|
||||
|
||||
|
||||
@pytest.fixture
def non_existing_user(user):
    """Yield a random username; delete the account if the test created it."""
    username = random_string("account-", uppercase=False)
    try:
        yield username
    finally:
        if user.info(username):
            user.delete(username)
|
||||
|
||||
|
||||
def test_mac_user_add(user, non_existing_user):
|
||||
"""
|
||||
Tests the add function
|
||||
"""
|
||||
user.add(non_existing_user)
|
||||
user_info = user.info(non_existing_user)
|
||||
assert user_info["name"] == non_existing_user
|
||||
|
||||
|
||||
def test_mac_user_delete(user, existing_user):
|
||||
"""
|
||||
Tests the delete function
|
||||
"""
|
||||
ret = user.delete(existing_user)
|
||||
assert ret
|
||||
|
||||
|
||||
def test_mac_user_primary_group(user, existing_user):
|
||||
"""
|
||||
Tests the primary_group function
|
||||
"""
|
||||
primary_group = user.primary_group(existing_user)
|
||||
uid_info = user.info(existing_user)
|
||||
assert primary_group in uid_info["groups"]
|
||||
|
||||
|
||||
def test_mac_user_changes(user, existing_user):
|
||||
"""
|
||||
Tests mac_user functions that change user properties
|
||||
"""
|
||||
# Test mac_user.chuid
|
||||
user.chuid(existing_user, 4376)
|
||||
uid_info = user.info(existing_user)
|
||||
assert uid_info["uid"] == 4376
|
||||
|
||||
# Test mac_user.chgid
|
||||
user.chgid(existing_user, 4376)
|
||||
gid_info = user.info(existing_user)
|
||||
assert gid_info["gid"] == 4376
|
||||
|
||||
# Test mac.user.chshell
|
||||
user.chshell(existing_user, "/bin/zsh")
|
||||
shell_info = user.info(existing_user)
|
||||
assert shell_info["shell"] == "/bin/zsh"
|
||||
|
||||
# Test mac_user.chhome
|
||||
user.chhome(existing_user, "/Users/foo")
|
||||
home_info = user.info(existing_user)
|
||||
assert home_info["home"] == "/Users/foo"
|
||||
|
||||
# Test mac_user.chfullname
|
||||
user.chfullname(existing_user, "Foo Bar")
|
||||
fullname_info = user.info(existing_user)
|
||||
assert fullname_info["fullname"] == "Foo Bar"
|
||||
|
||||
# Test mac_user.chgroups
|
||||
ret = user.info(existing_user)
|
||||
pre_info = ret["groups"]
|
||||
expected = pre_info + ["wheel"]
|
||||
user.chgroups(existing_user, "wheel")
|
||||
groups_info = user.info(existing_user)
|
||||
assert groups_info["groups"] == expected
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_enable_auto_login")
|
||||
def test_mac_user_enable_auto_login(user):
|
||||
"""
|
||||
Tests mac_user functions that enable auto login
|
||||
"""
|
||||
# Does enable return True
|
||||
ret = user.enable_auto_login("Spongebob", "Squarepants")
|
||||
assert ret
|
||||
|
||||
# Did it set the user entry in the plist file
|
||||
ret = user.get_auto_login()
|
||||
assert ret == "Spongebob"
|
||||
|
||||
# Did it generate the `/etc/kcpassword` file
|
||||
assert os.path.exists("/etc/kcpassword")
|
||||
|
||||
# Are the contents of the file correct
|
||||
test_data = bytes.fromhex("2e f8 27 42 a0 d9 ad 8b cd cd 6c 7d")
|
||||
with salt.utils.files.fopen("/etc/kcpassword", "rb") as f:
|
||||
file_data = f.read()
|
||||
assert test_data == file_data
|
||||
|
||||
# Does disable return True
|
||||
ret = user.disable_auto_login()
|
||||
assert ret
|
||||
|
||||
# Does it remove the user entry in the plist file
|
||||
ret = user.get_auto_login()
|
||||
assert not ret
|
||||
|
||||
# Is the `/etc/kcpassword` file removed
|
||||
assert not os.path.exists("/etc/kcpassword")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("_reset_enable_auto_login")
def test_mac_user_disable_auto_login(user):
    """
    Tests mac_user functions that disable auto login
    """
    # Enable auto login for the test
    user.enable_auto_login("Spongebob", "Squarepants")

    # Make sure auto login got set up
    ret = user.get_auto_login()
    if ret != "Spongebob":
        # BUG FIX: the original used `raise pytest.fail(...)`, but
        # pytest.fail() raises on its own and returns nothing to re-raise.
        pytest.fail("Failed to enable auto login")

    # Does disable return True
    ret = user.disable_auto_login()
    assert ret

    # Does it remove the user entry in the plist file
    ret = user.get_auto_login()
    assert not ret

    # Is the `/etc/kcpassword` file removed
    assert not os.path.exists("/etc/kcpassword")
|
176
tests/pytests/functional/modules/test_mac_xattr.py
Normal file
176
tests/pytests/functional/modules/test_mac_xattr.py
Normal file
|
@ -0,0 +1,176 @@
|
|||
"""
|
||||
integration tests for mac_xattr
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_if_binaries_missing("xattr"),
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def xattr(modules):
|
||||
return modules.xattr
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def existing_file(tmp_path):
|
||||
fpath = tmp_path / "xattr_test_file.txt"
|
||||
fpath.touch()
|
||||
return fpath
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def non_existing_file(tmp_path):
|
||||
return tmp_path / "xattr_no_file"
|
||||
|
||||
|
||||
def test_list_no_xattr(xattr, existing_file, non_existing_file):
    """
    Make sure there are no attributes
    """
    # Start from a clean slate.
    assert xattr.clear(existing_file)

    # A freshly cleared file reports an empty attribute mapping.
    assert xattr.list(existing_file) == {}

    # A missing file raises with a descriptive message.
    with pytest.raises(CommandExecutionError) as exc:
        xattr.list(non_existing_file)
    assert f"File not found: {non_existing_file}" in str(exc.value)
|
||||
|
||||
|
||||
def test_write(xattr, existing_file, non_existing_file):
|
||||
"""
|
||||
Write an attribute
|
||||
"""
|
||||
# Clear existing attributes
|
||||
ret = xattr.clear(existing_file)
|
||||
assert ret
|
||||
|
||||
# Write some attributes
|
||||
ret = xattr.write(existing_file, "spongebob", "squarepants")
|
||||
assert ret
|
||||
|
||||
ret = xattr.write(existing_file, "squidward", "plankton")
|
||||
assert ret
|
||||
|
||||
ret = xattr.write(existing_file, "crabby", "patty")
|
||||
assert ret
|
||||
|
||||
# Test that they were actually added
|
||||
ret = xattr.list(existing_file)
|
||||
assert ret == {
|
||||
"spongebob": "squarepants",
|
||||
"squidward": "plankton",
|
||||
"crabby": "patty",
|
||||
}
|
||||
|
||||
# Test file not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = xattr.write(non_existing_file, "patrick", "jellyfish")
|
||||
assert f"File not found: {non_existing_file}" in str(exc.value)
|
||||
|
||||
|
||||
def test_read(xattr, existing_file, non_existing_file):
|
||||
"""
|
||||
Test xattr.read
|
||||
"""
|
||||
# Clear existing attributes
|
||||
ret = xattr.clear(existing_file)
|
||||
assert ret
|
||||
|
||||
# Write an attribute
|
||||
ret = xattr.write(existing_file, "spongebob", "squarepants")
|
||||
assert ret
|
||||
|
||||
# Read the attribute
|
||||
ret = xattr.read(existing_file, "spongebob")
|
||||
assert ret == "squarepants"
|
||||
|
||||
# Test file not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = xattr.read(non_existing_file, "spongebob")
|
||||
assert f"File not found: {non_existing_file}" in str(exc.value)
|
||||
|
||||
# Test attribute not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = xattr.read(existing_file, "patrick")
|
||||
assert "Attribute not found: patrick" in str(exc.value)
|
||||
|
||||
|
||||
def test_delete(xattr, existing_file, non_existing_file):
|
||||
"""
|
||||
Test xattr.delete
|
||||
"""
|
||||
# Clear existing attributes
|
||||
ret = xattr.clear(existing_file)
|
||||
assert ret
|
||||
|
||||
# Write some attributes
|
||||
ret = xattr.write(existing_file, "spongebob", "squarepants")
|
||||
assert ret
|
||||
|
||||
ret = xattr.write(existing_file, "squidward", "plankton")
|
||||
assert ret
|
||||
|
||||
ret = xattr.write(existing_file, "crabby", "patty")
|
||||
assert ret
|
||||
|
||||
# Delete an attribute
|
||||
ret = xattr.delete(existing_file, "squidward")
|
||||
assert ret
|
||||
|
||||
# Make sure it was actually deleted
|
||||
ret = xattr.list(existing_file)
|
||||
assert ret == {
|
||||
"spongebob": "squarepants",
|
||||
"crabby": "patty",
|
||||
}
|
||||
|
||||
# Test file not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = xattr.delete(non_existing_file, "spongebob")
|
||||
assert f"File not found: {non_existing_file}" in str(exc.value)
|
||||
|
||||
# Test attribute not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = xattr.delete(existing_file, "patrick")
|
||||
assert "Attribute not found: patrick" in str(exc.value)
|
||||
|
||||
|
||||
def test_clear(xattr, existing_file, non_existing_file):
|
||||
"""
|
||||
Test xattr.clear
|
||||
"""
|
||||
# Clear existing attributes
|
||||
ret = xattr.clear(existing_file)
|
||||
assert ret
|
||||
|
||||
# Write some attributes
|
||||
ret = xattr.write(existing_file, "spongebob", "squarepants")
|
||||
assert ret
|
||||
|
||||
ret = xattr.write(existing_file, "squidward", "plankton")
|
||||
assert ret
|
||||
|
||||
ret = xattr.write(existing_file, "crabby", "patty")
|
||||
assert ret
|
||||
|
||||
# Test Clear
|
||||
ret = xattr.clear(existing_file)
|
||||
assert ret
|
||||
|
||||
# Test file not found
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ret = xattr.clear(non_existing_file)
|
||||
assert f"File not found: {non_existing_file}" in str(exc.value)
|
188
tests/pytests/integration/modules/test_mac_sysctl.py
Normal file
188
tests/pytests/integration/modules/test_mac_sysctl.py
Normal file
|
@ -0,0 +1,188 @@
|
|||
"""
|
||||
:codeauthor: Nicole Thomas <nicole@saltstack.com>
|
||||
"""
|
||||
|
||||
import os
|
||||
import random
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.utils.files
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
pytest.mark.skip_unless_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def assign_cmd():
|
||||
return "net.inet.icmp.timestamp"
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def config_file():
|
||||
return "/etc/sysctl.conf"
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
def setup_teardown_vars(salt_call_cli, assign_cmd, config_file):
    """
    Capture the current value of ``assign_cmd`` and back up /etc/sysctl.conf,
    restoring both after the test. Yields the original sysctl value.
    """
    has_conf = False
    temp_sysctl_config = None
    ret = salt_call_cli.run("sysctl.get", assign_cmd, config_file)
    val = ret.data

    if val is None:
        pytest.skip(f"The call 'sysctl.get {assign_cmd}' returned: None")

    # If sysctl file is present, make a copy
    # Remove original file so we can replace it with test files
    if os.path.isfile(config_file):
        has_conf = True
        try:
            temp_sysctl_config = __copy_sysctl(config_file)
        except CommandExecutionError:
            msg = "Could not copy file: {0}"
            raise CommandExecutionError(msg.format(config_file))
        os.remove(config_file)

    try:
        yield val
    finally:
        # Restore the original sysctl value if the test changed it.
        ret = salt_call_cli.run("sysctl.get", assign_cmd)
        if ret.data != val:
            salt_call_cli.run("sysctl.assign", assign_cmd, val)

        if has_conf is True:
            # restore original sysctl file
            __restore_sysctl(config_file, temp_sysctl_config)

        if has_conf is False and os.path.isfile(config_file):
            # remove sysctl.conf created by tests
            # BUG FIX: the original removed `temp_sysctl_config`, which is
            # never assigned on this branch (NameError); the file created by
            # the tests is `config_file` itself.
            os.remove(config_file)
|
||||
|
||||
|
||||
def test_assign(salt_call_cli, assign_cmd, setup_teardown_vars):
    """
    Tests assigning a single sysctl parameter
    """
    # The fixture yields the original sysctl value directly.
    # BUG FIX: the original indexed it with [0], which only grabbed the
    # first character of the value string.
    val = setup_teardown_vars

    try:
        rand = random.randint(0, 500)
        # Make sure we pick a value different from the current one.
        while str(rand) == str(val):
            rand = random.randint(0, 500)
        salt_call_cli.run("sysctl.assign", assign_cmd, rand)
        # BUG FIX: the original wrapped the run() result itself in int()
        # (TypeError) and then read `.data` off the int; convert the
        # returned data instead.
        ret = salt_call_cli.run("sysctl.get", assign_cmd)
        info = int(ret.data)
        try:
            assert rand == info
        except AssertionError:
            # Put the original value back before failing.
            salt_call_cli.run("sysctl.assign", assign_cmd, val)
            raise
    except CommandExecutionError:
        salt_call_cli.run("sysctl.assign", assign_cmd, val)
        raise
|
||||
|
||||
|
||||
def test_persist_new_file(salt_call_cli, assign_cmd, config_file):
|
||||
"""
|
||||
Tests assigning a sysctl value to a system without a sysctl.conf file
|
||||
"""
|
||||
# Always start with a clean/known sysctl.conf state
|
||||
if os.path.isfile(config_file):
|
||||
os.remove(config_file)
|
||||
try:
|
||||
salt_call_cli.run("sysctl.persist", assign_cmd, 10)
|
||||
line = f"{assign_cmd}={10}"
|
||||
found = __check_string(config_file, line)
|
||||
assert found
|
||||
except CommandExecutionError:
|
||||
os.remove(config_file)
|
||||
raise
|
||||
|
||||
|
||||
def test_persist_already_set(salt_call_cli, config_file, setup_teardown_vars):
|
||||
"""
|
||||
Tests assigning a sysctl value that is already set in sysctl.conf file
|
||||
"""
|
||||
# Always start with a clean/known sysctl.conf state
|
||||
if os.path.isfile(config_file):
|
||||
os.remove(config_file)
|
||||
try:
|
||||
salt_call_cli.run("sysctl.persist", assign_cmd, 50)
|
||||
ret = salt_call_cli.run("sysctl.persist", assign_cmd, 50)
|
||||
assert ret.data == "Already set"
|
||||
except CommandExecutionError:
|
||||
os.remove(config_file)
|
||||
raise
|
||||
|
||||
|
||||
def test_persist_apply_change(
    salt_call_cli, assign_cmd, config_file, setup_teardown_vars
):
    """
    Tests assigning a sysctl value and applying the change to system
    """
    # The fixture yields the original sysctl value directly.
    # BUG FIX: the original indexed it with [0], which only grabbed the
    # first character of the value string.
    val = setup_teardown_vars

    # Always start with a clean/known sysctl.conf state
    if os.path.isfile(config_file):
        os.remove(config_file)
    try:
        rand = random.randint(0, 500)
        # Make sure we pick a value different from the current one.
        while str(rand) == str(val):
            rand = random.randint(0, 500)
        salt_call_cli.run("sysctl.persist", assign_cmd, rand, apply_change=True)
        ret = salt_call_cli.run("sysctl.get", assign_cmd)
        info = int(ret.data)
        assert info == rand
    except CommandExecutionError:
        os.remove(config_file)
        raise
|
||||
|
||||
|
||||
def __copy_sysctl(CONFIG):
    """
    Copies an existing sysconf file and returns temp file path. Copied
    file will be restored in tearDown
    """
    # Create a fresh temp file and mirror the config into it.
    temp_path = salt.utils.files.mkstemp()
    with salt.utils.files.fopen(CONFIG, "r") as src:
        with salt.utils.files.fopen(temp_path, "w") as dst:
            dst.writelines(src)
    return temp_path
|
||||
|
||||
|
||||
def __restore_sysctl(sysctl_config, temp_sysctl_config):
    """
    Restores the original sysctl.conf file from temporary copy
    """
    # Drop the test-mangled config if present.
    if os.path.isfile(sysctl_config):
        os.remove(sysctl_config)

    # Copy the saved lines back into place.
    with salt.utils.files.fopen(temp_sysctl_config, "r") as backup:
        with salt.utils.files.fopen(sysctl_config, "w") as conf:
            conf.writelines(backup)

    # The temporary backup is no longer needed.
    os.remove(temp_sysctl_config)
|
||||
|
||||
|
||||
def __check_string(conf_file, to_find):
    """
    Returns True if given line is present in file
    """
    # NOTE(review): this relies on salt.utils.stringutils, which is not
    # imported at the top of this module — confirm it is reachable via the
    # `salt.utils.files` import chain.
    with salt.utils.files.fopen(conf_file, "r") as handle:
        return any(
            to_find in salt.utils.stringutils.to_unicode(line) for line in handle
        )
|
|
@ -214,15 +214,19 @@ def test_gpg_pillar(salt_ssh_cli):
|
|||
assert ret.returncode == 0
|
||||
assert isinstance(ret.data, dict)
|
||||
assert ret.data
|
||||
assert "secrets" in ret.data
|
||||
assert "foo" in ret.data["secrets"]
|
||||
assert "BEGIN PGP MESSAGE" not in ret.data["secrets"]["foo"]
|
||||
assert ret.data["secrets"]["foo"] == "supersecret"
|
||||
assert "_errors" not in ret.data
|
||||
_assert_gpg_pillar(ret.data)
|
||||
|
||||
|
||||
def _assert_gpg_pillar(ret):
|
||||
assert "secrets" in ret
|
||||
assert "foo" in ret["secrets"]
|
||||
assert "BEGIN PGP MESSAGE" not in ret["secrets"]["foo"]
|
||||
assert ret["secrets"]["foo"] == "supersecret"
|
||||
assert "_errors" not in ret
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("pillar_setup")
|
||||
def test_saltutil_runner(salt_ssh_cli, salt_minion, salt_run_cli):
|
||||
def test_saltutil_runner(salt_ssh_cli, salt_minion):
|
||||
"""
|
||||
Ensure that during pillar compilation, the cache dir is not
|
||||
overridden. For a history, see PR #50489 and issue #36796,
|
||||
|
@ -233,9 +237,66 @@ def test_saltutil_runner(salt_ssh_cli, salt_minion, salt_run_cli):
|
|||
assert ret.returncode == 0
|
||||
assert isinstance(ret.data, dict)
|
||||
assert ret.data
|
||||
assert "saltutil" in ret.data
|
||||
assert isinstance(ret.data["saltutil"], dict)
|
||||
assert ret.data["saltutil"]
|
||||
assert salt_minion.id in ret.data["saltutil"]
|
||||
assert ret.data["saltutil"][salt_minion.id] is True
|
||||
assert "_errors" not in ret.data
|
||||
_assert_saltutil_runner_pillar(ret.data, salt_minion.id)
|
||||
|
||||
|
||||
def _assert_saltutil_runner_pillar(ret, salt_minion_id):
|
||||
assert "saltutil" in ret
|
||||
assert isinstance(ret["saltutil"], dict)
|
||||
assert ret["saltutil"]
|
||||
assert salt_minion_id in ret["saltutil"]
|
||||
assert ret["saltutil"][salt_minion_id] is True
|
||||
assert "_errors" not in ret
|
||||
|
||||
|
||||
@pytest.mark.skip_if_binaries_missing("gpg")
|
||||
@pytest.mark.usefixtures("pillar_setup", "gpg_homedir")
|
||||
def test_gpg_pillar_orch(salt_ssh_cli, salt_run_cli, gpg_homedir):
|
||||
"""
|
||||
Ensure that GPG-encrypted pillars can be decrypted when Salt-SSH is
|
||||
called during an orchestration or via saltutil.cmd.
|
||||
This is issue #65670.
|
||||
"""
|
||||
# Use salt_run_cli since the config paths are different between
|
||||
# test master and test minion.
|
||||
ret = salt_run_cli.run(
|
||||
"salt.cmd",
|
||||
"saltutil.cmd",
|
||||
salt_ssh_cli.target_host,
|
||||
"pillar.items",
|
||||
ssh=True,
|
||||
ignore_host_keys=True,
|
||||
roster_file=str(salt_ssh_cli.roster_file),
|
||||
ssh_priv=str(salt_ssh_cli.client_key),
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
assert isinstance(ret.data, dict)
|
||||
assert ret.data
|
||||
_assert_gpg_pillar(ret.data[salt_ssh_cli.target_host]["return"])
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("pillar_setup")
|
||||
def test_saltutil_runner_orch(salt_ssh_cli, salt_run_cli, salt_minion):
|
||||
"""
|
||||
Ensure that runner calls in the pillar succeed when Salt-SSH is
|
||||
called during an orchestration or via saltutil.cmd.
|
||||
This is a variant of issue #65670.
|
||||
"""
|
||||
# Use salt_run_cli since the config paths are different between
|
||||
# test master and test minion.
|
||||
ret = salt_run_cli.run(
|
||||
"salt.cmd",
|
||||
"saltutil.cmd",
|
||||
salt_ssh_cli.target_host,
|
||||
"pillar.items",
|
||||
ssh=True,
|
||||
ignore_host_keys=True,
|
||||
roster_file=str(salt_ssh_cli.roster_file),
|
||||
ssh_priv=str(salt_ssh_cli.client_key),
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
assert isinstance(ret.data, dict)
|
||||
assert ret.data
|
||||
_assert_saltutil_runner_pillar(
|
||||
ret.data[salt_ssh_cli.target_host]["return"], salt_minion.id
|
||||
)
|
||||
|
|
0
tests/pytests/pkg/__init__.py
Normal file
0
tests/pytests/pkg/__init__.py
Normal file
471
tests/pytests/pkg/conftest.py
Normal file
471
tests/pytests/pkg/conftest.py
Normal file
|
@ -0,0 +1,471 @@
|
|||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
import yaml
|
||||
from pytestskipmarkers.utils import platform
|
||||
from saltfactories.utils import random_string
|
||||
|
||||
import salt.config
|
||||
from tests.conftest import CODE_DIR
|
||||
from tests.support.pkg import ApiRequest, SaltMaster, SaltMasterWindows, SaltPkgInstall
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Variable defining a FIPS test run or not
|
||||
FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1"
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def version(install_salt):
|
||||
"""
|
||||
get version number from artifact
|
||||
"""
|
||||
return install_salt.version
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
def _system_up_to_date(
|
||||
grains,
|
||||
shell,
|
||||
):
|
||||
if grains["os_family"] == "Debian":
|
||||
ret = shell.run("apt", "update")
|
||||
assert ret.returncode == 0
|
||||
env = os.environ.copy()
|
||||
env["DEBIAN_FRONTEND"] = "noninteractive"
|
||||
ret = shell.run(
|
||||
"apt",
|
||||
"upgrade",
|
||||
"-y",
|
||||
"-o",
|
||||
"DPkg::Options::=--force-confdef",
|
||||
"-o",
|
||||
"DPkg::Options::=--force-confold",
|
||||
env=env,
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
elif grains["os_family"] == "Redhat":
|
||||
ret = shell.run("yum", "update", "-y")
|
||||
assert ret.returncode == 0
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
"""
|
||||
register argparse-style options and ini-style config values.
|
||||
"""
|
||||
test_selection_group = parser.getgroup("Tests Runtime Selection")
|
||||
test_selection_group.addoption(
|
||||
"--pkg-system-service",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Run the daemons as system services",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--upgrade",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Install previous version and then upgrade then run tests",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--downgrade",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Install current version and then downgrade to the previous version and run tests",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--no-install",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Do not install salt and use a previous install Salt package",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--no-uninstall",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Do not uninstall salt packages after test run is complete",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--classic",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Test an upgrade from the classic packages.",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--prev-version",
|
||||
action="store",
|
||||
help="Test an upgrade from the version specified.",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--use-prev-version",
|
||||
action="store_true",
|
||||
help="Tells the test suite to validate the version using the previous version (for downgrades)",
|
||||
)
|
||||
test_selection_group.addoption(
|
||||
"--download-pkgs",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Test package download tests",
|
||||
)
|
||||
|
||||
|
||||
@pytest.hookimpl(tryfirst=True)
|
||||
def pytest_runtest_setup(item):
|
||||
"""
|
||||
Fixtures injection based on markers or test skips based on CLI arguments
|
||||
"""
|
||||
if (
|
||||
str(item.fspath).startswith(str(pathlib.Path(__file__).parent / "download"))
|
||||
and item.config.getoption("--download-pkgs") is False
|
||||
):
|
||||
raise pytest.skip.Exception(
|
||||
"The package download tests are disabled. Pass '--download-pkgs' to pytest "
|
||||
"to enable them.",
|
||||
_use_item_location=True,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def salt_factories_root_dir(request, tmp_path_factory):
|
||||
root_dir = SaltPkgInstall.salt_factories_root_dir(
|
||||
request.config.getoption("--pkg-system-service")
|
||||
)
|
||||
if root_dir is not None:
|
||||
yield root_dir
|
||||
else:
|
||||
if platform.is_darwin():
|
||||
root_dir = pathlib.Path("/tmp/salt-tests-tmpdir")
|
||||
root_dir.mkdir(mode=0o777, parents=True, exist_ok=True)
|
||||
else:
|
||||
root_dir = tmp_path_factory.mktemp("salt-tests")
|
||||
try:
|
||||
yield root_dir
|
||||
finally:
|
||||
shutil.rmtree(str(root_dir), ignore_errors=True)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def salt_factories_config(salt_factories_root_dir):
|
||||
return {
|
||||
"code_dir": CODE_DIR,
|
||||
"root_dir": salt_factories_root_dir,
|
||||
"system_service": True,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def install_salt(request, salt_factories_root_dir):
|
||||
with SaltPkgInstall(
|
||||
conf_dir=salt_factories_root_dir / "etc" / "salt",
|
||||
pkg_system_service=request.config.getoption("--pkg-system-service"),
|
||||
upgrade=request.config.getoption("--upgrade"),
|
||||
downgrade=request.config.getoption("--downgrade"),
|
||||
no_uninstall=request.config.getoption("--no-uninstall"),
|
||||
no_install=request.config.getoption("--no-install"),
|
||||
classic=request.config.getoption("--classic"),
|
||||
prev_version=request.config.getoption("--prev-version"),
|
||||
use_prev_version=request.config.getoption("--use-prev-version"),
|
||||
) as fixture:
|
||||
yield fixture
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def salt_factories(salt_factories, salt_factories_root_dir):
|
||||
salt_factories.root_dir = salt_factories_root_dir
|
||||
return salt_factories
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def salt_master(salt_factories, install_salt, pkg_tests_account):
|
||||
"""
|
||||
Start up a master
|
||||
"""
|
||||
if platform.is_windows():
|
||||
state_tree = "C:/salt/srv/salt"
|
||||
pillar_tree = "C:/salt/srv/pillar"
|
||||
elif platform.is_darwin():
|
||||
state_tree = "/opt/srv/salt"
|
||||
pillar_tree = "/opt/srv/pillar"
|
||||
else:
|
||||
state_tree = "/srv/salt"
|
||||
pillar_tree = "/srv/pillar"
|
||||
|
||||
start_timeout = None
|
||||
# Since the daemons are "packaged" with tiamat, the salt plugins provided
|
||||
# by salt-factories won't be discovered. Provide the required `*_dirs` on
|
||||
# the configuration so that they can still be used.
|
||||
config_defaults = {
|
||||
"engines_dirs": [
|
||||
str(salt_factories.get_salt_engines_path()),
|
||||
],
|
||||
"log_handlers_dirs": [
|
||||
str(salt_factories.get_salt_log_handlers_path()),
|
||||
],
|
||||
}
|
||||
if platform.is_darwin():
|
||||
config_defaults["enable_fqdns_grains"] = False
|
||||
config_overrides = {
|
||||
"timeout": 30,
|
||||
"file_roots": {
|
||||
"base": [
|
||||
state_tree,
|
||||
]
|
||||
},
|
||||
"pillar_roots": {
|
||||
"base": [
|
||||
pillar_tree,
|
||||
]
|
||||
},
|
||||
"rest_cherrypy": {
|
||||
"port": 8000,
|
||||
"disable_ssl": True,
|
||||
},
|
||||
"netapi_enable_clients": ["local"],
|
||||
"external_auth": {
|
||||
"auto": {
|
||||
pkg_tests_account.username: [
|
||||
".*",
|
||||
],
|
||||
},
|
||||
},
|
||||
"fips_mode": FIPS_TESTRUN,
|
||||
"open_mode": True,
|
||||
}
|
||||
test_user = False
|
||||
master_config = install_salt.config_path / "master"
|
||||
if master_config.exists():
|
||||
with salt.utils.files.fopen(master_config) as fp:
|
||||
data = yaml.safe_load(fp)
|
||||
if data and "user" in data:
|
||||
test_user = True
|
||||
# We are testing a different user, so we need to test the system
|
||||
# configs, or else permissions will not be correct.
|
||||
config_overrides["user"] = data["user"]
|
||||
config_overrides["log_file"] = salt.config.DEFAULT_MASTER_OPTS.get(
|
||||
"log_file"
|
||||
)
|
||||
config_overrides["root_dir"] = salt.config.DEFAULT_MASTER_OPTS.get(
|
||||
"root_dir"
|
||||
)
|
||||
config_overrides["key_logfile"] = salt.config.DEFAULT_MASTER_OPTS.get(
|
||||
"key_logfile"
|
||||
)
|
||||
config_overrides["pki_dir"] = salt.config.DEFAULT_MASTER_OPTS.get(
|
||||
"pki_dir"
|
||||
)
|
||||
config_overrides["api_logfile"] = salt.config.DEFAULT_API_OPTS.get(
|
||||
"api_logfile"
|
||||
)
|
||||
config_overrides["api_pidfile"] = salt.config.DEFAULT_API_OPTS.get(
|
||||
"api_pidfile"
|
||||
)
|
||||
# verify files were set with correct owner/group
|
||||
verify_files = [
|
||||
pathlib.Path("/etc", "salt", "pki", "master"),
|
||||
pathlib.Path("/etc", "salt", "master.d"),
|
||||
pathlib.Path("/var", "cache", "salt", "master"),
|
||||
]
|
||||
for _file in verify_files:
|
||||
assert _file.owner() == "salt"
|
||||
assert _file.group() == "salt"
|
||||
|
||||
master_script = False
|
||||
if platform.is_windows():
|
||||
if install_salt.classic:
|
||||
master_script = True
|
||||
if install_salt.relenv:
|
||||
master_script = True
|
||||
elif not install_salt.upgrade:
|
||||
master_script = True
|
||||
if (
|
||||
not install_salt.relenv
|
||||
and install_salt.use_prev_version
|
||||
and not install_salt.classic
|
||||
):
|
||||
master_script = False
|
||||
|
||||
if master_script:
|
||||
salt_factories.system_service = False
|
||||
salt_factories.generate_scripts = True
|
||||
scripts_dir = salt_factories.root_dir / "Scripts"
|
||||
scripts_dir.mkdir(exist_ok=True)
|
||||
salt_factories.scripts_dir = scripts_dir
|
||||
python_executable = install_salt.bin_dir / "Scripts" / "python.exe"
|
||||
if install_salt.classic:
|
||||
python_executable = install_salt.bin_dir / "python.exe"
|
||||
if install_salt.relenv:
|
||||
python_executable = install_salt.install_dir / "Scripts" / "python.exe"
|
||||
salt_factories.python_executable = python_executable
|
||||
factory = salt_factories.salt_master_daemon(
|
||||
random_string("master-"),
|
||||
defaults=config_defaults,
|
||||
overrides=config_overrides,
|
||||
factory_class=SaltMasterWindows,
|
||||
salt_pkg_install=install_salt,
|
||||
)
|
||||
salt_factories.system_service = True
|
||||
else:
|
||||
|
||||
if install_salt.classic and platform.is_darwin():
|
||||
os.environ["PATH"] += ":/opt/salt/bin"
|
||||
|
||||
factory = salt_factories.salt_master_daemon(
|
||||
random_string("master-"),
|
||||
defaults=config_defaults,
|
||||
overrides=config_overrides,
|
||||
factory_class=SaltMaster,
|
||||
salt_pkg_install=install_salt,
|
||||
)
|
||||
factory.after_terminate(pytest.helpers.remove_stale_master_key, factory)
|
||||
if test_user:
|
||||
# Salt factories calls salt.utils.verify.verify_env
|
||||
# which sets root perms on /etc/salt/pki/master since we are running
|
||||
# the test suite as root, but we want to run Salt master as salt
|
||||
# We ensure those permissions where set by the package earlier
|
||||
subprocess.run(
|
||||
[
|
||||
"chown",
|
||||
"-R",
|
||||
"salt:salt",
|
||||
str(pathlib.Path("/etc", "salt", "pki", "master")),
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
|
||||
if not platform.is_windows() and not platform.is_darwin():
|
||||
# The engines_dirs is created in .nox path. We need to set correct perms
|
||||
# for the user running the Salt Master
|
||||
check_paths = [state_tree, pillar_tree, CODE_DIR / ".nox"]
|
||||
for path in check_paths:
|
||||
if os.path.exists(path) is False:
|
||||
continue
|
||||
subprocess.run(["chown", "-R", "salt:salt", str(path)], check=False)
|
||||
|
||||
with factory.started(start_timeout=start_timeout):
|
||||
yield factory
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def salt_minion(salt_factories, salt_master, install_salt):
    """
    Start up a minion connected to the session ``salt_master``.

    Yields the running minion daemon factory; the minion is stopped and its
    stale key removed from the master on teardown.
    """
    start_timeout = None
    minion_id = random_string("minion-")
    # Since the daemons are "packaged" with tiamat, the salt plugins provided
    # by salt-factories won't be discovered. Provide the required `*_dirs` on
    # the configuration so that they can still be used.
    config_defaults = {
        "engines_dirs": salt_master.config["engines_dirs"].copy(),
        "log_handlers_dirs": salt_master.config["log_handlers_dirs"].copy(),
    }
    if platform.is_darwin():
        config_defaults["enable_fqdns_grains"] = False
    config_overrides = {
        "id": minion_id,
        "file_roots": salt_master.config["file_roots"].copy(),
        "pillar_roots": salt_master.config["pillar_roots"].copy(),
        "fips_mode": FIPS_TESTRUN,
        "open_mode": True,
    }
    if platform.is_windows():
        # Point the win repo dirs at the factories root dir on Windows.
        config_overrides[
            "winrepo_dir"
        ] = rf"{salt_factories.root_dir}\srv\salt\win\repo"
        config_overrides[
            "winrepo_dir_ng"
        ] = rf"{salt_factories.root_dir}\srv\salt\win\repo_ng"
        config_overrides["winrepo_source_dir"] = r"salt://win/repo_ng"

    if install_salt.classic and platform.is_windows():
        salt_factories.python_executable = None

    if install_salt.classic and platform.is_darwin():
        # Classic macOS packages install their binaries under /opt/salt/bin.
        os.environ["PATH"] += ":/opt/salt/bin"

    factory = salt_master.salt_minion_daemon(
        minion_id,
        overrides=config_overrides,
        defaults=config_defaults,
    )

    # Salt factories calls salt.utils.verify.verify_env
    # which sets root perms on /srv/salt and /srv/pillar since we are running
    # the test suite as root, but we want to run Salt master as salt
    if not platform.is_windows() and not platform.is_darwin():
        state_tree = "/srv/salt"
        pillar_tree = "/srv/pillar"
        check_paths = [state_tree, pillar_tree, CODE_DIR / ".nox"]
        for path in check_paths:
            if os.path.exists(path) is False:
                continue
            # Best-effort (check=False): missing user/group must not abort setup.
            subprocess.run(["chown", "-R", "salt:salt", str(path)], check=False)

    factory.after_terminate(
        pytest.helpers.remove_stale_minion_key, salt_master, factory.id
    )
    with factory.started(start_timeout=start_timeout):
        yield factory
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def salt_cli(salt_master):
    """Return a ``salt`` CLI wrapper bound to the running master."""
    cli = salt_master.salt_cli()
    return cli
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def salt_key_cli(salt_master):
    """Return a ``salt-key`` CLI wrapper bound to the running master."""
    cli = salt_master.salt_key_cli()
    return cli
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def salt_call_cli(salt_minion):
    """Return a ``salt-call`` CLI wrapper bound to the running minion."""
    cli = salt_minion.salt_call_cli()
    return cli
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
def pkg_tests_account():
    """Provision a throwaway OS user account for the whole session."""
    with pytest.helpers.create_account() as account:
        yield account
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def extras_pypath():
    """
    Path of the ``extras-<major>.<minor>`` directory inside the package
    install tree, where pip-installed extras land.
    """
    extras_dir = "extras-{}.{}".format(*sys.version_info)
    if platform.is_windows():
        base = pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt")
    elif platform.is_darwin():
        base = pathlib.Path("/opt", "salt")
    else:
        base = pathlib.Path("/opt", "saltstack", "salt")
    return base / extras_dir
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def extras_pypath_bin(extras_pypath):
    """``bin`` directory under the extras pip path."""
    bin_dir = extras_pypath / "bin"
    return bin_dir
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def salt_api(salt_master, install_salt, extras_pypath):
    """
    start up and configure salt_api
    """
    # Start from a clean extras dir so leftovers from pip tests don't leak in.
    shutil.rmtree(str(extras_pypath), ignore_errors=True)
    factory = salt_master.salt_api_daemon()
    with factory.started(start_timeout=None):
        yield factory
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def api_request(pkg_tests_account, salt_api):
    """Yield an authenticated HTTP session against the running salt-api."""
    api_port = salt_api.config["rest_cherrypy"]["port"]
    with ApiRequest(port=api_port, account=pkg_tests_account) as session:
        yield session
|
0
tests/pytests/pkg/downgrade/__init__.py
Normal file
0
tests/pytests/pkg/downgrade/__init__.py
Normal file
59
tests/pytests/pkg/downgrade/test_salt_downgrade.py
Normal file
59
tests/pytests/pkg/downgrade/test_salt_downgrade.py
Normal file
|
@ -0,0 +1,59 @@
|
|||
import packaging.version
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
|
||||
def test_salt_downgrade(salt_call_cli, install_salt):
    """
    Test a downgrade of Salt to the previously released version.
    """
    if not install_salt.downgrade:
        pytest.skip("Not testing a downgrade, do not run")

    # NOTE(review): 3006.0 appears to be the first relenv-based release, so a
    # downgrade landing on >= 3006.0 keeps the relenv layout — confirm.
    is_downgrade_to_relenv = packaging.version.parse(
        install_salt.prev_version
    ) >= packaging.version.parse("3006.0")

    if is_downgrade_to_relenv:
        original_py_version = install_salt.package_python_version()

    # Verify current install version is setup correctly and works
    ret = salt_call_cli.run("test.version")
    assert ret.returncode == 0
    assert packaging.version.parse(ret.data) == packaging.version.parse(
        install_salt.artifact_version
    )

    # Test pip install before a downgrade
    dep = "PyGithub==1.56.0"
    install = salt_call_cli.run("--local", "pip.install", dep)
    assert install.returncode == 0

    # Verify we can use the module dependent on the installed package:
    # reaching the authentication error proves PyGithub imported fine.
    repo = "https://github.com/saltstack/salt.git"
    use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
    assert "Authentication information could" in use_lib.stderr

    # Downgrade Salt to the previous version and test
    install_salt.install(downgrade=True)
    bin_file = "salt"
    if platform.is_windows():
        if not is_downgrade_to_relenv:
            # Pre-relenv Windows packages ship .bat wrappers.
            bin_file = install_salt.install_dir / "salt-call.bat"
        else:
            bin_file = install_salt.install_dir / "salt-call.exe"
    elif platform.is_darwin() and install_salt.classic:
        bin_file = install_salt.bin_dir / "salt-call"

    ret = install_salt.proc.run(bin_file, "--version")
    assert ret.returncode == 0
    # "--version" prints e.g. "salt-call 3006.x"; field 1 is the version.
    assert packaging.version.parse(
        ret.stdout.strip().split()[1]
    ) < packaging.version.parse(install_salt.artifact_version)

    if is_downgrade_to_relenv:
        new_py_version = install_salt.package_python_version()
        if new_py_version == original_py_version:
            # test pip install after a downgrade
            use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
            assert "Authentication information could" in use_lib.stderr
|
0
tests/pytests/pkg/download/__init__.py
Normal file
0
tests/pytests/pkg/download/__init__.py
Normal file
559
tests/pytests/pkg/download/test_pkg_download.py
Normal file
559
tests/pytests/pkg/download/test_pkg_download.py
Normal file
|
@ -0,0 +1,559 @@
|
|||
"""
|
||||
Test Salt Pkg Downloads
|
||||
"""
|
||||
import contextlib
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
|
||||
import packaging
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_salt_test_commands():
    """Return the smoke-test command lines for the platform and release."""
    salt_release = get_salt_release()
    if not platform.is_windows():
        return [
            ["salt-call", "--local", "test.versions"],
            ["salt-call", "--local", "grains.items"],
            ["salt", "--version"],
            ["salt-master", "--version"],
            ["salt-minion", "--version"],
            ["salt-ssh", "--version"],
            ["salt-syndic", "--version"],
            ["salt-api", "--version"],
            ["salt-cloud", "--version"],
        ]
    # Windows: releases after 3005 expose a subset of .exe entry points,
    # while older packages ship .bat wrappers for the full command set.
    if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
        return [
            ["salt-call.exe", "--local", "test.versions"],
            ["salt-call.exe", "--local", "grains.items"],
            ["salt-minion.exe", "--version"],
        ]
    return [
        ["salt-call.bat", "--local", "test.versions"],
        ["salt-call.bat", "--local", "grains.items"],
        ["salt.bat", "--version"],
        ["salt-master.bat", "--version"],
        ["salt-minion.bat", "--version"],
        ["salt-ssh.bat", "--version"],
        ["salt-syndic.bat", "--version"],
        ["salt-api.bat", "--version"],
        ["salt-cloud.bat", "--version"],
    ]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def root_url(salt_release):
    """
    Base URL of the Salt package repository for this test run.

    Honors SALT_REPO_TYPE (release/staging), the matching SALT_REPO_DOMAIN_*
    override, and optional SALT_REPO_USER/SALT_REPO_PASS basic-auth creds.
    """
    if os.environ.get("SALT_REPO_TYPE", "release") == "staging":
        repo_domain = os.environ.get(
            "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io"
        )
    else:
        repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io")
    # Release candidates live under a separate salt_rc/ prefix.
    if "rc" in salt_release:
        salt_path = "salt_rc/salt"
    else:
        salt_path = "salt"
    salt_repo_user = os.environ.get("SALT_REPO_USER")
    if salt_repo_user:
        # Log a masked form (first and last char only).
        # NOTE(review): a 1-2 char credential would still be fully revealed.
        log.info(
            "SALT_REPO_USER: %s",
            salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1],
        )
    salt_repo_pass = os.environ.get("SALT_REPO_PASS")
    if salt_repo_pass:
        log.info(
            "SALT_REPO_PASS: %s",
            salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1],
        )
    if salt_repo_user and salt_repo_pass:
        # Embed basic-auth credentials directly in the URL.
        repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}"
    _root_url = f"https://{repo_domain}/{salt_path}/py3"
    log.info("Repository Root URL: %s", _root_url)
    return _root_url
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def package_type():
    """Package flavor under test, from DOWNLOAD_TEST_PACKAGE_TYPE (or None)."""
    flavor = os.environ.get("DOWNLOAD_TEST_PACKAGE_TYPE")
    return flavor
|
||||
|
||||
|
||||
def get_salt_release():
    """
    Return the Salt release string under test.

    Read from SALT_RELEASE, defaulting to "3006.0rc2"; warn loudly for
    download-pkgs runs when the value is missing or looks too old.
    """
    salt_release = os.environ.get("SALT_RELEASE")
    pkg_test_type = os.environ.get("PKG_TEST_TYPE", "install")
    if salt_release is None:
        if pkg_test_type == "download-pkgs":
            log.warning(
                "Setting salt release to 3006.0rc2 which is probably not what you want."
            )
        salt_release = "3006.0rc2"
    if pkg_test_type == "download-pkgs":
        if packaging.version.parse(salt_release) < packaging.version.parse("3006.0rc1"):
            # Fix: use lazy %-style logging args instead of an eagerly
            # evaluated f-string, consistent with the other log calls here.
            log.warning("The salt release being tested, %r looks off.", salt_release)
    return salt_release
|
||||
|
||||
|
||||
def get_repo_subpath_params():
    """
    Return the repo sub-paths to parametrize on: always "minor" and the
    major version; additionally "latest" when the release under test is at
    or beyond LATEST_SALT_RELEASE.
    """
    current_release = packaging.version.parse(get_salt_release())
    params = ["minor", current_release.major]
    latest_env_var = os.environ.get("LATEST_SALT_RELEASE")
    if latest_env_var is not None:
        latest_release = packaging.version.parse(latest_env_var)
        if current_release >= latest_release:
            # Fix: lazy %-style logging args instead of an f-string; Version
            # objects stringify themselves, so no explicit str() needed.
            log.debug(
                "Running the tests for the latest release since %s >= %s",
                current_release,
                latest_release,
            )
            params.append("latest")
    return params
|
||||
|
||||
|
||||
@pytest.fixture(
    scope="module",
    params=get_repo_subpath_params(),
)
def repo_subpath(request):
    """Parametrized repo sub-path: "minor", the major version, maybe "latest"."""
    subpath = request.param
    return subpath
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def gpg_key_name(salt_release):
    """Filename of the GPG public key that signs this release's repo."""
    is_legacy = packaging.version.parse(salt_release) <= packaging.version.parse(
        "3005"
    )
    if is_legacy:
        return "salt-archive-keyring.gpg"
    return "SALT-PROJECT-GPG-PUBKEY-2023.pub"
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def salt_release():
    """The Salt release string under test (see get_salt_release)."""
    release = get_salt_release()
    yield release
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def onedir_install_path(tmp_path_factory):
    """Temp dir where the onedir archive is extracted; removed on teardown."""
    extract_dir = tmp_path_factory.mktemp("onedir_install")
    yield extract_dir
    shutil.rmtree(extract_dir, ignore_errors=True)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def _setup_system(
    grains,
    shell,
    root_url,
    salt_release,
    gpg_key_name,
    repo_subpath,
    package_type,
    tmp_path_factory,
    onedir_install_path,
):
    """
    Install Salt from the download repository for the current platform.

    Dispatches to the per-OS-family ``setup_*`` helper; downloads go into a
    temp dir that is always removed on teardown.
    """
    downloads_path = tmp_path_factory.mktemp("downloads")
    try:
        # Windows is a special case, because sometimes we need to uninstall the packages
        if grains["os_family"] == "Windows":
            with setup_windows(
                shell,
                root_url=root_url,
                salt_release=salt_release,
                downloads_path=downloads_path,
                repo_subpath=repo_subpath,
                package_type=package_type,
                onedir_install_path=onedir_install_path,
            ):
                yield
        else:
            if grains["os_family"] == "MacOS":
                setup_macos(
                    shell,
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    repo_subpath=repo_subpath,
                    package_type=package_type,
                    onedir_install_path=onedir_install_path,
                )
            elif grains["os"] == "Amazon":
                setup_redhat_family(
                    shell,
                    os_name=grains["os"].lower(),
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os"] == "Fedora":
                setup_redhat_family(
                    shell,
                    os_name=grains["os"].lower(),
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os"] == "VMware Photon OS":
                # Photon reports its own os grain; repos live under "photon".
                setup_redhat_family(
                    shell,
                    os_name="photon",
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os_family"] == "RedHat":
                setup_redhat_family(
                    shell,
                    os_name="redhat",
                    os_version=grains["osmajorrelease"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                )
            elif grains["os_family"] == "Debian":
                setup_debian_family(
                    shell,
                    os_name=grains["os"].lower(),
                    os_version=grains["osrelease"],
                    os_codename=grains["oscodename"],
                    root_url=root_url,
                    salt_release=salt_release,
                    downloads_path=downloads_path,
                    gpg_key_name=gpg_key_name,
                    repo_subpath=repo_subpath,
                    package_type=package_type,
                    onedir_install_path=onedir_install_path,
                )
            else:
                # BUG FIX: pytest.fail() takes no printf-style args — its
                # second positional parameter is `pytrace`, so the old call
                # pytest.fail("... %s", grains["osfinger"]) silently passed
                # the osfinger string as pytrace. Format the message directly.
                pytest.fail(f"Don't know how to handle {grains['osfinger']}")
            yield
    finally:
        shutil.rmtree(downloads_path, ignore_errors=True)
|
||||
|
||||
|
||||
def setup_redhat_family(
    shell,
    os_name,
    os_version,
    root_url,
    salt_release,
    downloads_path,
    gpg_key_name,
    repo_subpath,
):
    """
    Configure the Salt RPM repository on a RedHat-family host and install
    the full set of Salt packages via yum.

    Fails the test (pytest.fail) on any download or command error.
    """
    # SALT_REPO_ARCH may be unset or empty; `or` covers both cases.
    arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"

    if repo_subpath == "minor":
        # "minor" repos are additionally namespaced by the exact release.
        repo_url_base = (
            f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
        )
    else:
        repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"

    gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"

    try:
        pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
    except Exception as exc:  # pylint: disable=broad-except
        pytest.fail(f"Failed to download {gpg_file_url}: {exc}")

    ret = shell.run("rpm", "--import", str(downloads_path / gpg_key_name), check=False)
    if ret.returncode != 0:
        pytest.fail("Failed to import gpg key")

    repo_file = pytest.helpers.download_file(
        f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo"
    )

    # NOTE(review): Photon gets "clean all" where others get "expire-cache" —
    # presumably its yum/tdnf lacks expire-cache; confirm.
    commands = [
        ("mv", str(repo_file), "/etc/yum.repos.d/salt.repo"),
        ("yum", "clean", "all" if os_name == "photon" else "expire-cache"),
        (
            "yum",
            "install",
            "-y",
            "salt-master",
            "salt-minion",
            "salt-ssh",
            "salt-syndic",
            "salt-cloud",
            "salt-api",
            "salt-debuginfo",
        ),
    ]

    for cmd in commands:
        ret = shell.run(*cmd, check=False)
        if ret.returncode != 0:
            pytest.fail(f"Failed to run '{' '.join(cmd)!r}':\n{ret}")
|
||||
|
||||
|
||||
def setup_debian_family(
    shell,
    os_name,
    os_version,
    os_codename,
    root_url,
    salt_release,
    downloads_path,
    gpg_key_name,
    repo_subpath,
    package_type,
    onedir_install_path,
):
    """
    Install Salt on a Debian-family host: either configure the APT repo and
    install the packages (package_type == "package") or download and extract
    the onedir tarball into ``onedir_install_path``.
    """
    arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
    ret = shell.run("apt-get", "update", "-y", check=False)
    if ret.returncode != 0:
        pytest.fail(str(ret))

    if package_type == "package":
        # Debian repositories use deb-style architecture names.
        if arch == "aarch64":
            arch = "arm64"
        elif arch == "x86_64":
            arch = "amd64"

        if repo_subpath == "minor":
            # "minor" repos are additionally namespaced by the exact release.
            repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
        else:
            repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
        gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"

        try:
            pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
        except Exception as exc:  # pylint: disable=broad-except
            pytest.fail(f"Failed to download {gpg_file_url}: {exc}")

        # Write a signed-by sources.list entry pointing at the repo.
        salt_sources_path = downloads_path / "salt.list"
        salt_sources_path.write_text(
            f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n"
        )
        commands = [
            (
                "mv",
                str(downloads_path / gpg_key_name),
                f"/usr/share/keyrings/{gpg_key_name}",
            ),
            (
                "mv",
                str(salt_sources_path),
                "/etc/apt/sources.list.d/salt.list",
            ),
            ("apt-get", "install", "-y", "ca-certificates"),
            ("update-ca-certificates",),
            ("apt-get", "update"),
            (
                "apt-get",
                "install",
                "-y",
                "salt-master",
                "salt-minion",
                "salt-ssh",
                "salt-syndic",
                "salt-cloud",
                "salt-api",
                "salt-dbg",
            ),
        ]
        for cmd in commands:
            ret = shell.run(*cmd)
            if ret.returncode != 0:
                pytest.fail(str(ret))
    else:
        # We are testing the onedir download
        onedir_name = f"salt-{salt_release}-onedir-linux-{arch}.tar.xz"
        if repo_subpath == "minor":
            repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
        else:
            repo_url_base = f"{root_url}/onedir/{repo_subpath}"
        onedir_url = f"{repo_url_base}/{onedir_name}"
        onedir_location = downloads_path / onedir_name
        onedir_extracted = onedir_install_path

        try:
            pytest.helpers.download_file(onedir_url, onedir_location)
        except Exception as exc:  # pylint: disable=broad-except
            pytest.fail(f"Failed to download {onedir_url}: {exc}")

        shell.run("tar", "xvf", str(onedir_location), "-C", str(onedir_extracted))
|
||||
|
||||
|
||||
def setup_macos(
    shell,
    root_url,
    salt_release,
    downloads_path,
    repo_subpath,
    package_type,
    onedir_install_path,
):
    """
    Install Salt on macOS: either download and run the ``.pkg`` installer
    (package_type == "package") or download and extract the onedir tarball
    into ``onedir_install_path``.
    """
    arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
    if package_type == "package":

        if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
            mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg"
            if repo_subpath == "minor":
                # "minor" repos are additionally namespaced by the release.
                mac_pkg_url = (
                    f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}"
                )
            else:
                mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}"
        else:
            # BUG FIX: assign mac_pkg BEFORE interpolating it into the URL.
            # Previously the URL f-string referenced mac_pkg one line before
            # it was assigned, raising UnboundLocalError for <= 3005 releases.
            mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg"
            mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}"

        mac_pkg_path = downloads_path / mac_pkg
        pytest.helpers.download_file(mac_pkg_url, mac_pkg_path)

        ret = shell.run(
            "installer",
            "-pkg",
            str(mac_pkg_path),
            "-target",
            "/",
            check=False,
        )
        assert ret.returncode == 0, ret
    else:
        # We are testing the onedir download
        onedir_name = f"salt-{salt_release}-onedir-darwin-{arch}.tar.xz"
        if repo_subpath == "minor":
            repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
        else:
            repo_url_base = f"{root_url}/onedir/{repo_subpath}"
        onedir_url = f"{repo_url_base}/{onedir_name}"
        onedir_location = downloads_path / onedir_name
        onedir_extracted = onedir_install_path

        try:
            pytest.helpers.download_file(onedir_url, onedir_location)
        except Exception as exc:  # pylint: disable=broad-except
            pytest.fail(f"Failed to download {onedir_url}: {exc}")

        shell.run("tar", "xvf", str(onedir_location), "-C", str(onedir_extracted))
|
||||
|
||||
|
||||
@contextlib.contextmanager
def setup_windows(
    shell,
    root_url,
    salt_release,
    downloads_path,
    repo_subpath,
    package_type,
    onedir_install_path,
):
    """
    Install Salt on Windows (NSIS/MSI installer or onedir zip), yield while
    the tests run, and uninstall MSI packages on exit so later installs work.
    """
    try:
        arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
        if package_type != "onedir":
            root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt")

            if packaging.version.parse(salt_release) > packaging.version.parse("3005"):
                if package_type.lower() == "nsis":
                    # Package filenames use upper-cased arch, except x86.
                    if arch.lower() != "x86":
                        arch = arch.upper()
                    win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe"
                else:
                    if arch.lower() != "x86":
                        arch = arch.upper()
                    win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi"
                if repo_subpath == "minor":
                    win_pkg_url = (
                        f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}"
                    )
                else:
                    win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}"
                ssm_bin = root_dir / "ssm.exe"
            else:
                win_pkg = f"salt-{salt_release}-windows-{arch}.exe"
                win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}"
                ssm_bin = root_dir / "bin" / "ssm_bin"

            pkg_path = downloads_path / win_pkg

            pytest.helpers.download_file(win_pkg_url, pkg_path)
            if package_type.lower() == "nsis":
                # Silent install, don't auto-start the minion service.
                ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False)
            else:
                ret = shell.run(
                    "msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""'
                )
            assert ret.returncode == 0, ret

            log.debug("Removing installed salt-minion service")
            ret = shell.run(
                "cmd",
                "/c",
                str(ssm_bin),
                "remove",
                "salt-minion",
                "confirm",
                check=False,
            )
            assert ret.returncode == 0, ret
        else:
            # We are testing the onedir download
            onedir_name = f"salt-{salt_release}-onedir-windows-{arch}.zip"
            if repo_subpath == "minor":
                repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
            else:
                repo_url_base = f"{root_url}/onedir/{repo_subpath}"
            onedir_url = f"{repo_url_base}/{onedir_name}"
            onedir_location = downloads_path / onedir_name
            onedir_extracted = onedir_install_path

            try:
                pytest.helpers.download_file(onedir_url, onedir_location)
            except Exception as exc:  # pylint: disable=broad-except
                pytest.fail(f"Failed to download {onedir_url}: {exc}")

            shell.run("unzip", str(onedir_location), "-d", str(onedir_extracted))
        yield
    finally:
        # We need to uninstall the MSI packages, otherwise they will not install correctly
        # NOTE(review): if the download itself failed, pkg_path is unbound
        # here and this raises NameError instead of the original failure.
        if package_type.lower() == "msi":
            ret = shell.run("msiexec", "/qn", "/x", str(pkg_path))
            assert ret.returncode == 0, ret
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def install_dir(_setup_system, package_type, onedir_install_path):
    """Root directory of the Salt installation under test."""
    if package_type == "onedir":
        # The onedir archive extracts into a top-level "salt" directory.
        return onedir_install_path / "salt"
    if platform.is_windows():
        return pathlib.Path(
            os.getenv("ProgramFiles"), "Salt Project", "Salt"
        ).resolve()
    if platform.is_darwin():
        return pathlib.Path("/opt", "salt")
    return pathlib.Path("/opt", "saltstack", "salt")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
def salt_test_command(request, install_dir):
    """
    Return the parametrized command line with its binary prefixed by the
    install directory.
    """
    # Fix: copy the param before mutating. The original assigned
    # command = request.param and rewrote command[0] in place, mutating the
    # shared list produced by get_salt_test_commands() at collection time.
    command = list(request.param)
    command[0] = str(install_dir / command[0])
    return command
|
||||
|
||||
|
||||
@pytest.mark.parametrize("salt_test_command", get_salt_test_commands(), indirect=True)
def test_download(shell, salt_test_command):
    """
    Test downloading of Salt packages and running various commands.
    """
    # salt_test_command (indirect fixture) prefixes the binary with the
    # install dir; any non-zero exit status fails the test.
    ret = shell.run(*salt_test_command, check=False)
    assert ret.returncode == 0, ret
|
0
tests/pytests/pkg/integration/__init__.py
Normal file
0
tests/pytests/pkg/integration/__init__.py
Normal file
102
tests/pytests/pkg/integration/test_check_imports.py
Normal file
102
tests/pytests/pkg/integration/test_check_imports.py
Normal file
|
@ -0,0 +1,102 @@
|
|||
import logging
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
from saltfactories.utils.functional import MultiStateResult
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
CHECK_IMPORTS_SLS_CONTENTS = """
|
||||
#!py
|
||||
import importlib
|
||||
|
||||
def run():
|
||||
config = {}
|
||||
for module in [
|
||||
'templates', 'platform', 'cli', 'executors', 'config', 'wheel', 'netapi',
|
||||
'cache', 'proxy', 'transport', 'metaproxy', 'modules', 'tokens', 'matchers',
|
||||
'acl', 'auth', 'log', 'engines', 'client', 'returners', 'runners', 'tops',
|
||||
'output', 'daemons', 'thorium', 'renderers', 'states', 'cloud', 'roster',
|
||||
'beacons', 'pillar', 'spm', 'utils', 'sdb', 'fileserver', 'defaults',
|
||||
'ext', 'queues', 'grains', 'serializers'
|
||||
]:
|
||||
import_name = "salt.{}".format(module)
|
||||
try:
|
||||
importlib.import_module(import_name)
|
||||
config[import_name] = {
|
||||
'test.succeed_without_changes': [
|
||||
{
|
||||
"name": import_name,
|
||||
'comment': "The '{}' import succeeded.".format(import_name)
|
||||
}
|
||||
]
|
||||
}
|
||||
except ModuleNotFoundError as err:
|
||||
config[import_name] = {
|
||||
'test.fail_without_changes': [
|
||||
{
|
||||
"name": import_name,
|
||||
'comment': "The '{}' import failed. The error was: {}".format(import_name, err)
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
for import_name in ["telnetlib"]:
|
||||
try:
|
||||
importlib.import_module(import_name)
|
||||
config[import_name] = {
|
||||
'test.succeed_without_changes': [
|
||||
{
|
||||
"name": import_name,
|
||||
'comment': "The '{}' import succeeded.".format(import_name)
|
||||
}
|
||||
]
|
||||
}
|
||||
except ModuleNotFoundError as err:
|
||||
config[import_name] = {
|
||||
'test.fail_without_changes': [
|
||||
{
|
||||
"name": import_name,
|
||||
'comment': "The '{}' import failed. The error was: {}".format(import_name, err)
|
||||
}
|
||||
]
|
||||
}
|
||||
return config
|
||||
"""
|
||||
|
||||
|
||||
@pytest.fixture
def state_name(salt_master):
    """
    Drop the check-imports SLS into the master's base state tree for the
    duration of the test and yield its state name.
    """
    sls_name = "check-imports"
    with salt_master.state_tree.base.temp_file(
        f"{sls_name}.sls", CHECK_IMPORTS_SLS_CONTENTS
    ):
        if not platform.is_windows() and not platform.is_darwin():
            # Best-effort ownership fix so the salt-run master can read it.
            subprocess.run(
                [
                    "chown",
                    "-R",
                    "salt:salt",
                    str(salt_master.state_tree.base.write_path),
                ],
                check=False,
            )
        yield sls_name
|
||||
|
||||
|
||||
def test_check_imports(salt_cli, salt_minion, state_name):
    """
    Test imports
    """
    ret = salt_cli.run("state.sls", state_name, minion_tgt=salt_minion.id)
    assert ret.returncode == 0
    assert ret.data
    # Every rendered state is a succeed/fail marker for one import; all must
    # have passed for the packaged install to be import-complete.
    result = MultiStateResult(raw=ret.data)
    for state_ret in result:
        assert state_ret.result is True
|
61
tests/pytests/pkg/integration/test_clean_zmq_teardown.py
Normal file
61
tests/pytests/pkg/integration/test_clean_zmq_teardown.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
import logging
|
||||
import pathlib
|
||||
import shutil
|
||||
import textwrap
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def _skip_on_non_relenv(install_salt):
    """Automatically skip the module unless testing a relenv build."""
    if install_salt.relenv:
        return
    pytest.skip("This test is for relenv versions of salt")
|
||||
|
||||
|
||||
def test_check_no_import_error(salt_call_cli, salt_master):
    """
    Test that we don't have any errors on teardown of python when using a py-rendered sls file

    This is a package test because the issue was not reproducible in our normal test suite
    """
    # #!py state rendering a jinja-templated file.managed: exercises the py
    # renderer plus jinja templating in a single apply.
    init_sls = textwrap.dedent(
        """#!py


def run():
    return {
        "file_foobar": {
            "file.managed": [
                {
                    "name": "/foobar"
                },
                {
                    "template": "jinja"
                },
                {
                    "context": {
                        "foobar": "baz",
                    }
                },
                {
                    "source": "salt://breaks/foobar.jinja",
                }
            ]
        }
    }
"""
    )
    base_tree = pathlib.Path(salt_master.config["file_roots"]["base"][0])
    breaks_tree = base_tree / "breaks"
    breaks_tree.mkdir(exist_ok=True)
    (breaks_tree / "init.sls").write_text(init_sls)
    (breaks_tree / "foobar.jinja").write_text("{{ foobar }}")
    # test=true: we only care about stderr noise, not the state outcome.
    output = salt_call_cli.run("state.apply", "breaks", "--output-diff", "test=true")
    log.debug(output.stderr)
    shutil.rmtree(str(breaks_tree), ignore_errors=True)
    # Any interpreter-teardown tracebacks would land on stderr.
    assert not output.stderr
|
40
tests/pytests/pkg/integration/test_enabled_disabled.py
Normal file
40
tests/pytests/pkg/integration/test_enabled_disabled.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows(reason="Linux test only")
def test_services(install_salt, salt_cli, salt_minion):
    """
    Check if Services are enabled/disabled
    """
    # Expected enable/disable state per distro family as asserted below.
    if install_salt.distro_id in ("ubuntu", "debian"):
        services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"]
        services_disabled = []
    elif install_salt.distro_id in ("centos", "redhat", "amzn", "fedora"):
        services_enabled = []
        services_disabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"]
    elif install_salt.distro_id == "photon":
        # NOTE(review): expectation flips at Photon 5 — confirm against the
        # Photon packaging scripts.
        if float(install_salt.distro_version) < 5:
            services_enabled = []
            services_disabled = [
                "salt-master",
                "salt-minion",
                "salt-syndic",
                "salt-api",
            ]
        else:
            services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"]
            services_disabled = []
    elif platform.is_darwin():
        services_enabled = ["salt-minion"]
        services_disabled = []
    else:
        pytest.fail(f"Don't know how to handle os_family={install_salt.distro_id}")

    for service in services_enabled:
        ret = salt_cli.run("service.enabled", service, minion_tgt=salt_minion.id)
        assert "true" in ret.stdout

    for service in services_disabled:
        ret = salt_cli.run("service.disabled", service, minion_tgt=salt_minion.id)
        assert "true" in ret.stdout
|
25
tests/pytests/pkg/integration/test_help.py
Normal file
25
tests/pytests/pkg/integration/test_help.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
import subprocess
|
||||
|
||||
|
||||
def test_help(install_salt):
|
||||
"""
|
||||
Test --help works for all salt cmds
|
||||
"""
|
||||
for cmd in install_salt.binary_paths.values():
|
||||
cmd = [str(x) for x in cmd]
|
||||
|
||||
if len(cmd) > 1 and "shell" in cmd[1]:
|
||||
# Singlebin build, unable to get the version
|
||||
continue
|
||||
|
||||
if "python" in cmd[0] and len(cmd) == 1:
|
||||
ret = install_salt.proc.run(
|
||||
*cmd, "--version", stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
)
|
||||
assert "Python" in ret.stdout
|
||||
else:
|
||||
ret = install_salt.proc.run(
|
||||
*cmd, "--help", stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
)
|
||||
assert "Usage" in ret.stdout
|
||||
assert ret.returncode == 0
|
46
tests/pytests/pkg/integration/test_logrotate_config.py
Normal file
46
tests/pytests/pkg/integration/test_logrotate_config.py
Normal file
|
@ -0,0 +1,46 @@
|
|||
"""
|
||||
Tests for logrotate config
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
|
||||
import packaging.version
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_unless_on_linux,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def logrotate_config_file(grains):
|
||||
"""
|
||||
Fixture for logrotate config file path
|
||||
"""
|
||||
if grains["os_family"] == "RedHat":
|
||||
return pathlib.Path("/etc/logrotate.d", "salt")
|
||||
elif grains["os_family"] == "Debian":
|
||||
return pathlib.Path("/etc/logrotate.d", "salt-common")
|
||||
|
||||
|
||||
def test_logrotate_config(logrotate_config_file):
|
||||
"""
|
||||
Test that logrotate config has been installed in correctly
|
||||
"""
|
||||
assert logrotate_config_file.is_file()
|
||||
assert logrotate_config_file.owner() == "root"
|
||||
assert logrotate_config_file.group() == "root"
|
||||
|
||||
|
||||
def test_issue_65231_etc_logrotate_salt_dir_removed(install_salt):
|
||||
"""
|
||||
Test that /etc/logrotate.d/salt is not a directory
|
||||
"""
|
||||
if install_salt.prev_version and packaging.version.parse(
|
||||
install_salt.prev_version
|
||||
) <= packaging.version.parse("3006.4"):
|
||||
pytest.skip("Testing a downgrade to 3006.4, do not run")
|
||||
|
||||
path = pathlib.Path("/etc/logrotate.d/salt")
|
||||
if path.exists():
|
||||
assert path.is_dir() is False
|
132
tests/pytests/pkg/integration/test_multi_minion.py
Normal file
132
tests/pytests/pkg/integration/test_multi_minion.py
Normal file
|
@ -0,0 +1,132 @@
|
|||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
import packaging.version
|
||||
import psutil
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_unless_on_windows,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _skip_on_less_than_3006_1(install_salt):
|
||||
if packaging.version.parse(install_salt.version) <= packaging.version.parse(
|
||||
"3006.1"
|
||||
):
|
||||
pytest.skip(
|
||||
"Multi-minion script only available on versions greater than 3006.1"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mm_script(install_salt):
|
||||
yield install_salt.ssm_bin.parent / "multi-minion.ps1"
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def mm_conf(mm_script):
|
||||
yield pathlib.Path(os.getenv("LocalAppData"), "Salt Project", "Salt", "conf")
|
||||
subprocess.run(
|
||||
["powershell", str(mm_script).replace(" ", "' '"), "-d"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
|
||||
|
||||
def test_script_present(mm_script):
|
||||
"""
|
||||
Ensure the multi-minion.ps1 file is present in the root of the installation
|
||||
"""
|
||||
assert mm_script.exists()
|
||||
|
||||
|
||||
def test_install(mm_script, mm_conf):
|
||||
"""
|
||||
Install a second minion with default settings. Should create a minion config
|
||||
file in Local AppData
|
||||
"""
|
||||
ret = subprocess.run(
|
||||
["powershell", str(mm_script).replace(" ", "' '")],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
conf_file = mm_conf / "minion"
|
||||
assert conf_file.exists()
|
||||
assert conf_file.read_text().find("master: salt") > -1
|
||||
|
||||
|
||||
def test_install_master(mm_script, mm_conf):
|
||||
"""
|
||||
Install a second minion and set the master to spongebob
|
||||
"""
|
||||
ret = subprocess.run(
|
||||
["powershell", str(mm_script).replace(" ", "' '"), "-m", "spongebob"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
conf_file = mm_conf / "minion"
|
||||
assert conf_file.exists()
|
||||
assert conf_file.read_text().find("master: spongebob") > -1
|
||||
|
||||
|
||||
def test_install_prefix(mm_script, mm_conf):
|
||||
"""
|
||||
Install a second minion and add a prefix to the minion id
|
||||
"""
|
||||
ret = subprocess.run(
|
||||
["powershell", str(mm_script).replace(" ", "' '"), "-p", "squarepants"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
conf_file = mm_conf / "minion"
|
||||
assert conf_file.exists()
|
||||
assert conf_file.read_text().find("id: squarepants") > -1
|
||||
|
||||
|
||||
def test_install_log_level(mm_script, mm_conf):
|
||||
"""
|
||||
Install a second minion and set the log level in the log file to debug
|
||||
"""
|
||||
ret = subprocess.run(
|
||||
["powershell", str(mm_script).replace(" ", "' '"), "-l", "debug"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
conf_file = mm_conf / "minion"
|
||||
assert conf_file.exists()
|
||||
assert conf_file.read_text().find("log_level_logfile: debug") > -1
|
||||
|
||||
|
||||
def test_install_start(mm_script, mm_conf):
|
||||
"""
|
||||
Install a second minion and start that minion in a hidden process
|
||||
"""
|
||||
ret = subprocess.run(
|
||||
["powershell", str(mm_script).replace(" ", "' '"), "-s"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
conf_file = mm_conf / "minion"
|
||||
assert conf_file.exists()
|
||||
assert conf_file.read_text().find("master: salt") > -1
|
||||
|
||||
found = False
|
||||
for p in psutil.process_iter(["cmdline", "name"]):
|
||||
if p.info["name"] and p.info["name"] == "salt-minion.exe":
|
||||
if f"{mm_conf}" in p.info["cmdline"]:
|
||||
found = True
|
||||
assert found is True
|
217
tests/pytests/pkg/integration/test_pip.py
Normal file
217
tests/pytests/pkg/integration/test_pip.py
Normal file
|
@ -0,0 +1,217 @@
|
|||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pypath():
|
||||
if platform.is_windows():
|
||||
return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt")
|
||||
else:
|
||||
return pathlib.Path("/opt", "saltstack", "salt", "pypath", "bin")
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def wipe_pydeps(shell, install_salt, extras_pypath):
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
# Note, uninstalling anything with an associated script will leave the script.
|
||||
# This is due to a bug in pip.
|
||||
for dep in ["pep8", "PyGithub"]:
|
||||
shell.run(
|
||||
*(install_salt.binary_paths["pip"] + ["uninstall", "-y", dep]),
|
||||
)
|
||||
# Let's remove everything under the extras directory, uninstalling doesn't get dependencies
|
||||
dirs = []
|
||||
files = []
|
||||
for filename in extras_pypath.glob("**/**"):
|
||||
if filename != extras_pypath and filename.exists():
|
||||
if filename.is_dir():
|
||||
dirs.append(filename)
|
||||
else:
|
||||
files.append(filename)
|
||||
for fp in files:
|
||||
fp.unlink()
|
||||
for dirname in dirs:
|
||||
shutil.rmtree(dirname, ignore_errors=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pkg_tests_account_environ(pkg_tests_account):
|
||||
environ = os.environ.copy()
|
||||
environ["LOGNAME"] = environ["USER"] = pkg_tests_account.username
|
||||
environ["HOME"] = pkg_tests_account.info.home
|
||||
return environ
|
||||
|
||||
|
||||
def test_pip_install(salt_call_cli, install_salt, shell):
|
||||
"""
|
||||
Test pip.install and ensure module can use installed library
|
||||
"""
|
||||
dep = "PyGithub==1.56.0"
|
||||
repo = "https://github.com/saltstack/salt.git"
|
||||
|
||||
try:
|
||||
install = salt_call_cli.run("--local", "pip.install", dep)
|
||||
assert install.returncode == 0
|
||||
|
||||
use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
|
||||
assert "Authentication information could" in use_lib.stderr
|
||||
finally:
|
||||
ret = salt_call_cli.run("--local", "pip.uninstall", dep)
|
||||
assert ret.returncode == 0
|
||||
use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
|
||||
assert "The github execution module cannot be loaded" in use_lib.stderr
|
||||
|
||||
|
||||
def test_pip_install_extras(shell, install_salt, extras_pypath_bin):
|
||||
"""
|
||||
Test salt-pip installs into the correct directory
|
||||
"""
|
||||
if not install_salt.relenv:
|
||||
pytest.skip("The extras directory is only in relenv versions")
|
||||
dep = "pep8"
|
||||
extras_keyword = "extras-3"
|
||||
if platform.is_windows():
|
||||
check_path = extras_pypath_bin / f"{dep}.exe"
|
||||
else:
|
||||
check_path = extras_pypath_bin / dep
|
||||
|
||||
install_ret = shell.run(*(install_salt.binary_paths["pip"] + ["install", dep]))
|
||||
assert install_ret.returncode == 0
|
||||
|
||||
ret = shell.run(*(install_salt.binary_paths["pip"] + ["list", "--format=json"]))
|
||||
assert ret.returncode == 0
|
||||
assert ret.data # We can parse the JSON output
|
||||
for pkg in ret.data:
|
||||
if pkg["name"] == dep:
|
||||
break
|
||||
else:
|
||||
pytest.fail(
|
||||
f"The {dep!r} package was not found installed. Packages Installed: {ret.data}"
|
||||
)
|
||||
|
||||
show_ret = shell.run(*(install_salt.binary_paths["pip"] + ["show", dep]))
|
||||
assert show_ret.returncode == 0
|
||||
assert extras_keyword in show_ret.stdout
|
||||
assert check_path.exists()
|
||||
|
||||
ret = shell.run(str(check_path), "--version")
|
||||
assert ret.returncode == 0
|
||||
|
||||
|
||||
def demote(account):
|
||||
def result():
|
||||
# os.setgid does not remove group membership, so we remove them here so they are REALLY non-root
|
||||
os.setgroups([])
|
||||
os.setgid(account.info.gid)
|
||||
os.setuid(account.info.uid)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows")
|
||||
def test_pip_non_root(
|
||||
shell,
|
||||
install_salt,
|
||||
pkg_tests_account,
|
||||
extras_pypath_bin,
|
||||
pypath,
|
||||
pkg_tests_account_environ,
|
||||
):
|
||||
if install_salt.classic:
|
||||
pytest.skip("We can install non-root for classic packages")
|
||||
check_path = extras_pypath_bin / "pep8"
|
||||
if not install_salt.relenv and not install_salt.classic:
|
||||
check_path = pypath / "pep8"
|
||||
# We should be able to issue a --help without being root
|
||||
ret = subprocess.run(
|
||||
install_salt.binary_paths["salt"] + ["--help"],
|
||||
preexec_fn=demote(pkg_tests_account),
|
||||
env=pkg_tests_account_environ,
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
assert "Usage" in ret.stdout
|
||||
|
||||
# Let tiamat-pip create the pypath directory for us
|
||||
ret = subprocess.run(
|
||||
install_salt.binary_paths["pip"] + ["install", "-h"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
|
||||
# Now, we should still not be able to install as non-root
|
||||
ret = subprocess.run(
|
||||
install_salt.binary_paths["pip"] + ["install", "pep8"],
|
||||
preexec_fn=demote(pkg_tests_account),
|
||||
env=pkg_tests_account_environ,
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
assert ret.returncode != 0, ret.stderr
|
||||
# But we should be able to install as root
|
||||
ret = subprocess.run(
|
||||
install_salt.binary_paths["pip"] + ["install", "pep8"],
|
||||
capture_output=True,
|
||||
check=False,
|
||||
text=True,
|
||||
)
|
||||
|
||||
assert check_path.exists(), shutil.which("pep8")
|
||||
|
||||
assert ret.returncode == 0, ret.stderr
|
||||
|
||||
|
||||
def test_pip_install_salt_extension_in_extras(install_salt, extras_pypath, shell):
|
||||
"""
|
||||
Test salt-pip installs into the correct directory and the salt extension
|
||||
is properly loaded.
|
||||
"""
|
||||
if not install_salt.relenv:
|
||||
pytest.skip("The extras directory is only in relenv versions")
|
||||
dep = "salt-analytics-framework"
|
||||
dep_version = "0.1.0"
|
||||
|
||||
install_ret = shell.run(
|
||||
*(install_salt.binary_paths["pip"] + ["install", f"{dep}=={dep_version}"]),
|
||||
)
|
||||
assert install_ret.returncode == 0
|
||||
|
||||
ret = shell.run(
|
||||
*(install_salt.binary_paths["pip"] + ["list", "--format=json"]),
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
pkgs_installed = json.loads(ret.stdout.strip())
|
||||
for pkg in pkgs_installed:
|
||||
if pkg["name"] == dep:
|
||||
break
|
||||
else:
|
||||
pytest.fail(
|
||||
f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
|
||||
)
|
||||
|
||||
show_ret = shell.run(
|
||||
*(install_salt.binary_paths["pip"] + ["show", dep]),
|
||||
)
|
||||
assert show_ret.returncode == 0
|
||||
|
||||
assert extras_pypath.joinpath("saf").is_dir()
|
||||
|
||||
ret = shell.run(
|
||||
*(install_salt.binary_paths["minion"] + ["--versions-report"]),
|
||||
)
|
||||
assert show_ret.returncode == 0
|
||||
assert "Salt Extensions" in ret.stdout
|
||||
assert f"{dep}: {dep_version}" in ret.stdout
|
96
tests/pytests/pkg/integration/test_pip_upgrade.py
Normal file
96
tests/pytests/pkg/integration/test_pip_upgrade.py
Normal file
|
@ -0,0 +1,96 @@
|
|||
import logging
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
def test_pip_install(install_salt, salt_call_cli):
|
||||
"""
|
||||
Test pip.install and ensure that a package included in the tiamat build can be upgraded
|
||||
"""
|
||||
ret = subprocess.run(
|
||||
install_salt.binary_paths["salt"] + ["--versions-report"],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
check=True,
|
||||
shell=False,
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
|
||||
possible_upgrades = [
|
||||
"docker-py",
|
||||
"msgpack",
|
||||
"pycparser",
|
||||
"python-gnupg",
|
||||
"pyyaml",
|
||||
"pyzmq",
|
||||
"jinja2",
|
||||
]
|
||||
found_new = False
|
||||
for dep in possible_upgrades:
|
||||
get_latest = salt_call_cli.run("--local", "pip.list_all_versions", dep)
|
||||
if not get_latest.data:
|
||||
# No information available
|
||||
continue
|
||||
dep_version = get_latest.data[-1]
|
||||
installed_version = None
|
||||
for line in ret.stdout.splitlines():
|
||||
if dep in line.lower():
|
||||
installed_version = line.lower().strip().split(":")[-1].strip()
|
||||
break
|
||||
else:
|
||||
pytest.fail(f"Failed to find {dep} in the versions report output")
|
||||
|
||||
if dep_version == installed_version:
|
||||
log.warning(f"The {dep} dependency is already latest")
|
||||
else:
|
||||
found_new = True
|
||||
break
|
||||
|
||||
if found_new:
|
||||
try:
|
||||
install = salt_call_cli.run(
|
||||
"--local", "pip.install", f"{dep}=={dep_version}"
|
||||
)
|
||||
assert install
|
||||
log.warning(install)
|
||||
# The assert is commented out because pip will actually trigger a failure since
|
||||
# we're breaking the dependency tree, but, for the purpose of this test, we can
|
||||
# ignore it.
|
||||
#
|
||||
# assert install.returncode == 0
|
||||
|
||||
ret = subprocess.run(
|
||||
install_salt.binary_paths["salt"] + ["--versions-report"],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
check=True,
|
||||
shell=False,
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
for line in ret.stdout.splitlines():
|
||||
if dep in line.lower():
|
||||
new_version = line.lower().strip().split(":")[-1].strip()
|
||||
if new_version == installed_version:
|
||||
pytest.fail(
|
||||
f"The newly installed version of {dep} does not show in the versions report"
|
||||
)
|
||||
assert new_version == dep_version
|
||||
break
|
||||
else:
|
||||
pytest.fail(f"Failed to find {dep} in the versions report output")
|
||||
finally:
|
||||
log.info(f"Uninstalling {dep_version}")
|
||||
assert salt_call_cli.run(
|
||||
"--local", "pip.uninstall", f"{dep}=={dep_version}"
|
||||
)
|
||||
else:
|
||||
pytest.skip("Did not find an upgrade version for any of the dependencies")
|
36
tests/pytests/pkg/integration/test_pkg.py
Normal file
36
tests/pytests/pkg/integration/test_pkg.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
import sys
|
||||
import time
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def pkg_name(salt_call_cli, grains):
|
||||
if sys.platform.startswith("win"):
|
||||
ret = salt_call_cli.run("--local", "winrepo.update_git_repos")
|
||||
assert ret.returncode == 0
|
||||
attempts = 3
|
||||
while attempts:
|
||||
attempts -= 1
|
||||
ret = salt_call_cli.run("--local", "pkg.refresh_db")
|
||||
if ret.returncode:
|
||||
time.sleep(5)
|
||||
continue
|
||||
break
|
||||
else:
|
||||
pytest.fail("Failed to run 'pkg.refresh_db' 3 times.")
|
||||
return "putty"
|
||||
elif grains["os_family"] == "RedHat":
|
||||
if grains["os"] == "VMware Photon OS":
|
||||
return "snoopy"
|
||||
elif grains["osfinger"] == "Amazon Linux-2023":
|
||||
return "dnf-utils"
|
||||
return "units"
|
||||
elif grains["os_family"] == "Debian":
|
||||
return "ifenslave"
|
||||
return "figlet"
|
||||
|
||||
|
||||
def test_pkg_install(salt_call_cli, pkg_name):
|
||||
ret = salt_call_cli.run("--local", "state.single", "pkg.installed", pkg_name)
|
||||
assert ret.returncode == 0
|
75
tests/pytests/pkg/integration/test_python.py
Normal file
75
tests/pytests/pkg/integration/test_python.py
Normal file
|
@ -0,0 +1,75 @@
|
|||
import subprocess
|
||||
import textwrap
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def python_script_bin(install_salt):
|
||||
# Tiamat builds run scripts via `salt python`
|
||||
if not install_salt.relenv and not install_salt.classic:
|
||||
return install_salt.binary_paths["python"][:1] + ["python"]
|
||||
return install_salt.binary_paths["python"]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def check_python_file(tmp_path):
|
||||
script_path = tmp_path / "check_python.py"
|
||||
script_path.write_text(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import sys
|
||||
|
||||
import salt.utils.data
|
||||
|
||||
user_arg = sys.argv
|
||||
|
||||
if user_arg[1] == "raise":
|
||||
raise Exception("test")
|
||||
|
||||
if salt.utils.data.is_true(user_arg[1]):
|
||||
sys.exit(0)
|
||||
else:
|
||||
sys.exit(1)
|
||||
"""
|
||||
)
|
||||
)
|
||||
return script_path
|
||||
|
||||
|
||||
@pytest.mark.parametrize("exp_ret,user_arg", [(1, "false"), (0, "true")])
|
||||
def test_python_script(
|
||||
install_salt, exp_ret, user_arg, python_script_bin, check_python_file
|
||||
):
|
||||
ret = install_salt.proc.run(
|
||||
*(
|
||||
python_script_bin
|
||||
+ [
|
||||
str(check_python_file),
|
||||
user_arg,
|
||||
]
|
||||
),
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
check=False,
|
||||
universal_newlines=True,
|
||||
)
|
||||
|
||||
assert ret.returncode == exp_ret, ret.stderr
|
||||
|
||||
|
||||
def test_python_script_exception(install_salt, python_script_bin, check_python_file):
|
||||
ret = install_salt.proc.run(
|
||||
*(
|
||||
python_script_bin
|
||||
+ [
|
||||
str(check_python_file),
|
||||
"raise",
|
||||
]
|
||||
),
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
check=False,
|
||||
universal_newlines=True,
|
||||
)
|
||||
assert "Exception: test" in ret.stderr
|
21
tests/pytests/pkg/integration/test_salt_api.py
Normal file
21
tests/pytests/pkg/integration/test_salt_api.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
def test_salt_api(api_request):
|
||||
"""
|
||||
Test running a command against the salt api
|
||||
"""
|
||||
ret = api_request.post(
|
||||
"/run",
|
||||
data={
|
||||
"client": "local",
|
||||
"tgt": "*",
|
||||
"fun": "test.arg",
|
||||
"arg": ["foo", "bar"],
|
||||
},
|
||||
)
|
||||
assert ret["args"] == ["foo", "bar"]
|
86
tests/pytests/pkg/integration/test_salt_call.py
Normal file
86
tests/pytests/pkg/integration/test_salt_call.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
import subprocess
|
||||
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
|
||||
def test_salt_call_local(salt_call_cli):
|
||||
"""
|
||||
Test salt-call --local test.ping
|
||||
"""
|
||||
ret = salt_call_cli.run("--local", "test.ping")
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
|
||||
|
||||
def test_salt_call(salt_call_cli):
|
||||
"""
|
||||
Test salt-call test.ping
|
||||
"""
|
||||
ret = salt_call_cli.run("test.ping")
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def state_name(salt_master):
|
||||
name = "some-test-state"
|
||||
sls_contents = """
|
||||
test_foo:
|
||||
test.succeed_with_changes:
|
||||
- name: foo
|
||||
"""
|
||||
with salt_master.state_tree.base.temp_file(f"{name}.sls", sls_contents):
|
||||
if not platform.is_windows() and not platform.is_darwin():
|
||||
subprocess.run(
|
||||
[
|
||||
"chown",
|
||||
"-R",
|
||||
"salt:salt",
|
||||
str(salt_master.state_tree.base.write_path),
|
||||
],
|
||||
check=False,
|
||||
)
|
||||
yield name
|
||||
|
||||
|
||||
def test_sls(salt_call_cli, state_name):
|
||||
"""
|
||||
Test calling a sls file
|
||||
"""
|
||||
ret = salt_call_cli.run("state.apply", state_name)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data
|
||||
sls_ret = ret.data[next(iter(ret.data))]
|
||||
assert sls_ret["changes"]["testing"]["new"] == "Something pretended to change"
|
||||
|
||||
|
||||
def test_salt_call_local_sys_doc_none(salt_call_cli):
|
||||
"""
|
||||
Test salt-call --local sys.doc none
|
||||
"""
|
||||
ret = salt_call_cli.run("--local", "sys.doc", "none")
|
||||
assert ret.returncode == 0
|
||||
assert not ret.data
|
||||
|
||||
|
||||
def test_salt_call_local_sys_doc_aliases(salt_call_cli):
|
||||
"""
|
||||
Test salt-call --local sys.doc aliases
|
||||
"""
|
||||
ret = salt_call_cli.run("--local", "sys.doc", "aliases.list_aliases")
|
||||
assert ret.returncode == 0
|
||||
assert "aliases.list_aliases" in ret.data
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows
|
||||
def test_salt_call_cmd_run_id_runas(salt_call_cli, pkg_tests_account, caplog):
|
||||
"""
|
||||
Test salt-call --local cmd_run id with runas
|
||||
"""
|
||||
ret = salt_call_cli.run(
|
||||
"--local", "cmd.run", "id", runas=pkg_tests_account.username
|
||||
)
|
||||
assert "Environment could not be retrieved for user" not in caplog.text
|
||||
assert str(pkg_tests_account.info.uid) in ret.stdout
|
||||
assert str(pkg_tests_account.info.gid) in ret.stdout
|
38
tests/pytests/pkg/integration/test_salt_exec.py
Normal file
38
tests/pytests/pkg/integration/test_salt_exec.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
from sys import platform
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cat_file(tmp_path):
|
||||
fp = tmp_path / "cat-file"
|
||||
fp.write_text(str(fp))
|
||||
return fp
|
||||
|
||||
|
||||
def test_salt_cmd_run(salt_cli, salt_minion, cat_file):
|
||||
"""
|
||||
Test salt cmd.run 'ipconfig' or 'cat <file>'
|
||||
"""
|
||||
ret = None
|
||||
if platform.startswith("win"):
|
||||
ret = salt_cli.run("cmd.run", "ipconfig", minion_tgt=salt_minion.id)
|
||||
else:
|
||||
ret = salt_cli.run("cmd.run", f"cat {str(cat_file)}", minion_tgt=salt_minion.id)
|
||||
assert ret
|
||||
assert ret.stdout
|
||||
|
||||
|
||||
def test_salt_list_users(salt_cli, salt_minion):
|
||||
"""
|
||||
Test salt user.list_users
|
||||
"""
|
||||
ret = salt_cli.run("user.list_users", minion_tgt=salt_minion.id)
|
||||
if platform.startswith("win"):
|
||||
assert "Administrator" in ret.stdout
|
||||
else:
|
||||
assert "root" in ret.stdout
|
58
tests/pytests/pkg/integration/test_salt_grains.py
Normal file
58
tests/pytests/pkg/integration/test_salt_grains.py
Normal file
|
@ -0,0 +1,58 @@
|
|||
import packaging.version
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
def test_grains_items(salt_cli, salt_minion):
|
||||
"""
|
||||
Test grains.items
|
||||
"""
|
||||
ret = salt_cli.run("grains.items", minion_tgt=salt_minion.id)
|
||||
assert ret.data, ret
|
||||
assert "osrelease" in ret.data
|
||||
|
||||
|
||||
def test_grains_item_os(salt_cli, salt_minion):
|
||||
"""
|
||||
Test grains.item os
|
||||
"""
|
||||
ret = salt_cli.run("grains.item", "os", minion_tgt=salt_minion.id)
|
||||
assert ret.data, ret
|
||||
assert "os" in ret.data
|
||||
|
||||
|
||||
def test_grains_item_pythonversion(salt_cli, salt_minion):
|
||||
"""
|
||||
Test grains.item pythonversion
|
||||
"""
|
||||
ret = salt_cli.run("grains.item", "pythonversion", minion_tgt=salt_minion.id)
|
||||
assert ret.data, ret
|
||||
assert "pythonversion" in ret.data
|
||||
|
||||
|
||||
def test_grains_setval_key_val(salt_cli, salt_minion):
|
||||
"""
|
||||
Test grains.setval key val
|
||||
"""
|
||||
ret = salt_cli.run("grains.setval", "key", "val", minion_tgt=salt_minion.id)
|
||||
assert ret.data, ret
|
||||
assert "key" in ret.data
|
||||
|
||||
|
||||
def test_grains_package_onedir(salt_cli, salt_minion, install_salt):
|
||||
"""
|
||||
Test that the package grain returns onedir
|
||||
"""
|
||||
# This grain was added in 3007.0
|
||||
if packaging.version.parse(install_salt.version) < packaging.version.parse(
|
||||
"3007.0"
|
||||
):
|
||||
pytest.skip(
|
||||
"The package grain is only going to equal 'onedir' in version 3007.0 or later"
|
||||
)
|
||||
ret = salt_cli.run("grains.get", "package", minion_tgt=salt_minion.id)
|
||||
assert ret.data == "onedir"
|
||||
assert ret.data, ret
|
14
tests/pytests/pkg/integration/test_salt_key.py
Normal file
14
tests/pytests/pkg/integration/test_salt_key.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
def test_salt_key(salt_key_cli, salt_minion):
|
||||
"""
|
||||
Test running salt-key -L
|
||||
"""
|
||||
ret = salt_key_cli.run("-L")
|
||||
assert ret.data
|
||||
assert salt_minion.id in ret.data["minions"]
|
26
tests/pytests/pkg/integration/test_salt_minion.py
Normal file
26
tests/pytests/pkg/integration/test_salt_minion.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
def test_salt_minion_ping(salt_cli, salt_minion):
|
||||
"""
|
||||
Test running a command against a targeted minion
|
||||
"""
|
||||
ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
|
||||
|
||||
def test_salt_minion_setproctitle(salt_cli, salt_minion):
|
||||
"""
|
||||
Test that setproctitle is working
|
||||
for the running Salt minion
|
||||
"""
|
||||
ret = salt_cli.run(
|
||||
"ps.pgrep", "MinionProcessManager", full=True, minion_tgt=salt_minion.id
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data != ""
|
19
tests/pytests/pkg/integration/test_salt_output.py
Normal file
19
tests/pytests/pkg/integration/test_salt_output.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("output_fmt", ["yaml", "json"])
|
||||
def test_salt_output(salt_cli, salt_minion, output_fmt):
|
||||
"""
|
||||
Test --output
|
||||
"""
|
||||
ret = salt_cli.run(
|
||||
f"--output={output_fmt}", "test.fib", "7", minion_tgt=salt_minion.id
|
||||
)
|
||||
if output_fmt == "json":
|
||||
assert 13 in ret.data
|
||||
else:
|
||||
ret.stdout.matcher.fnmatch_lines(["*- 13*"])
|
44
tests/pytests/pkg/integration/test_salt_pillar.py
Normal file
44
tests/pytests/pkg/integration/test_salt_pillar.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
import subprocess
|
||||
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pillar_name(salt_master):
|
||||
name = "info"
|
||||
top_file_contents = """
|
||||
base:
|
||||
'*':
|
||||
- test
|
||||
"""
|
||||
test_file_contents = f"""
|
||||
{name}: test
|
||||
"""
|
||||
with salt_master.pillar_tree.base.temp_file(
|
||||
"top.sls", top_file_contents
|
||||
), salt_master.pillar_tree.base.temp_file("test.sls", test_file_contents):
|
||||
if not platform.is_windows() and not platform.is_darwin():
|
||||
subprocess.run(
|
||||
[
|
||||
"chown",
|
||||
"-R",
|
||||
"salt:salt",
|
||||
str(salt_master.pillar_tree.base.write_path),
|
||||
],
|
||||
check=False,
|
||||
)
|
||||
yield name
|
||||
|
||||
|
||||
def test_salt_pillar(salt_cli, salt_minion, pillar_name):
|
||||
"""
|
||||
Test pillar.items
|
||||
"""
|
||||
ret = salt_cli.run("pillar.items", minion_tgt=salt_minion.id)
|
||||
assert ret.returncode == 0
|
||||
assert pillar_name in ret.data
|
70
tests/pytests/pkg/integration/test_salt_state_file.py
Normal file
70
tests/pytests/pkg/integration/test_salt_state_file.py
Normal file
|
@ -0,0 +1,70 @@
|
|||
import subprocess
|
||||
import types
|
||||
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
from saltfactories.utils.functional import MultiStateResult
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def files(tmp_path):
|
||||
return types.SimpleNamespace(
|
||||
fpath_1=tmp_path / "fpath_1.txt",
|
||||
fpath_2=tmp_path / "fpath_2.txt",
|
||||
fpath_3=tmp_path / "fpath_3.txt",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def state_name(files, salt_master):
|
||||
name = "some-state"
|
||||
sls_contents = f"""
|
||||
create-fpath-1-file:
|
||||
file.managed:
|
||||
- name: {files.fpath_1}
|
||||
|
||||
create-fpath-2-file:
|
||||
file.managed:
|
||||
- name: {files.fpath_2}
|
||||
|
||||
create-fpath-3-file:
|
||||
file.managed:
|
||||
- name: {files.fpath_3}
|
||||
"""
|
||||
assert files.fpath_1.exists() is False
|
||||
assert files.fpath_2.exists() is False
|
||||
assert files.fpath_3.exists() is False
|
||||
with salt_master.state_tree.base.temp_file(f"{name}.sls", sls_contents):
|
||||
if not platform.is_windows() and not platform.is_darwin():
|
||||
subprocess.run(
|
||||
[
|
||||
"chown",
|
||||
"-R",
|
||||
"salt:salt",
|
||||
str(salt_master.state_tree.base.write_path),
|
||||
],
|
||||
check=False,
|
||||
)
|
||||
yield name
|
||||
|
||||
|
||||
def test_salt_state_file(salt_cli, salt_minion, state_name, files):
|
||||
"""
|
||||
Test state file
|
||||
"""
|
||||
assert files.fpath_1.exists() is False
|
||||
assert files.fpath_2.exists() is False
|
||||
assert files.fpath_3.exists() is False
|
||||
|
||||
ret = salt_cli.run("state.apply", state_name, minion_tgt=salt_minion.id)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data
|
||||
if ret.stdout and "Minion did not return" in ret.stdout:
|
||||
pytest.skip("Skipping test, state took too long to apply")
|
||||
|
||||
for state_return in MultiStateResult(ret.data):
|
||||
assert state_return.result is True
|
||||
|
||||
assert files.fpath_1.exists() is True
|
||||
assert files.fpath_2.exists() is True
|
||||
assert files.fpath_3.exists() is True
|
38
tests/pytests/pkg/integration/test_salt_ufw.py
Normal file
38
tests/pytests/pkg/integration/test_salt_ufw.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
import pathlib
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows
|
||||
@pytest.mark.skip_if_binaries_missing("ufw")
|
||||
def test_salt_ufw(salt_master, salt_call_cli, install_salt):
|
||||
"""
|
||||
Test salt.ufw for Debian/Ubuntu salt-master
|
||||
"""
|
||||
if install_salt.distro_id not in ("debian", "ubuntu"):
|
||||
pytest.skip("Only tests Debian / Ubuntu packages")
|
||||
|
||||
# check that the salt_master is running
|
||||
assert salt_master.is_running()
|
||||
|
||||
ufw_master_path = pathlib.Path("/etc/ufw/applications.d/salt.ufw")
|
||||
assert ufw_master_path.exists()
|
||||
assert ufw_master_path.is_file()
|
||||
|
||||
ufw_list_cmd = "/usr/sbin/ufw app list"
|
||||
ret = salt_call_cli.run("--local", "cmd.run", ufw_list_cmd)
|
||||
assert "Available applications" in ret.stdout
|
||||
assert "Salt" in ret.stdout
|
||||
ufw_upd_cmd = "/usr/sbin/ufw app update Salt"
|
||||
ret = salt_call_cli.run("--local", "cmd.run", ufw_upd_cmd)
|
||||
assert ret.returncode == 0
|
||||
expected_info = """Profile: Salt
|
||||
Title: salt
|
||||
Description: fast and powerful configuration management and remote
|
||||
execution
|
||||
|
||||
Ports:
|
||||
4505,4506/tcp"""
|
||||
ufw_info_cmd = "/usr/sbin/ufw app info Salt"
|
||||
ret = salt_call_cli.run("--local", "cmd.run", ufw_info_cmd)
|
||||
assert expected_info in ret.data
|
363
tests/pytests/pkg/integration/test_salt_user.py
Normal file
363
tests/pytests/pkg/integration/test_salt_user.py
Normal file
|
@ -0,0 +1,363 @@
|
|||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import packaging.version
|
||||
import psutil
|
||||
import pytest
|
||||
from saltfactories.utils.tempfiles import temp_directory
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows,
|
||||
pytest.mark.skip_on_darwin,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pkg_paths():
|
||||
"""
|
||||
Paths created by package installs
|
||||
"""
|
||||
paths = [
|
||||
"/etc/salt",
|
||||
"/var/cache/salt",
|
||||
"/var/log/salt",
|
||||
"/var/run/salt",
|
||||
"/opt/saltstack/salt",
|
||||
]
|
||||
return paths
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pkg_paths_salt_user():
|
||||
"""
|
||||
Paths created by package installs and owned by salt user
|
||||
"""
|
||||
return [
|
||||
"/etc/salt/cloud.deploy.d",
|
||||
"/var/log/salt/cloud",
|
||||
"/opt/saltstack/salt/lib/python{}.{}/site-packages/salt/cloud/deploy".format(
|
||||
*sys.version_info
|
||||
),
|
||||
"/etc/salt/pki/master",
|
||||
"/etc/salt/master.d",
|
||||
"/var/log/salt/master",
|
||||
"/var/log/salt/api",
|
||||
"/var/log/salt/key",
|
||||
"/var/cache/salt/master",
|
||||
"/var/run/salt/master",
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pkg_paths_salt_user_exclusions():
|
||||
"""
|
||||
Exclusions from paths created by package installs and owned by salt user
|
||||
"""
|
||||
paths = [
|
||||
"/var/cache/salt/master/.root_key" # written by salt, salt-run and salt-key as root
|
||||
]
|
||||
return paths
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _skip_on_non_relenv(install_salt):
|
||||
if not install_salt.relenv:
|
||||
pytest.skip("The salt user only exists on relenv versions of salt")
|
||||
|
||||
|
||||
def test_salt_user_master(salt_master, install_salt):
|
||||
"""
|
||||
Test the correct user is running the Salt Master
|
||||
"""
|
||||
match = False
|
||||
for proc in psutil.Process(salt_master.pid).children():
|
||||
assert proc.username() == "salt"
|
||||
match = True
|
||||
|
||||
assert match
|
||||
|
||||
|
||||
def test_salt_user_home(install_salt):
|
||||
"""
|
||||
Test the salt user's home is /opt/saltstack/salt
|
||||
"""
|
||||
proc = subprocess.run(
|
||||
["getent", "passwd", "salt"], check=False, capture_output=True
|
||||
)
|
||||
assert proc.returncode == 0
|
||||
home = ""
|
||||
try:
|
||||
home = proc.stdout.decode().split(":")[5]
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
assert home == "/opt/saltstack/salt"
|
||||
|
||||
|
||||
def test_salt_user_group(install_salt):
|
||||
"""
|
||||
Test the salt user is in the salt group
|
||||
"""
|
||||
proc = subprocess.run(["id", "salt"], check=False, capture_output=True)
|
||||
assert proc.returncode == 0
|
||||
in_group = False
|
||||
try:
|
||||
for group in proc.stdout.decode().split(" "):
|
||||
if "salt" in group:
|
||||
in_group = True
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
assert in_group is True
|
||||
|
||||
|
||||
def test_salt_user_shell(install_salt):
|
||||
"""
|
||||
Test the salt user's login shell
|
||||
"""
|
||||
proc = subprocess.run(
|
||||
["getent", "passwd", "salt"], check=False, capture_output=True
|
||||
)
|
||||
assert proc.returncode == 0
|
||||
shell = ""
|
||||
shell_exists = False
|
||||
try:
|
||||
shell = proc.stdout.decode().split(":")[6].strip()
|
||||
shell_exists = pathlib.Path(shell).exists()
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
assert shell_exists is True
|
||||
|
||||
|
||||
def test_pkg_paths(
|
||||
install_salt, pkg_paths, pkg_paths_salt_user, pkg_paths_salt_user_exclusions
|
||||
):
|
||||
"""
|
||||
Test package paths ownership
|
||||
"""
|
||||
if packaging.version.parse(install_salt.version) <= packaging.version.parse(
|
||||
"3006.4"
|
||||
):
|
||||
pytest.skip("Package path ownership was changed in salt 3006.4")
|
||||
salt_user_subdirs = []
|
||||
for _path in pkg_paths:
|
||||
pkg_path = pathlib.Path(_path)
|
||||
assert pkg_path.exists()
|
||||
for dirpath, sub_dirs, files in os.walk(pkg_path):
|
||||
path = pathlib.Path(dirpath)
|
||||
# Directories owned by salt:salt or their subdirs/files
|
||||
if (
|
||||
str(path) in pkg_paths_salt_user or str(path) in salt_user_subdirs
|
||||
) and str(path) not in pkg_paths_salt_user_exclusions:
|
||||
assert path.owner() == "salt"
|
||||
assert path.group() == "salt"
|
||||
salt_user_subdirs.extend(
|
||||
[str(path.joinpath(sub_dir)) for sub_dir in sub_dirs]
|
||||
)
|
||||
# Individual files owned by salt user
|
||||
for file in files:
|
||||
file_path = path.joinpath(file)
|
||||
if str(file_path) not in pkg_paths_salt_user_exclusions:
|
||||
assert file_path.owner() == "salt"
|
||||
# Directories owned by root:root
|
||||
else:
|
||||
assert path.owner() == "root"
|
||||
assert path.group() == "root"
|
||||
for file in files:
|
||||
file_path = path.joinpath(file)
|
||||
# Individual files owned by salt user
|
||||
if str(file_path) in pkg_paths_salt_user:
|
||||
assert file_path.owner() == "salt"
|
||||
else:
|
||||
assert file_path.owner() == "root"
|
||||
assert file_path.group() == "root"
|
||||
|
||||
|
||||
@pytest.mark.skip_if_binaries_missing("logrotate")
|
||||
def test_paths_log_rotation(
|
||||
salt_master, salt_minion, salt_call_cli, install_salt, pkg_tests_account
|
||||
):
|
||||
"""
|
||||
Test the correct ownership is assigned when log rotation occurs
|
||||
Change the user in the Salt Master, chage ownership, force logrotation
|
||||
Check ownership and premissions.
|
||||
Assumes test_pkg_paths successful
|
||||
"""
|
||||
if packaging.version.parse(install_salt.version) <= packaging.version.parse(
|
||||
"3006.4"
|
||||
):
|
||||
pytest.skip("Package path ownership was changed in salt 3006.4")
|
||||
|
||||
if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"):
|
||||
pytest.skip(
|
||||
"Only tests RedHat family packages till logrotation paths are resolved on Ubuntu/Debian, see issue 65231"
|
||||
)
|
||||
|
||||
# check that the salt_master is running
|
||||
assert salt_master.is_running()
|
||||
match = False
|
||||
for proc in psutil.Process(salt_master.pid).children():
|
||||
assert proc.username() == "salt"
|
||||
match = True
|
||||
|
||||
assert match
|
||||
|
||||
# Paths created by package installs with adjustment for current conf_dir /etc/salt
|
||||
log_pkg_paths = [
|
||||
install_salt.conf_dir, # "bkup0"
|
||||
"/var/cache/salt", # "bkup1"
|
||||
"/var/log/salt", # "bkup2"
|
||||
"/var/run/salt", # "bkup3"
|
||||
"/opt/saltstack/salt", # "bkup4"
|
||||
]
|
||||
|
||||
# backup those about to change
|
||||
bkup_count = 0
|
||||
bkup_count_max = 5
|
||||
with temp_directory("bkup0") as temp_dir_path_0:
|
||||
with temp_directory("bkup1") as temp_dir_path_1:
|
||||
with temp_directory("bkup2") as temp_dir_path_2:
|
||||
with temp_directory("bkup3") as temp_dir_path_3:
|
||||
with temp_directory("bkup4") as temp_dir_path_4:
|
||||
|
||||
assert temp_dir_path_0.is_dir()
|
||||
assert temp_dir_path_1.is_dir()
|
||||
assert temp_dir_path_2.is_dir()
|
||||
assert temp_dir_path_3.is_dir()
|
||||
assert temp_dir_path_4.is_dir()
|
||||
|
||||
# stop the salt_master, so can change user
|
||||
with salt_master.stopped():
|
||||
assert salt_master.is_running() is False
|
||||
|
||||
for _path in log_pkg_paths:
|
||||
if bkup_count == 0:
|
||||
cmd_to_run = (
|
||||
f"cp -a {_path}/* {str(temp_dir_path_0)}/"
|
||||
)
|
||||
elif bkup_count == 1:
|
||||
cmd_to_run = (
|
||||
f"cp -a {_path}/* {str(temp_dir_path_1)}/"
|
||||
)
|
||||
elif bkup_count == 2:
|
||||
cmd_to_run = (
|
||||
f"cp -a {_path}/* {str(temp_dir_path_2)}/"
|
||||
)
|
||||
elif bkup_count == 3:
|
||||
cmd_to_run = (
|
||||
f"cp -a {_path}/* {str(temp_dir_path_3)}/"
|
||||
)
|
||||
elif bkup_count == 4:
|
||||
cmd_to_run = (
|
||||
f"cp -a {_path}/* {str(temp_dir_path_4)}/"
|
||||
)
|
||||
elif bkup_count > 5:
|
||||
# force assertion
|
||||
assert bkup_count < bkup_count_max
|
||||
|
||||
ret = salt_call_cli.run(
|
||||
"--local", "cmd.run", cmd_to_run
|
||||
)
|
||||
bkup_count += 1
|
||||
assert ret.returncode == 0
|
||||
|
||||
# change the user in the master's config file.
|
||||
ret = salt_call_cli.run(
|
||||
"--local",
|
||||
"file.replace",
|
||||
f"{install_salt.conf_dir}/master",
|
||||
"user: salt",
|
||||
f"user: {pkg_tests_account.username}",
|
||||
"flags=['IGNORECASE']",
|
||||
"append_if_not_found=True",
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
|
||||
# change ownership of appropriate paths to user
|
||||
for _path in log_pkg_paths:
|
||||
chg_ownership_cmd = (
|
||||
f"chown -R {pkg_tests_account.username} {_path}"
|
||||
)
|
||||
ret = salt_call_cli.run(
|
||||
"--local", "cmd.run", chg_ownership_cmd
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
|
||||
# restart the salt_master
|
||||
with salt_master.started():
|
||||
assert salt_master.is_running() is True
|
||||
|
||||
# ensure some data in files
|
||||
log_files_list = [
|
||||
"/var/log/salt/api",
|
||||
"/var/log/salt/key",
|
||||
"/var/log/salt/master",
|
||||
]
|
||||
for _path in log_files_list:
|
||||
log_path = pathlib.Path(_path)
|
||||
assert log_path.exists()
|
||||
with log_path.open("a") as f:
|
||||
f.write("This is a log rotation test\n")
|
||||
|
||||
# force log rotation
|
||||
logr_conf_file = "/etc/logrotate.d/salt"
|
||||
logr_conf_path = pathlib.Path(logr_conf_file)
|
||||
if not logr_conf_path.exists():
|
||||
logr_conf_file = "/etc/logrotate.conf"
|
||||
logr_conf_path = pathlib.Path(logr_conf_file)
|
||||
assert logr_conf_path.exists()
|
||||
|
||||
# force log rotation
|
||||
log_rotate_cmd = f"logrotate -f {logr_conf_file}"
|
||||
ret = salt_call_cli.run(
|
||||
"--local", "cmd.run", log_rotate_cmd
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
|
||||
for _path in log_files_list:
|
||||
log_path = pathlib.Path(_path)
|
||||
assert log_path.exists()
|
||||
assert (
|
||||
log_path.owner() == pkg_tests_account.username
|
||||
)
|
||||
assert log_path.stat().st_mode & 0o7777 == 0o640
|
||||
|
||||
# cleanup
|
||||
assert salt_master.is_running() is False
|
||||
|
||||
# change the user in the master's config file.
|
||||
ret = salt_call_cli.run(
|
||||
"--local",
|
||||
"file.replace",
|
||||
f"{install_salt.conf_dir}/master",
|
||||
f"user: {pkg_tests_account.username}",
|
||||
"user: salt",
|
||||
"flags=['IGNORECASE']",
|
||||
"append_if_not_found=True",
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
|
||||
# restore from backed up
|
||||
bkup_count = 0
|
||||
for _path in log_pkg_paths:
|
||||
if bkup_count == 0:
|
||||
cmd_to_run = f"cp -a --force {str(temp_dir_path_0)}/* {_path}/"
|
||||
elif bkup_count == 1:
|
||||
cmd_to_run = f"cp -a --force {str(temp_dir_path_1)}/* {_path}/"
|
||||
elif bkup_count == 2:
|
||||
cmd_to_run = f"cp -a --force {str(temp_dir_path_2)}/* {_path}/"
|
||||
elif bkup_count == 3:
|
||||
cmd_to_run = f"cp -a --force {str(temp_dir_path_3)}/* {_path}/"
|
||||
elif bkup_count == 4:
|
||||
# use --update since /opt/saltstack/salt and would get SIGSEGV since mucking with running code
|
||||
cmd_to_run = f"cp -a --update --force {str(temp_dir_path_4)}/* {_path}/"
|
||||
elif bkup_count > 5:
|
||||
# force assertion
|
||||
assert bkup_count < bkup_count_max
|
||||
|
||||
ret = salt_call_cli.run(
|
||||
"--local", "cmd.run", cmd_to_run
|
||||
)
|
||||
|
||||
bkup_count += 1
|
||||
assert ret.returncode == 0
|
15
tests/pytests/pkg/integration/test_ssm.py
Normal file
15
tests/pytests/pkg/integration/test_ssm.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_unless_on_windows,
|
||||
]
|
||||
|
||||
|
||||
def test_ssm_present(install_salt):
|
||||
"""
|
||||
The ssm.exe binary needs to be present in both the zip and the exe/msi
|
||||
builds
|
||||
"""
|
||||
assert os.path.exists(install_salt.ssm_bin)
|
42
tests/pytests/pkg/integration/test_systemd_config.py
Normal file
42
tests/pytests/pkg/integration/test_systemd_config.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
import subprocess
|
||||
|
||||
import pytest
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows(reason="Linux test only"),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("salt_minion")
|
||||
def test_system_config(grains):
|
||||
"""
|
||||
Test system config
|
||||
"""
|
||||
if grains["os_family"] == "RedHat":
|
||||
if grains["osfinger"] in (
|
||||
"CentOS Stream-8",
|
||||
"CentOS Linux-8",
|
||||
"CentOS Stream-9",
|
||||
"Fedora Linux-36",
|
||||
"VMware Photon OS-3",
|
||||
"VMware Photon OS-4",
|
||||
"VMware Photon OS-5",
|
||||
"Amazon Linux-2023",
|
||||
):
|
||||
expected_retcode = 0
|
||||
else:
|
||||
expected_retcode = 1
|
||||
ret = subprocess.call(
|
||||
"systemctl show -p ${config} salt-minion.service", shell=True
|
||||
)
|
||||
assert ret == expected_retcode
|
||||
|
||||
elif grains["os_family"] == "Debian":
|
||||
if grains["osfinger"] == "Debian-9":
|
||||
expected_retcode = 1
|
||||
else:
|
||||
expected_retcode = 0
|
||||
ret = subprocess.call(
|
||||
"systemctl show -p ${config} salt-minion.service", shell=True
|
||||
)
|
||||
assert ret == expected_retcode
|
141
tests/pytests/pkg/integration/test_version.py
Normal file
141
tests/pytests/pkg/integration/test_version.py
Normal file
|
@ -0,0 +1,141 @@
|
|||
import os.path
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
from pytestskipmarkers.utils import platform
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows
|
||||
def test_salt_version(version, install_salt):
|
||||
"""
|
||||
Test version output from salt --version
|
||||
"""
|
||||
test_bin = os.path.join(*install_salt.binary_paths["salt"])
|
||||
ret = install_salt.proc.run(test_bin, "--version")
|
||||
actual = ret.stdout.strip().split(" ")[:2]
|
||||
expected = ["salt", version]
|
||||
assert actual == expected
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows
|
||||
def test_salt_versions_report_master(install_salt):
|
||||
"""
|
||||
Test running --versions-report on master
|
||||
"""
|
||||
if not install_salt.relenv and not install_salt.classic:
|
||||
pytest.skip("Unable to get the python version dynamically from tiamat builds")
|
||||
test_bin = os.path.join(*install_salt.binary_paths["master"])
|
||||
python_bin = os.path.join(*install_salt.binary_paths["python"])
|
||||
ret = install_salt.proc.run(test_bin, "--versions-report")
|
||||
ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"])
|
||||
py_version = subprocess.run(
|
||||
[str(python_bin), "--version"],
|
||||
capture_output=True,
|
||||
check=True,
|
||||
capture_output=True,
|
||||
).stdout
|
||||
py_version = py_version.decode().strip().replace(" ", ": ")
|
||||
ret.stdout.matcher.fnmatch_lines([f"*{py_version}*"])
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows
|
||||
def test_salt_versions_report_minion(salt_cli, salt_minion):
|
||||
"""
|
||||
Test running test.versions_report on minion
|
||||
"""
|
||||
# Make sure the minion is running
|
||||
assert salt_minion.is_running()
|
||||
# Make sure we can ping the minion ...
|
||||
ret = salt_cli.run(
|
||||
"--timeout=240", "test.ping", minion_tgt=salt_minion.id, _timeout=240
|
||||
)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
ret = salt_cli.run(
|
||||
"--hard-crash",
|
||||
"--failhard",
|
||||
"--timeout=240",
|
||||
"test.versions_report",
|
||||
minion_tgt=salt_minion.id,
|
||||
_timeout=240,
|
||||
)
|
||||
ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"])
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"binary", ["master", "cloud", "syndic", "minion", "call", "api"]
|
||||
)
|
||||
def test_compare_versions(version, binary, install_salt):
|
||||
"""
|
||||
Test compare versions
|
||||
"""
|
||||
if binary in install_salt.binary_paths:
|
||||
ret = install_salt.proc.run(
|
||||
*install_salt.binary_paths[binary],
|
||||
"--version",
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
ret.stdout.matcher.fnmatch_lines([f"*{version}*"])
|
||||
else:
|
||||
if platform.is_windows():
|
||||
pytest.skip(f"Binary not available on windows: {binary}")
|
||||
pytest.fail(
|
||||
f"Platform is not Windows and yet the binary {binary!r} is not available"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.skip_unless_on_darwin()
|
||||
@pytest.mark.parametrize(
|
||||
"symlink",
|
||||
[
|
||||
# We can't create a salt symlink because there is a salt directory
|
||||
"salt",
|
||||
"salt-api",
|
||||
"salt-call",
|
||||
"salt-cloud",
|
||||
"salt-cp",
|
||||
"salt-key",
|
||||
"salt-master",
|
||||
"salt-minion",
|
||||
"salt-proxy",
|
||||
"salt-run",
|
||||
"spm",
|
||||
"salt-ssh",
|
||||
"salt-syndic",
|
||||
],
|
||||
)
|
||||
def test_symlinks_created(version, symlink, install_salt):
|
||||
"""
|
||||
Test symlinks created
|
||||
"""
|
||||
if install_salt.classic:
|
||||
pytest.skip("Symlinks not created for classic macos builds, we adjust the path")
|
||||
if not install_salt.relenv and symlink == "spm":
|
||||
symlink = "salt-spm"
|
||||
ret = install_salt.proc.run(pathlib.Path("/usr/local/sbin") / symlink, "--version")
|
||||
ret.stdout.matcher.fnmatch_lines([f"*{version}*"])
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows()
|
||||
def test_compare_pkg_versions_redhat_rc(version, install_salt):
|
||||
"""
|
||||
Test compare pkg versions for redhat RC packages. A tilde should be included
|
||||
in RC Packages and it should test to be a lower version than a non RC
|
||||
package of the same version. For example, v3004~rc1 should be less than
|
||||
v3004.
|
||||
"""
|
||||
if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora", "photon"):
|
||||
pytest.skip("Only tests rpm packages")
|
||||
|
||||
pkg = [x for x in install_salt.pkgs if "rpm" in x]
|
||||
if not pkg:
|
||||
pytest.skip("Not testing rpm packages")
|
||||
pkg = pkg[0].split("/")[-1]
|
||||
if "rc" not in ".".join(pkg.split(".")[:2]):
|
||||
pytest.skip("Not testing an RC package")
|
||||
assert "~" in pkg
|
||||
comp_pkg = pkg.split("~")[0]
|
||||
ret = install_salt.proc.run("rpmdev-vercmp", pkg, comp_pkg)
|
||||
ret.stdout.matcher.fnmatch_lines([f"{pkg} < {comp_pkg}"])
|
0
tests/pytests/pkg/upgrade/__init__.py
Normal file
0
tests/pytests/pkg/upgrade/__init__.py
Normal file
44
tests/pytests/pkg/upgrade/test_salt_upgrade.py
Normal file
44
tests/pytests/pkg/upgrade/test_salt_upgrade.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
import packaging.version
|
||||
import pytest
|
||||
|
||||
|
||||
def test_salt_upgrade(salt_call_cli, install_salt):
|
||||
"""
|
||||
Test an upgrade of Salt.
|
||||
"""
|
||||
if not install_salt.upgrade:
|
||||
pytest.skip("Not testing an upgrade, do not run")
|
||||
|
||||
if install_salt.relenv:
|
||||
original_py_version = install_salt.package_python_version()
|
||||
|
||||
# Verify previous install version is setup correctly and works
|
||||
ret = salt_call_cli.run("test.version")
|
||||
assert ret.returncode == 0
|
||||
assert packaging.version.parse(ret.data) < packaging.version.parse(
|
||||
install_salt.artifact_version
|
||||
)
|
||||
|
||||
# Test pip install before an upgrade
|
||||
dep = "PyGithub==1.56.0"
|
||||
install = salt_call_cli.run("--local", "pip.install", dep)
|
||||
assert install.returncode == 0
|
||||
|
||||
# Verify we can use the module dependent on the installed package
|
||||
repo = "https://github.com/saltstack/salt.git"
|
||||
use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
|
||||
assert "Authentication information could" in use_lib.stderr
|
||||
|
||||
# Upgrade Salt from previous version and test
|
||||
install_salt.install(upgrade=True)
|
||||
ret = salt_call_cli.run("test.version")
|
||||
assert ret.returncode == 0
|
||||
assert packaging.version.parse(ret.data) == packaging.version.parse(
|
||||
install_salt.artifact_version
|
||||
)
|
||||
|
||||
if install_salt.relenv:
|
||||
new_py_version = install_salt.package_python_version()
|
||||
if new_py_version == original_py_version:
|
||||
# test pip install after an upgrade
|
||||
use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo)
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@ -1,19 +1,50 @@
|
|||
"""
|
||||
Test cases for salt.modules.dig
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.cmdmod as cmdmod
|
||||
import salt.modules.dig as dig
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
dig: {
|
||||
"__salt__": {
|
||||
"cmd.run_all": cmdmod.run_all,
|
||||
return {dig: {}}
|
||||
|
||||
|
||||
class SpfValues:
|
||||
def __call__(self, key, python_shell=False):
|
||||
_spf_values = {
|
||||
"dig +short xmission.com TXT": {
|
||||
"pid": 27282,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": '"v=spf1 a mx include:_spf.xmission.com ?all"',
|
||||
},
|
||||
"dig +short _spf.xmission.com TXT": {
|
||||
"pid": 27282,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": '"v=spf1 a mx ip4:198.60.22.0/24 ip4:166.70.13.0/24 ~all"',
|
||||
},
|
||||
"dig +short xmission-redirect.com TXT": {
|
||||
"pid": 27282,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": "v=spf1 redirect=_spf.xmission.com",
|
||||
},
|
||||
"dig +short foo.com TXT": {
|
||||
"pid": 27282,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": "v=spf1 ip4:216.73.93.70/31 ip4:216.73.93.72/31 ~all",
|
||||
},
|
||||
}
|
||||
}
|
||||
return _spf_values.get(
|
||||
" ".join(key), {"pid": 27310, "retcode": 0, "stderr": "", "stdout": ""}
|
||||
)
|
||||
|
||||
|
||||
def test_dig_cname_found():
|
||||
|
@ -40,3 +71,143 @@ def test_dig_cname_none_found():
|
|||
)
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.CNAME("www.google.com") == ""
|
||||
|
||||
|
||||
def test_check_ip():
|
||||
assert dig.check_ip("127.0.0.1")
|
||||
|
||||
|
||||
def test_check_ip_ipv6():
|
||||
assert dig.check_ip("1111:2222:3333:4444:5555:6666:7777:8888")
|
||||
|
||||
|
||||
def test_check_ip_ipv6_valid():
|
||||
assert dig.check_ip("2607:fa18:0:3::4")
|
||||
|
||||
|
||||
def test_check_ip_neg():
|
||||
assert not dig.check_ip("-127.0.0.1")
|
||||
|
||||
|
||||
def test_check_ip_empty():
|
||||
assert not dig.check_ip("")
|
||||
|
||||
|
||||
def test_a():
|
||||
dig_mock = MagicMock(
|
||||
return_value={
|
||||
"pid": 3656,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": (
|
||||
"74.125.193.104\n"
|
||||
"74.125.193.105\n"
|
||||
"74.125.193.99\n"
|
||||
"74.125.193.106\n"
|
||||
"74.125.193.103\n"
|
||||
"74.125.193.147"
|
||||
),
|
||||
}
|
||||
)
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.A("www.google.com") == [
|
||||
"74.125.193.104",
|
||||
"74.125.193.105",
|
||||
"74.125.193.99",
|
||||
"74.125.193.106",
|
||||
"74.125.193.103",
|
||||
"74.125.193.147",
|
||||
]
|
||||
|
||||
|
||||
def test_ptr():
|
||||
dig_mock = MagicMock(
|
||||
return_value={
|
||||
"pid": 3657,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": ("dns.google."),
|
||||
}
|
||||
)
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.ptr("8.8.8.8") == [
|
||||
"dns.google.",
|
||||
]
|
||||
|
||||
|
||||
def test_aaaa():
|
||||
dig_mock = MagicMock(
|
||||
return_value={
|
||||
"pid": 25451,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": "2607:f8b0:400f:801::1014",
|
||||
}
|
||||
)
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.AAAA("www.google.com") == ["2607:f8b0:400f:801::1014"]
|
||||
|
||||
|
||||
def test_ns():
|
||||
with patch("salt.modules.dig.A", MagicMock(return_value=["ns4.google.com."])):
|
||||
dig_mock = MagicMock(
|
||||
return_value={
|
||||
"pid": 26136,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": "ns4.google.com.",
|
||||
}
|
||||
)
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.NS("google.com") == ["ns4.google.com."]
|
||||
|
||||
|
||||
def test_spf():
|
||||
dig_mock = MagicMock(side_effect=SpfValues())
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.SPF("foo.com") == ["216.73.93.70/31", "216.73.93.72/31"]
|
||||
|
||||
|
||||
def test_spf_redir():
|
||||
"""
|
||||
Test for SPF records which use the 'redirect' SPF mechanism
|
||||
https://en.wikipedia.org/wiki/Sender_Policy_Framework#Mechanisms
|
||||
"""
|
||||
dig_mock = MagicMock(side_effect=SpfValues())
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.SPF("xmission-redirect.com") == ["198.60.22.0/24", "166.70.13.0/24"]
|
||||
|
||||
|
||||
def test_spf_include():
|
||||
"""
|
||||
Test for SPF records which use the 'include' SPF mechanism
|
||||
https://en.wikipedia.org/wiki/Sender_Policy_Framework#Mechanisms
|
||||
"""
|
||||
dig_mock = MagicMock(side_effect=SpfValues())
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.SPF("xmission.com") == ["198.60.22.0/24", "166.70.13.0/24"]
|
||||
|
||||
|
||||
def test_mx():
|
||||
dig_mock = MagicMock(
|
||||
return_value={
|
||||
"pid": 27780,
|
||||
"retcode": 0,
|
||||
"stderr": "",
|
||||
"stdout": (
|
||||
"10 aspmx.l.google.com.\n"
|
||||
"20 alt1.aspmx.l.google.com.\n"
|
||||
"40 alt3.aspmx.l.google.com.\n"
|
||||
"50 alt4.aspmx.l.google.com.\n"
|
||||
"30 alt2.aspmx.l.google.com."
|
||||
),
|
||||
}
|
||||
)
|
||||
with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}):
|
||||
assert dig.MX("google.com") == [
|
||||
["10", "aspmx.l.google.com."],
|
||||
["20", "alt1.aspmx.l.google.com."],
|
||||
["40", "alt3.aspmx.l.google.com."],
|
||||
["50", "alt4.aspmx.l.google.com."],
|
||||
["30", "alt2.aspmx.l.google.com."],
|
||||
]
|
||||
|
|
137
tests/pytests/unit/modules/test_dnsutil.py
Normal file
137
tests/pytests/unit/modules/test_dnsutil.py
Normal file
|
@ -0,0 +1,137 @@
|
|||
"""
|
||||
:codeauthor: Nicole Thomas <nicole@saltstack.com>
|
||||
|
||||
TestCase for salt.modules.dnsutil
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.dnsutil as dnsutil
|
||||
import salt.utils.stringutils
|
||||
from tests.support.mock import MagicMock, mock_open, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_hosts_file():
|
||||
return (
|
||||
"##\n"
|
||||
"# Host Database\n"
|
||||
"#\n"
|
||||
"# localhost is used to configure the loopback interface\n"
|
||||
"# when the system is booting. Do not change this entry.\n"
|
||||
"##\n"
|
||||
"127.0.0.1 localhost\n"
|
||||
"255.255.255.255 broadcasthost\n"
|
||||
"::1 localhost\n"
|
||||
"fe80::1%lo0 localhost"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_hosts_file_rtn():
|
||||
return {
|
||||
"::1": ["localhost"],
|
||||
"255.255.255.255": ["broadcasthost"],
|
||||
"127.0.0.1": ["localhost"],
|
||||
"fe80::1%lo0": ["localhost"],
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_soa_zone():
|
||||
return (
|
||||
"$TTL 3D\n"
|
||||
"@ IN SOA land-5.com. root.land-5.com. (\n"
|
||||
"199609203 ; Serial\n"
|
||||
"28800 ; Refresh\n"
|
||||
"7200 ; Retry\n"
|
||||
"604800 ; Expire\n"
|
||||
"86400) ; Minimum TTL\n"
|
||||
"NS land-5.com.\n\n"
|
||||
"1 PTR localhost."
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_writes_list():
|
||||
return [
|
||||
"##\n",
|
||||
"# Host Database\n",
|
||||
"#\n",
|
||||
"# localhost is used to configure the loopback interface\n",
|
||||
"# when the system is booting. Do not change this entry.\n",
|
||||
"##\n",
|
||||
"127.0.0.1 localhost",
|
||||
"\n",
|
||||
"255.255.255.255 broadcasthost",
|
||||
"\n",
|
||||
"::1 localhost",
|
||||
"\n",
|
||||
"fe80::1%lo0 localhost",
|
||||
"\n",
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {dnsutil: {}}
|
||||
|
||||
|
||||
def test_parse_hosts(mock_hosts_file):
|
||||
with patch("salt.utils.files.fopen", mock_open(read_data=mock_hosts_file)):
|
||||
assert dnsutil.parse_hosts() == {
|
||||
"::1": ["localhost"],
|
||||
"255.255.255.255": ["broadcasthost"],
|
||||
"127.0.0.1": ["localhost"],
|
||||
"fe80::1%lo0": ["localhost"],
|
||||
}
|
||||
|
||||
|
||||
def test_hosts_append(mock_hosts_file, mock_hosts_file_rtn):
|
||||
with patch(
|
||||
"salt.utils.files.fopen", mock_open(read_data=mock_hosts_file)
|
||||
) as m_open, patch(
|
||||
"salt.modules.dnsutil.parse_hosts",
|
||||
MagicMock(return_value=mock_hosts_file_rtn),
|
||||
):
|
||||
dnsutil.hosts_append("/etc/hosts", "127.0.0.1", "ad1.yuk.co,ad2.yuk.co")
|
||||
writes = m_open.write_calls()
|
||||
# We should have called .write() only once, with the expected
|
||||
# content
|
||||
num_writes = len(writes)
|
||||
assert num_writes == 1, num_writes
|
||||
expected = salt.utils.stringutils.to_str("\n127.0.0.1 ad1.yuk.co ad2.yuk.co")
|
||||
assert writes[0] == expected, writes[0]
|
||||
|
||||
|
||||
def test_hosts_remove(mock_hosts_file, mock_writes_list):
|
||||
to_remove = "ad1.yuk.co"
|
||||
new_mock_file = mock_hosts_file + "\n127.0.0.1 " + to_remove + "\n"
|
||||
with patch("salt.utils.files.fopen", mock_open(read_data=new_mock_file)) as m_open:
|
||||
dnsutil.hosts_remove("/etc/hosts", to_remove)
|
||||
writes = m_open.write_calls()
|
||||
assert writes == mock_writes_list, writes
|
||||
|
||||
|
||||
def test_to_seconds_hour():
|
||||
assert dnsutil._to_seconds("4H") == 14400, "Did not detect valid hours as invalid"
|
||||
|
||||
|
||||
def test_to_seconds_day():
|
||||
assert dnsutil._to_seconds("1D") == 86400, "Did not detect valid day as invalid"
|
||||
|
||||
|
||||
def test_to_seconds_week():
|
||||
assert (
|
||||
dnsutil._to_seconds("2W") == 604800
|
||||
), "Did not set time greater than one week to one week"
|
||||
|
||||
|
||||
def test_to_seconds_empty():
|
||||
assert dnsutil._to_seconds("") == 604800, "Did not set empty time to one week"
|
||||
|
||||
|
||||
def test_to_seconds_large():
|
||||
assert (
|
||||
dnsutil._to_seconds("604801") == 604800
|
||||
), "Did not set time greater than one week to one week"
|
|
@ -1,9 +1,369 @@
|
|||
"""
|
||||
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
||||
|
||||
Test cases for salt.modules.dpkg
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.dpkg_lowpkg as dpkg
|
||||
from tests.support.mock import MagicMock, mock_open, patch
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Load salt.modules.dpkg_lowpkg with an empty loader context."""
    loader_setup = {dpkg: {}}
    return loader_setup
|
||||
|
||||
|
||||
# NOTE(review): the two functions below are leftovers from the old
# unittest.TestCase version of this module.  As plain module-level
# functions taking ``self`` they are never invoked by pytest, so the
# log-silencing they implement does not actually happen.  Consider
# converting them into an autouse fixture or removing them.
def setUp(self):
    # Silence salt.modules.dpkg_lowpkg logging, remembering the previous
    # level so tearDown can restore it.
    dpkg_lowpkg_logger = logging.getLogger("salt.modules.dpkg_lowpkg")
    self.level = dpkg_lowpkg_logger.level
    dpkg_lowpkg_logger.setLevel(logging.FATAL)


def tearDown(self):
    # Restore the logger level saved by setUp.
    logging.getLogger("salt.modules.dpkg_lowpkg").setLevel(self.level)
|
||||
|
||||
|
||||
def dpkg_L_side_effect(cmd, **kwargs):
    """Fake ``cmd.run`` handler for ``dpkg -L <package>`` invocations.

    Checks that the command has the expected shape and returns the canned
    file listing for the requested package from ``dpkg_l_output``.
    """
    prefix = cmd[:2]
    assert prefix == ["dpkg", "-L"]
    pkg_name = cmd[2]
    return dpkg_l_output[pkg_name]
|
||||
|
||||
|
||||
# Stderr emitted by ``dpkg-query`` when asked about a package that is not
# installed; reused by the error-path assertions in the tests below.
dpkg_error_msg = """dpkg-query: package 'httpd' is not installed
Use dpkg --contents (= dpkg-deb --contents) to list archive files contents.
"""


# Canned ``dpkg -L <package>`` output keyed by package name, served by
# ``dpkg_L_side_effect`` when the tests stub out ``cmd.run``.
dpkg_l_output = {
    "hostname": """\
/.
/bin
/bin/hostname
/usr
/usr/share
/usr/share/doc
/usr/share/doc/hostname
/usr/share/doc/hostname/changelog.gz
/usr/share/doc/hostname/copyright
/usr/share/man
/usr/share/man/man1
/usr/share/man/man1/hostname.1.gz
/bin/dnsdomainname
/bin/domainname
/bin/nisdomainname
/bin/ypdomainname
/usr/share/man/man1/dnsdomainname.1.gz
/usr/share/man/man1/domainname.1.gz
/usr/share/man/man1/nisdomainname.1.gz
/usr/share/man/man1/ypdomainname.1.gz
"""
}
|
||||
|
||||
|
||||
# 'unpurge' function tests: 2
|
||||
|
||||
|
||||
def test_unpurge():
    """
    unpurge should report no changes when the package list is empty.
    """
    salt_mock = MagicMock(return_value=[])
    mocked_funcs = {"pkg.list_pkgs": salt_mock, "cmd.run": salt_mock}
    with patch.dict(dpkg.__salt__, mocked_funcs):
        result = dpkg.unpurge("curl")
    assert result == {}
|
||||
|
||||
|
||||
def test_unpurge_empty_package():
    """
    Calling unpurge with no packages at all must be a harmless no-op.
    """
    result = dpkg.unpurge()
    assert result == {}
|
||||
|
||||
|
||||
# 'list_pkgs' function tests: 1
|
||||
|
||||
|
||||
def test_list_pkgs():
    """
    list_pkgs returns a name->version mapping on success and passes the
    raw dpkg-query error string through when the command fails.
    """
    # Success path: one installed package in the dpkg-query output.
    ok_run = MagicMock(
        return_value={
            "retcode": 0,
            "stderr": "",
            "stdout": "installed\thostname\t3.21",
        }
    )
    with patch.dict(dpkg.__salt__, {"cmd.run_all": ok_run}):
        assert dpkg.list_pkgs("hostname") == {"hostname": "3.21"}

    # Failure path: dpkg-query reports the package as unknown.
    failed_run = MagicMock(
        return_value={
            "retcode": 1,
            "stderr": "dpkg-query: no packages found matching httpd",
            "stdout": "",
        }
    )
    with patch.dict(dpkg.__salt__, {"cmd.run_all": failed_run}):
        expected = "Error: dpkg-query: no packages found matching httpd"
        assert dpkg.list_pkgs("httpd") == expected
|
||||
|
||||
|
||||
# 'file_list' function tests: 1
|
||||
|
||||
|
||||
def test_file_list():
    """
    Test if it lists the files that belong to a package.
    """
    # dpkg-query reports "hostname" as installed; dpkg -L is answered by
    # the canned listing in dpkg_l_output via dpkg_L_side_effect.
    dpkg_query_mock = MagicMock(
        return_value={"retcode": 0, "stderr": "", "stdout": "installed\thostname"}
    )
    dpkg_L_mock = MagicMock(side_effect=dpkg_L_side_effect)
    with patch.dict(
        dpkg.__salt__, {"cmd.run_all": dpkg_query_mock, "cmd.run": dpkg_L_mock}
    ):
        # file_list sorts the paths, hence the alphabetical order here
        # rather than the dpkg -L output order.
        assert dpkg.file_list("hostname") == {
            "errors": [],
            "files": [
                "/.",
                "/bin",
                "/bin/dnsdomainname",
                "/bin/domainname",
                "/bin/hostname",
                "/bin/nisdomainname",
                "/bin/ypdomainname",
                "/usr",
                "/usr/share",
                "/usr/share/doc",
                "/usr/share/doc/hostname",
                "/usr/share/doc/hostname/changelog.gz",
                "/usr/share/doc/hostname/copyright",
                "/usr/share/man",
                "/usr/share/man/man1",
                "/usr/share/man/man1/dnsdomainname.1.gz",
                "/usr/share/man/man1/domainname.1.gz",
                "/usr/share/man/man1/hostname.1.gz",
                "/usr/share/man/man1/nisdomainname.1.gz",
                "/usr/share/man/man1/ypdomainname.1.gz",
            ],
        }

    # Failure path: the dpkg-query stderr is surfaced verbatim.
    mock = MagicMock(
        return_value={"retcode": 1, "stderr": dpkg_error_msg, "stdout": ""}
    )
    with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}):
        assert dpkg.file_list("httpd") == "Error: " + dpkg_error_msg
|
||||
|
||||
|
||||
# 'file_dict' function tests: 1
|
||||
|
||||
|
||||
def test_file_dict():
    """
    Test if it lists the files that belong to a package, grouped by package
    """
    # Same stubbing as test_file_list: dpkg-query marks "hostname" as
    # installed, dpkg -L returns the canned listing.
    dpkg_query_mock = MagicMock(
        return_value={"retcode": 0, "stderr": "", "stdout": "installed\thostname"}
    )
    dpkg_L_mock = MagicMock(side_effect=dpkg_L_side_effect)
    with patch.dict(
        dpkg.__salt__, {"cmd.run_all": dpkg_query_mock, "cmd.run": dpkg_L_mock}
    ):
        # Unlike file_list, file_dict preserves the dpkg -L output order
        # per package (no sorting).
        expected = {
            "errors": [],
            "packages": {
                "hostname": [
                    "/.",
                    "/bin",
                    "/bin/hostname",
                    "/usr",
                    "/usr/share",
                    "/usr/share/doc",
                    "/usr/share/doc/hostname",
                    "/usr/share/doc/hostname/changelog.gz",
                    "/usr/share/doc/hostname/copyright",
                    "/usr/share/man",
                    "/usr/share/man/man1",
                    "/usr/share/man/man1/hostname.1.gz",
                    "/bin/dnsdomainname",
                    "/bin/domainname",
                    "/bin/nisdomainname",
                    "/bin/ypdomainname",
                    "/usr/share/man/man1/dnsdomainname.1.gz",
                    "/usr/share/man/man1/domainname.1.gz",
                    "/usr/share/man/man1/nisdomainname.1.gz",
                    "/usr/share/man/man1/ypdomainname.1.gz",
                ]
            },
        }
        assert dpkg.file_dict("hostname") == expected

    # Failure path: the dpkg-query stderr is surfaced verbatim.
    mock = MagicMock(
        return_value={"retcode": 1, "stderr": dpkg_error_msg, "stdout": ""}
    )
    with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}):
        assert dpkg.file_dict("httpd") == "Error: " + dpkg_error_msg
|
||||
|
||||
|
||||
def test_bin_pkg_info_spaces():
    """
    bin_pkg_info must parse ``dpkg-deb`` metadata even when the field
    names are padded with spaces around the colon separator.
    """
    deb_metadata = (
        " new Debian package, version 2.0\n"
        " size 123456 bytes: control archive: 4029 bytes.\n"
        " Package : package_name\n"
        " Version : 1.0\n"
        " Section : section_name\n"
        " Priority : priority\n"
        " Architecture : all\n"
        " Description : some package\n"
    )
    dpkg_info_mock = MagicMock(
        return_value={"retcode": 0, "stderr": "", "stdout": deb_metadata}
    )
    # Patch the full set of loader functions bin_pkg_info touches in one go.
    salt_funcs = {
        "config.valid_fileproto": MagicMock(return_value=True),
        "cp.cache_file": MagicMock(return_value="/path/to/some/package.deb"),
        "cmd.run_all": dpkg_info_mock,
    }
    with patch.dict(dpkg.__salt__, salt_funcs):
        parsed = dpkg.bin_pkg_info("package.deb")
        assert parsed["name"] == "package_name"
|
||||
|
||||
|
||||
def test_bin_pkg_info_no_spaces():
    """
    bin_pkg_info must parse ``dpkg-deb`` metadata in the usual
    ``Field: value`` form with no space before the colon.
    """
    deb_metadata = (
        " new Debian package, version 2.0\n"
        " size 123456 bytes: control archive: 4029 bytes.\n"
        " Package: package_name\n"
        " Version: 1.0\n"
        " Section: section_name\n"
        " Priority: priority\n"
        " Architecture: all\n"
        " Description: some package\n"
    )
    dpkg_info_mock = MagicMock(
        return_value={"retcode": 0, "stderr": "", "stdout": deb_metadata}
    )
    # Patch the full set of loader functions bin_pkg_info touches in one go.
    salt_funcs = {
        "config.valid_fileproto": MagicMock(return_value=True),
        "cp.cache_file": MagicMock(return_value="/path/to/some/package.deb"),
        "cmd.run_all": dpkg_info_mock,
    }
    with patch.dict(dpkg.__salt__, salt_funcs):
        parsed = dpkg.bin_pkg_info("package.deb")
        assert parsed["name"] == "package_name"
|
||||
|
||||
|
||||
def test_info():
    """
    Test package info
    """
    # Canned ``dpkg-query -W`` style output: one field per line in
    # ``key:value`` form; continuation lines of the description start
    # with a space, and a lone " ." separates paragraphs.
    mock = MagicMock(
        return_value={
            "retcode": 0,
            "stderr": "",
            "stdout": os.linesep.join(
                [
                    "package:bash",
                    "revision:",
                    "architecture:amd64",
                    "maintainer:Ubuntu Developers"
                    " <ubuntu-devel-discuss@lists.ubuntu.com>",
                    "summary:",
                    "source:bash",
                    "version:4.4.18-2ubuntu1",
                    "section:shells",
                    "installed_size:1588",
                    "size:",
                    "MD5:",
                    "SHA1:",
                    "SHA256:",
                    "origin:",
                    "homepage:http://tiswww.case.edu/php/chet/bash/bashtop.html",
                    "status:ii ",
                    "description:GNU Bourne Again SHell",
                    " Bash is an sh-compatible command language interpreter that"
                    " executes",
                    " commands read from the standard input or from a file. Bash"
                    " also",
                    " incorporates useful features from the Korn and C shells (ksh"
                    " and csh).",
                    " .",
                    " Bash is ultimately intended to be a conformant implementation"
                    " of the",
                    " IEEE POSIX Shell and Tools specification (IEEE Working Group"
                    " 1003.2).",
                    " .",
                    " The Programmable Completion Code, by Ian Macdonald, is now"
                    " found in",
                    " the bash-completion package.",
                    "",
                    "*/~^\\*",  # pylint: disable=W1401
                ]
            ),
        }
    )

    # Stub out the filesystem/which checks so info() relies solely on the
    # mocked dpkg-query output and the Ubuntu 18.04 grains.
    with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}), patch.dict(
        dpkg.__grains__, {"os": "Ubuntu", "osrelease_info": (18, 4)}
    ), patch("salt.utils.path.which", MagicMock(return_value=False)), patch(
        "os.path.exists", MagicMock(return_value=False)
    ), patch(
        "os.path.getmtime", MagicMock(return_value=1560199259.0)
    ):
        # Empty fields (revision, size, MD5, ...) and the trailing marker
        # line are expected to be dropped from the parsed result.
        assert dpkg.info("bash") == {
            "bash": {
                "architecture": "amd64",
                "description": os.linesep.join(
                    [
                        "GNU Bourne Again SHell",
                        " Bash is an sh-compatible command language interpreter"
                        " that executes",
                        " commands read from the standard input or from a file."
                        " Bash also",
                        " incorporates useful features from the Korn and C"
                        " shells (ksh and csh).",
                        " .",
                        " Bash is ultimately intended to be a conformant"
                        " implementation of the",
                        " IEEE POSIX Shell and Tools specification (IEEE"
                        " Working Group 1003.2).",
                        " .",
                        " The Programmable Completion Code, by Ian Macdonald,"
                        " is now found in",
                        " the bash-completion package." + os.linesep,
                    ]
                ),
                "homepage": "http://tiswww.case.edu/php/chet/bash/bashtop.html",
                "maintainer": (
                    "Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>"
                ),
                "package": "bash",
                "section": "shells",
                "source": "bash",
                "status": "ii",
                "version": "4.4.18-2ubuntu1",
            }
        }
|
||||
|
||||
|
||||
def test_get_pkg_license():
|
||||
"""
|
||||
Test _get_pkg_license for ignore errors on reading license from copyright files
|
||||
|
|
898
tests/pytests/unit/modules/test_glusterfs.py
Normal file
898
tests/pytests/unit/modules/test_glusterfs.py
Normal file
|
@ -0,0 +1,898 @@
|
|||
"""
|
||||
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
||||
:codeauthor: Joe Julian <me@joejulian.name>
|
||||
|
||||
Test cases for salt.modules.glusterfs
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.glusterfs as glusterfs
|
||||
from salt.exceptions import SaltInvocationError
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
class GlusterResults:
|
||||
"""This class holds the xml results from gluster cli transactions"""
|
||||
|
||||
class v34:
|
||||
"""This is for version 3.4 results"""
|
||||
|
||||
class list_peers:
|
||||
"""results from "peer status" """
|
||||
|
||||
class peer_probe:
|
||||
fail_cant_connect = fail_bad_hostname = "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
|
||||
"<cliOutput>",
|
||||
" <opRet>-1</opRet>",
|
||||
" <opErrno>107</opErrno>",
|
||||
" <opErrstr>Probe returned with unknown errno 107</opErrstr>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
)
|
||||
|
||||
success_self = "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>1</opErrno>",
|
||||
" <opErrstr>(null)</opErrstr>",
|
||||
" <output>success: on localhost not needed</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
)
|
||||
success_other = "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>0</opErrno>",
|
||||
" <opErrstr>(null)</opErrstr>",
|
||||
" <output>success</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
)
|
||||
success_hostname_after_ip = success_other
|
||||
success_ip_after_hostname = success_other
|
||||
success_already_peer = {
|
||||
"ip": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr>(null)</opErrstr>",
|
||||
" <output>success: host 10.0.0.2 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
"hostname": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr>(null)</opErrstr>",
|
||||
" <output>success: host server2 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
}
|
||||
success_reverse_already_peer = {
|
||||
"ip": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr>(null)</opErrstr>",
|
||||
" <output>success: host 10.0.0.1 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
"hostname": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr>(null)</opErrstr>",
|
||||
" <output>success: host server1 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
}
|
||||
success_first_hostname_from_second_first_time = success_other
|
||||
success_first_hostname_from_second_second_time = (
|
||||
success_reverse_already_peer["hostname"]
|
||||
)
|
||||
success_first_ip_from_second_first_time = success_reverse_already_peer["ip"]
|
||||
|
||||
class v37:
|
||||
class peer_probe:
|
||||
fail_cant_connect = fail_bad_hostname = "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
|
||||
"<cliOutput>",
|
||||
" <opRet>-1</opRet>",
|
||||
" <opErrno>107</opErrno>",
|
||||
" <opErrstr>Probe returned with Transport endpoint is not"
|
||||
" connected</opErrstr>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
)
|
||||
success_self = "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>1</opErrno>",
|
||||
" <opErrstr/>",
|
||||
" <output>Probe on localhost not needed</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
)
|
||||
success_other = "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>0</opErrno>",
|
||||
" <opErrstr/>",
|
||||
" <output/>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
)
|
||||
success_hostname_after_ip = success_other
|
||||
success_ip_after_hostname = success_other
|
||||
success_already_peer = {
|
||||
"ip": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr/>",
|
||||
" <output>Host 10.0.0.2 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
"hostname": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr/>",
|
||||
" <output>Host server2 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
}
|
||||
success_reverse_already_peer = {
|
||||
"ip": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr/>",
|
||||
" <output>Host 10.0.0.1 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
"hostname": "\n".join(
|
||||
[
|
||||
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
||||
" <cliOutput>",
|
||||
" <opRet>0</opRet>",
|
||||
" <opErrno>2</opErrno>",
|
||||
" <opErrstr/>",
|
||||
" <output>Host server1 port 24007 already in peer"
|
||||
" list</output>",
|
||||
"</cliOutput>",
|
||||
"",
|
||||
]
|
||||
),
|
||||
}
|
||||
success_first_hostname_from_second_first_time = (
|
||||
success_reverse_already_peer["hostname"]
|
||||
)
|
||||
success_first_ip_from_second_first_time = success_other
|
||||
success_first_ip_from_second_second_time = success_reverse_already_peer[
|
||||
"ip"
|
||||
]
|
||||
|
||||
|
||||
# gluster --version output collected in the wild.
|
||||
version_output_362 = """
|
||||
glusterfs 3.6.2 built on Jan 22 2015 12:59:57
|
||||
Repository revision: git://git.gluster.com/glusterfs.git
|
||||
Copyright (c) 2006-2011 Gluster Inc. <http://www.gluster.com>
|
||||
GlusterFS comes with ABSOLUTELY NO WARRANTY.
|
||||
You may redistribute copies of GlusterFS under the terms of the GNU General Public License
|
||||
"""
|
||||
|
||||
version_output_61 = """
|
||||
glusterfs 6.1
|
||||
Repository revision: git://git.gluster.org/glusterfs.git
|
||||
Copyright (c) 2006-2016 Red Hat, Inc. <https://www.gluster.org/>
|
||||
GlusterFS comes with ABSOLUTELY NO WARRANTY.
|
||||
It is licensed to you under your choice of the GNU Lesser
|
||||
General Public License, version 3 or any later version (LGPLv3
|
||||
or later), or the GNU General Public License, version 2 (GPLv2),
|
||||
in all cases as published by the Free Software Foundation.
|
||||
"""
|
||||
|
||||
xml_peer_present = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<peer>
|
||||
<uuid>uuid1</uuid>
|
||||
<hostname>node02</hostname>
|
||||
<hostnames>
|
||||
<hostname>node02.domain.dom</hostname>
|
||||
<hostname>10.0.0.2</hostname>
|
||||
</hostnames>
|
||||
</peer>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_volume_present = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<volList>
|
||||
<volume>Newvolume1</volume>
|
||||
<volume>Newvolume2</volume>
|
||||
</volList>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_volume_absent = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<volList>
|
||||
<count>0</count>
|
||||
</volList>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_volume_status = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<volStatus>
|
||||
<volumes>
|
||||
<volume>
|
||||
<volName>myvol1</volName>
|
||||
<nodeCount>3</nodeCount>
|
||||
<node>
|
||||
<hostname>node01</hostname>
|
||||
<path>/tmp/foo</path>
|
||||
<peerid>830700d7-0684-497c-a12c-c02e365fb90b</peerid>
|
||||
<status>1</status>
|
||||
<port>49155</port>
|
||||
<ports>
|
||||
<tcp>49155</tcp>
|
||||
<rdma>N/A</rdma>
|
||||
</ports>
|
||||
<pid>2470</pid>
|
||||
</node>
|
||||
<node>
|
||||
<hostname>NFS Server</hostname>
|
||||
<path>localhost</path>
|
||||
<peerid>830700d7-0684-497c-a12c-c02e365fb90b</peerid>
|
||||
<status>0</status>
|
||||
<port>N/A</port>
|
||||
<ports>
|
||||
<tcp>N/A</tcp>
|
||||
<rdma>N/A</rdma>
|
||||
</ports>
|
||||
<pid>-1</pid>
|
||||
</node>
|
||||
<tasks/>
|
||||
</volume>
|
||||
</volumes>
|
||||
</volStatus>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_volume_info_running = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<volInfo>
|
||||
<volumes>
|
||||
<volume>
|
||||
<name>myvol1</name>
|
||||
<id>f03c2180-cf55-4f77-ae0b-3650f57c82a1</id>
|
||||
<status>1</status>
|
||||
<statusStr>Started</statusStr>
|
||||
<brickCount>1</brickCount>
|
||||
<distCount>1</distCount>
|
||||
<stripeCount>1</stripeCount>
|
||||
<replicaCount>1</replicaCount>
|
||||
<disperseCount>0</disperseCount>
|
||||
<redundancyCount>0</redundancyCount>
|
||||
<type>0</type>
|
||||
<typeStr>Distribute</typeStr>
|
||||
<transport>0</transport>
|
||||
<bricks>
|
||||
<brick uuid="830700d7-0684-497c-a12c-c02e365fb90b">node01:/tmp/foo<name>node01:/tmp/foo</name><hostUuid>830700d7-0684-497c-a12c-c02e365fb90b</hostUuid></brick>
|
||||
</bricks>
|
||||
<optCount>1</optCount>
|
||||
<options>
|
||||
<option>
|
||||
<name>performance.readdir-ahead</name>
|
||||
<value>on</value>
|
||||
</option>
|
||||
</options>
|
||||
</volume>
|
||||
<count>1</count>
|
||||
</volumes>
|
||||
</volInfo>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_volume_info_stopped = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<volInfo>
|
||||
<volumes>
|
||||
<volume>
|
||||
<name>myvol1</name>
|
||||
<status>1</status>
|
||||
</volume>
|
||||
</volumes>
|
||||
</volInfo>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_peer_probe_success = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>0</opErrno>
|
||||
<opErrstr/>
|
||||
<output/>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_peer_probe_already_member = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>2</opErrno>
|
||||
<opErrstr/>
|
||||
<output>Host salt port 24007 already in peer list</output>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_peer_probe_localhost = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>1</opErrno>
|
||||
<opErrstr/>
|
||||
<output>Probe on localhost not needed</output>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_peer_probe_fail_cant_connect = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>-1</opRet>
|
||||
<opErrno>107</opErrno>
|
||||
<opErrstr>Probe returned with Transport endpoint is not connected</opErrstr>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_command_success = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_command_fail = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>-1</opRet>
|
||||
<opErrno>0</opErrno>
|
||||
<opErrstr>Command Failed</opErrstr>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_op_version_37 = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>0</opErrno>
|
||||
<opErrstr/>
|
||||
<volGetopts>
|
||||
<count>1</count>
|
||||
<Option>cluster.op-version</Option>
|
||||
<Value>30707</Value>
|
||||
</volGetopts>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_op_version_312 = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>0</opErrno>
|
||||
<opErrstr/>
|
||||
<volGetopts>
|
||||
<count>1</count>
|
||||
<Opt>
|
||||
<Option>cluster.op-version</Option>
|
||||
<Value>30707</Value>
|
||||
</Opt>
|
||||
</volGetopts>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_max_op_version = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>0</opErrno>
|
||||
<opErrstr/>
|
||||
<volGetopts>
|
||||
<count>1</count>
|
||||
<Opt>
|
||||
<Option>cluster.max-op-version</Option>
|
||||
<Value>31200</Value>
|
||||
</Opt>
|
||||
</volGetopts>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_set_op_version_failure = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>-1</opRet>
|
||||
<opErrno>30800</opErrno>
|
||||
<opErrstr>Required op-version (30707) should not be equal or lower than current cluster op-version (30707).</opErrstr>
|
||||
<cliOp>volSet</cliOp>
|
||||
<output>Set volume unsuccessful</output>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
xml_set_op_version_success = """
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<cliOutput>
|
||||
<opRet>0</opRet>
|
||||
<opErrno>0</opErrno>
|
||||
<opErrstr/>
|
||||
<cliOp>volSet</cliOp>
|
||||
<output>Set volume successful</output>
|
||||
</cliOutput>
|
||||
"""
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Load salt.modules.glusterfs with an empty loader context."""
    modules = {glusterfs: {}}
    return modules
|
||||
|
||||
|
||||
# NOTE(review): vestigial attribute from the old unittest.TestCase
# version of this module; as a module-level name it has no effect under
# pytest and could be removed.
maxDiff = None
|
||||
|
||||
# 'peer_status' function tests: 1
|
||||
|
||||
|
||||
def test__get_version():
    """
    _get_version should parse ``gluster --version`` output, falling back
    to (3, 6) when the banner is unrecognisable.
    """

    def _parsed_version(cli_output):
        # Run _get_version against a stubbed ``cmd.run``.
        cmd_mock = MagicMock(return_value=cli_output)
        with patch.dict(glusterfs.__salt__, {"cmd.run": cmd_mock}):
            return glusterfs._get_version()

    # Unparseable output falls back to the oldest supported version.
    assert _parsed_version("foo") == (3, 6), "default behaviour"

    # Real-world captures of the version banner.
    assert _parsed_version(version_output_362) == (3, 6, 2)
    assert _parsed_version(version_output_61) == (6, 1)

    # A spread of bare version strings, including a multi-digit major.
    expectations = {
        "6.0": (6, 0),
        "4.1.10": (4, 1, 10),
        "5.13": (5, 13),
        "10.0": (10, 0),
    }
    for version_string, expected_tuple in expectations.items():
        banner = "glusterfs {}".format(version_string)
        assert _parsed_version(banner) == expected_tuple
|
||||
|
||||
|
||||
def test_peer_status():
    """
    peer_status maps each peer UUID to its known hostnames, and returns
    an empty dict when no peers are configured.
    """
    # One peer known under three names.
    run_mock = MagicMock(return_value=xml_peer_present)
    with patch.dict(glusterfs.__salt__, {"cmd.run": run_mock}):
        expected = {
            "uuid1": {"hostnames": ["node02", "node02.domain.dom", "10.0.0.2"]}
        }
        assert glusterfs.peer_status() == expected

    # A bare success response means no peers at all.
    run_mock = MagicMock(return_value=xml_command_success)
    with patch.dict(glusterfs.__salt__, {"cmd.run": run_mock}):
        assert glusterfs.peer_status() == {}
|
||||
|
||||
|
||||
# 'peer' function tests: 1
|
||||
|
||||
|
||||
def test_peer():
    """
    peer() is truthy for successful or no-op probes and falsy when the
    probe cannot reach the target host.
    """
    run_mock = MagicMock()
    with patch.dict(glusterfs.__salt__, {"cmd.run": run_mock}):
        # Probing a host that is already in the peer list counts as success.
        run_mock.return_value = xml_peer_probe_already_member
        assert glusterfs.peer("salt")

        # Probing localhost is a harmless no-op, also success.
        run_mock.return_value = xml_peer_probe_localhost
        assert glusterfs.peer("salt")

        # An unreachable endpoint must be reported as failure.
        run_mock.return_value = xml_peer_probe_fail_cant_connect
        assert not glusterfs.peer("salt")
|
||||
|
||||
|
||||
# 'create_volume' function tests: 1
|
||||
|
||||
|
||||
def test_create_volume():
    """
    create_volume validates brick paths, optionally starts the new
    volume, and surfaces CLI failures.
    """
    run_mock = MagicMock(return_value=xml_command_success)
    with patch.dict(glusterfs.__salt__, {"cmd.run": run_mock}):
        # Brick specs must look like host:/absolute/path; both a relative
        # path and a missing colon are rejected.
        for bad_brick in ("host1:brick", "host1/brick"):
            pytest.raises(
                SaltInvocationError, glusterfs.create_volume, "newvolume", bad_brick
            )

        # Validation failures must short-circuit before any CLI call.
        assert not run_mock.called

        start_mock = MagicMock(return_value=True)
        with patch.object(glusterfs, "start_volume", start_mock):
            # Creation without start must not touch start_volume.
            assert glusterfs.create_volume("newvolume", "host1:/brick")
            assert not start_mock.called

            # start=True delegates to start_volume after creation.
            assert glusterfs.create_volume("newvolume", "host1:/brick", start=True)
            assert start_mock.called

            # A failed start makes the overall call fail.
            start_mock.return_value = False
            assert not glusterfs.create_volume("newvolume", "host1:/brick", start=True)

        # A CLI failure is reported even with every option enabled.
        run_mock.return_value = xml_command_fail
        assert not glusterfs.create_volume(
            "newvolume", "host1:/brick", True, True, True, "tcp", True
        )
|
||||
|
||||
|
||||
# 'list_volumes' function tests: 1
|
||||
|
||||
|
||||
def test_list_volumes():
    """
    list_volumes returns the configured volume names, or an empty list
    when none exist.
    """
    run_mock = MagicMock(return_value=xml_volume_absent)
    with patch.dict(glusterfs.__salt__, {"cmd.run": run_mock}):
        assert glusterfs.list_volumes() == []

    run_mock = MagicMock(return_value=xml_volume_present)
    with patch.dict(glusterfs.__salt__, {"cmd.run": run_mock}):
        volumes = glusterfs.list_volumes()
        assert volumes == ["Newvolume1", "Newvolume2"]
|
||||
|
||||
|
||||
# 'status' function tests: 1
|
||||
|
||||
|
||||
def test_status():
    """
    Test if it check the status of a gluster volume.
    """
    # A failed CLI call yields None rather than a partial status dict.
    mock_run = MagicMock(return_value=xml_command_fail)
    with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
        assert glusterfs.status("myvol1") is None

    # Expected parse of xml_volume_status: one online brick plus an
    # offline NFS server node; all leaf values stay strings as parsed
    # from the XML.
    res = {
        "bricks": {
            "node01:/tmp/foo": {
                "host": "node01",
                "hostname": "node01",
                "online": True,
                "path": "/tmp/foo",
                "peerid": "830700d7-0684-497c-a12c-c02e365fb90b",
                "pid": "2470",
                "port": "49155",
                "ports": {"rdma": "N/A", "tcp": "49155"},
                "status": "1",
            }
        },
        "healers": {},
        "nfs": {
            "node01": {
                "host": "NFS Server",
                "hostname": "NFS Server",
                "online": False,
                "path": "localhost",
                "peerid": "830700d7-0684-497c-a12c-c02e365fb90b",
                "pid": "-1",
                "port": "N/A",
                "ports": {"rdma": "N/A", "tcp": "N/A"},
                "status": "0",
            }
        },
    }
    mock = MagicMock(return_value=xml_volume_status)
    with patch.dict(glusterfs.__salt__, {"cmd.run": mock}):
        assert glusterfs.status("myvol1") == res
|
||||
|
||||
|
||||
# 'start_volume' function tests: 1
|
||||
|
||||
|
||||
def test_volume_info():
    """
    Test if it returns the volume info.
    """
    # Expected parse of xml_volume_info_running: a single started
    # distribute volume with one brick and one option set.  Counts and
    # status values remain strings as parsed from the XML.
    res = {
        "myvol1": {
            "brickCount": "1",
            "bricks": {
                "brick1": {
                    "hostUuid": "830700d7-0684-497c-a12c-c02e365fb90b",
                    "path": "node01:/tmp/foo",
                    "uuid": "830700d7-0684-497c-a12c-c02e365fb90b",
                }
            },
            "disperseCount": "0",
            "distCount": "1",
            "id": "f03c2180-cf55-4f77-ae0b-3650f57c82a1",
            "name": "myvol1",
            "optCount": "1",
            "options": {"performance.readdir-ahead": "on"},
            "redundancyCount": "0",
            "replicaCount": "1",
            "status": "1",
            "statusStr": "Started",
            "stripeCount": "1",
            "transport": "0",
            "type": "0",
            "typeStr": "Distribute",
        }
    }
    mock = MagicMock(return_value=xml_volume_info_running)
    with patch.dict(glusterfs.__salt__, {"cmd.run": mock}):
        assert glusterfs.info("myvol1") == res
|
||||
|
||||
|
||||
def test_start_volume():
|
||||
"""
|
||||
Test if it start a gluster volume.
|
||||
"""
|
||||
# Stopped volume
|
||||
mock_info = MagicMock(return_value={"Newvolume1": {"status": "0"}})
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.start_volume("Newvolume1") is True
|
||||
assert glusterfs.start_volume("nonExisting") is False
|
||||
mock_run = MagicMock(return_value=xml_command_fail)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.start_volume("Newvolume1") is False
|
||||
|
||||
# Started volume
|
||||
mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}})
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.start_volume("Newvolume1", force=True) is True
|
||||
mock_run = MagicMock(return_value=xml_command_fail)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
# cmd.run should not be called for already running volume:
|
||||
assert glusterfs.start_volume("Newvolume1") is True
|
||||
# except when forcing:
|
||||
assert glusterfs.start_volume("Newvolume1", force=True) is False
|
||||
|
||||
|
||||
# 'stop_volume' function tests: 1
|
||||
|
||||
|
||||
def test_stop_volume():
|
||||
"""
|
||||
Test if it stop a gluster volume.
|
||||
"""
|
||||
# Stopped volume
|
||||
mock_info = MagicMock(return_value={"Newvolume1": {"status": "0"}})
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.stop_volume("Newvolume1") is True
|
||||
assert glusterfs.stop_volume("nonExisting") is False
|
||||
mock_run = MagicMock(return_value=xml_command_fail)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
# cmd.run should not be called for already stopped volume:
|
||||
assert glusterfs.stop_volume("Newvolume1") is True
|
||||
|
||||
# Started volume
|
||||
mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}})
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.stop_volume("Newvolume1") is True
|
||||
assert glusterfs.stop_volume("nonExisting") is False
|
||||
mock_run = MagicMock(return_value=xml_command_fail)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.stop_volume("Newvolume1") is False
|
||||
|
||||
|
||||
# 'delete_volume' function tests: 1
|
||||
|
||||
|
||||
def test_delete_volume():
|
||||
"""
|
||||
Test if it deletes a gluster volume.
|
||||
"""
|
||||
mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}})
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
# volume doesn't exist
|
||||
assert not glusterfs.delete_volume("Newvolume3")
|
||||
|
||||
mock_stop_volume = MagicMock(return_value=True)
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
with patch.object(glusterfs, "stop_volume", mock_stop_volume):
|
||||
# volume exists, should not be stopped, and is started
|
||||
assert not glusterfs.delete_volume("Newvolume1", False)
|
||||
assert not mock_run.called
|
||||
assert not mock_stop_volume.called
|
||||
|
||||
# volume exists, should be stopped, and is started
|
||||
assert glusterfs.delete_volume("Newvolume1")
|
||||
assert mock_run.called
|
||||
assert mock_stop_volume.called
|
||||
|
||||
# volume exists and isn't started
|
||||
mock_info = MagicMock(return_value={"Newvolume1": {"status": "2"}})
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.delete_volume("Newvolume1")
|
||||
mock_run.return_value = xml_command_fail
|
||||
assert not glusterfs.delete_volume("Newvolume1")
|
||||
|
||||
|
||||
# 'add_volume_bricks' function tests: 1
|
||||
|
||||
|
||||
def test_add_volume_bricks():
|
||||
"""
|
||||
Test if it add brick(s) to an existing volume
|
||||
"""
|
||||
mock_info = MagicMock(
|
||||
return_value={
|
||||
"Newvolume1": {
|
||||
"status": "1",
|
||||
"bricks": {
|
||||
"brick1": {"path": "host:/path1"},
|
||||
"brick2": {"path": "host:/path2"},
|
||||
},
|
||||
}
|
||||
}
|
||||
)
|
||||
with patch.object(glusterfs, "info", mock_info):
|
||||
mock_run = MagicMock(return_value=xml_command_success)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
# Volume does not exist
|
||||
assert not glusterfs.add_volume_bricks("nonExisting", ["bricks"])
|
||||
# Brick already exists
|
||||
assert glusterfs.add_volume_bricks("Newvolume1", ["host:/path2"])
|
||||
# Already existing brick as a string
|
||||
assert glusterfs.add_volume_bricks("Newvolume1", "host:/path2")
|
||||
assert not mock_run.called
|
||||
# A new brick:
|
||||
assert glusterfs.add_volume_bricks("Newvolume1", ["host:/new1"])
|
||||
assert mock_run.called
|
||||
|
||||
# Gluster call fails
|
||||
mock_run.return_value = xml_command_fail
|
||||
assert not glusterfs.add_volume_bricks("Newvolume1", ["new:/path"])
|
||||
|
||||
|
||||
# 'get_op_version' function tests: 1
|
||||
|
||||
|
||||
def test_get_op_version():
|
||||
"""
|
||||
Test retrieving the glusterfs op-version
|
||||
"""
|
||||
|
||||
# Test with xml output structure from v3.7
|
||||
mock_run = MagicMock(return_value=xml_op_version_37)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.get_op_version("test") == "30707"
|
||||
|
||||
# Test with xml output structure from v3.12
|
||||
mock_run = MagicMock(return_value=xml_op_version_312)
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}):
|
||||
assert glusterfs.get_op_version("test") == "30707"
|
||||
|
||||
|
||||
# 'get_max_op_version' function tests: 1
|
||||
|
||||
|
||||
def test_get_max_op_version():
|
||||
"""
|
||||
Test retrieving the glusterfs max-op-version.
|
||||
"""
|
||||
|
||||
mock_xml = MagicMock(return_value=xml_max_op_version)
|
||||
mock_version = MagicMock(return_value="glusterfs 3.9.1")
|
||||
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}):
|
||||
assert not glusterfs.get_max_op_version()[0]
|
||||
|
||||
with patch.object(glusterfs, "_get_version", return_value=(3, 12, 0)):
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_xml}):
|
||||
assert glusterfs.get_max_op_version() == "31200"
|
||||
|
||||
|
||||
# 'set_op_version' function tests: 1
|
||||
|
||||
|
||||
def test_set_op_version():
|
||||
"""
|
||||
Test setting the glusterfs op-version
|
||||
"""
|
||||
mock_failure = MagicMock(return_value=xml_set_op_version_failure)
|
||||
mock_success = MagicMock(return_value=xml_set_op_version_success)
|
||||
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_failure}):
|
||||
assert not glusterfs.set_op_version(30707)[0]
|
||||
|
||||
with patch.dict(glusterfs.__salt__, {"cmd.run": mock_success}):
|
||||
assert glusterfs.set_op_version(31200) == "Set volume successful"
|
|
@ -65,7 +65,7 @@ def test_list_certs():
|
|||
out = keychain.list_certs("/path/to/cert.p12")
|
||||
mock.assert_called_once_with(
|
||||
"security find-certificate -a /path/to/cert.p12 | "
|
||||
'grep -o "alis".*\\" | grep -o \'\\"[-A-Za-z0-9.:() ]*\\"\'',
|
||||
'grep -o "alis.*" | grep -o \'\\"[-A-Za-z0-9.:() ]*\\"\'',
|
||||
python_shell=True,
|
||||
)
|
||||
|
||||
|
@ -79,7 +79,18 @@ def test_get_friendly_name():
|
|||
expected = "ID Installer Salt"
|
||||
mock = MagicMock(return_value="friendlyName: ID Installer Salt")
|
||||
with patch.dict(keychain.__salt__, {"cmd.run": mock}):
|
||||
out = keychain.get_friendly_name("/path/to/cert.p12", "passw0rd")
|
||||
out = keychain.get_friendly_name("/path/to/cert.p12", "passw0rd", legacy=True)
|
||||
mock.assert_called_once_with(
|
||||
"openssl pkcs12 -legacy -in /path/to/cert.p12 -passin pass:passw0rd -info "
|
||||
"-nodes -nokeys 2> /dev/null | grep friendlyName:",
|
||||
python_shell=True,
|
||||
)
|
||||
|
||||
assert out == expected
|
||||
|
||||
mock = MagicMock(return_value="friendlyName: ID Installer Salt")
|
||||
with patch.dict(keychain.__salt__, {"cmd.run": mock}):
|
||||
out = keychain.get_friendly_name("/path/to/cert.p12", "passw0rd", legacy=False)
|
||||
mock.assert_called_once_with(
|
||||
"openssl pkcs12 -in /path/to/cert.p12 -passin pass:passw0rd -info "
|
||||
"-nodes -nokeys 2> /dev/null | grep friendlyName:",
|
||||
|
|
|
@ -5,35 +5,31 @@ import pytest
|
|||
import salt.modules.openscap as openscap
|
||||
from tests.support.mock import MagicMock, Mock, patch
|
||||
|
||||
policy_file = "/usr/share/openscap/policy-file-xccdf.xml"
|
||||
|
||||
@pytest.fixture
|
||||
def policy_file():
|
||||
yield "/usr/share/openscap/policy-file-xccdf.xml"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def random_temp_dir(tmp_path):
|
||||
tmp_dir = tmp_path / "unique"
|
||||
tmp_dir.mkdir()
|
||||
return str(tmp_dir)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(random_temp_dir):
|
||||
def configure_loader_modules(tmp_path):
|
||||
random_temp_dir = tmp_path / "unique"
|
||||
random_temp_dir.mkdir()
|
||||
with patch("salt.modules.openscap.shutil.rmtree", Mock()), patch(
|
||||
"salt.modules.openscap.tempfile.mkdtemp",
|
||||
Mock(return_value=random_temp_dir),
|
||||
Mock(return_value=str(random_temp_dir)),
|
||||
), patch("salt.modules.openscap.os.path.exists", Mock(return_value=True)):
|
||||
yield {openscap: {"__salt__": {"cp.push_dir": MagicMock()}}}
|
||||
|
||||
|
||||
def test_openscap_xccdf_eval_success(random_temp_dir):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")})
|
||||
),
|
||||
):
|
||||
def test_openscap_xccdf_eval_success(policy_file):
|
||||
mock_popen = MagicMock(
|
||||
return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")})
|
||||
)
|
||||
patch_popen = patch("salt.modules.openscap.subprocess.Popen", mock_popen)
|
||||
with patch_popen:
|
||||
response = openscap.xccdf(f"eval --profile Default {policy_file}")
|
||||
|
||||
assert openscap.tempfile.mkdtemp.call_count == 1
|
||||
expected_cmd = [
|
||||
"oscap",
|
||||
"xccdf",
|
||||
|
@ -47,34 +43,35 @@ def test_openscap_xccdf_eval_success(random_temp_dir):
|
|||
"Default",
|
||||
policy_file,
|
||||
]
|
||||
openscap.Popen.assert_called_once_with(
|
||||
openscap.subprocess.Popen.assert_called_once_with(
|
||||
expected_cmd,
|
||||
cwd=openscap.tempfile.mkdtemp.return_value,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(random_temp_dir)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(
|
||||
openscap.tempfile.mkdtemp.return_value
|
||||
)
|
||||
assert openscap.shutil.rmtree.call_count == 1
|
||||
assert response == {
|
||||
"upload_dir": random_temp_dir,
|
||||
expected = {
|
||||
"upload_dir": openscap.tempfile.mkdtemp.return_value,
|
||||
"error": "",
|
||||
"success": True,
|
||||
"returncode": 0,
|
||||
}
|
||||
assert response == expected
|
||||
|
||||
|
||||
def test_openscap_xccdf_eval_success_with_failing_rules(random_temp_dir):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(
|
||||
**{"returncode": 2, "communicate.return_value": ("", "some error")}
|
||||
)
|
||||
),
|
||||
):
|
||||
def test_openscap_xccdf_eval_success_with_failing_rules(policy_file):
|
||||
mock_popen = MagicMock(
|
||||
return_value=Mock(
|
||||
**{"returncode": 2, "communicate.return_value": ("", "some error")}
|
||||
)
|
||||
)
|
||||
patch_popen = patch("salt.modules.openscap.subprocess.Popen", mock_popen)
|
||||
with patch_popen:
|
||||
response = openscap.xccdf(f"eval --profile Default {policy_file}")
|
||||
|
||||
assert openscap.tempfile.mkdtemp.call_count == 1
|
||||
expected_cmd = [
|
||||
"oscap",
|
||||
"xccdf",
|
||||
|
@ -88,49 +85,52 @@ def test_openscap_xccdf_eval_success_with_failing_rules(random_temp_dir):
|
|||
"Default",
|
||||
policy_file,
|
||||
]
|
||||
openscap.Popen.assert_called_once_with(
|
||||
openscap.subprocess.Popen.assert_called_once_with(
|
||||
expected_cmd,
|
||||
cwd=openscap.tempfile.mkdtemp.return_value,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(random_temp_dir)
|
||||
assert openscap.shutil.rmtree.call_count == 1
|
||||
assert response == {
|
||||
"upload_dir": random_temp_dir,
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(
|
||||
openscap.tempfile.mkdtemp.return_value
|
||||
)
|
||||
expected = {
|
||||
"upload_dir": openscap.tempfile.mkdtemp.return_value,
|
||||
"error": "some error",
|
||||
"success": True,
|
||||
"returncode": 2,
|
||||
}
|
||||
assert response == expected
|
||||
|
||||
|
||||
def test_openscap_xccdf_eval_fail_no_profile():
|
||||
response = openscap.xccdf("eval --param Default /unknown/param")
|
||||
error = "the following arguments are required: --profile"
|
||||
assert response == {
|
||||
expected = {
|
||||
"error": error,
|
||||
"upload_dir": None,
|
||||
"success": False,
|
||||
"returncode": None,
|
||||
}
|
||||
assert response == expected
|
||||
|
||||
|
||||
def test_openscap_xccdf_eval_success_ignore_unknown_params(random_temp_dir):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(
|
||||
**{"returncode": 2, "communicate.return_value": ("", "some error")}
|
||||
)
|
||||
),
|
||||
):
|
||||
def test_openscap_xccdf_eval_success_ignore_unknown_params():
|
||||
mock_popen = MagicMock(
|
||||
return_value=Mock(
|
||||
**{"returncode": 2, "communicate.return_value": ("", "some error")}
|
||||
)
|
||||
)
|
||||
patch_popen = patch("salt.modules.openscap.subprocess.Popen", mock_popen)
|
||||
with patch_popen:
|
||||
response = openscap.xccdf("eval --profile Default --param Default /policy/file")
|
||||
assert response == {
|
||||
"upload_dir": random_temp_dir,
|
||||
expected = {
|
||||
"upload_dir": openscap.tempfile.mkdtemp.return_value,
|
||||
"error": "some error",
|
||||
"success": True,
|
||||
"returncode": 2,
|
||||
}
|
||||
assert response == expected
|
||||
expected_cmd = [
|
||||
"oscap",
|
||||
"xccdf",
|
||||
|
@ -144,7 +144,7 @@ def test_openscap_xccdf_eval_success_ignore_unknown_params(random_temp_dir):
|
|||
"Default",
|
||||
"/policy/file",
|
||||
]
|
||||
openscap.Popen.assert_called_once_with(
|
||||
openscap.subprocess.Popen.assert_called_once_with(
|
||||
expected_cmd,
|
||||
cwd=openscap.tempfile.mkdtemp.return_value,
|
||||
stderr=subprocess.PIPE,
|
||||
|
@ -152,43 +152,63 @@ def test_openscap_xccdf_eval_success_ignore_unknown_params(random_temp_dir):
|
|||
)
|
||||
|
||||
|
||||
def test_openscap_xccdf_eval_evaluation_error():
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(
|
||||
**{
|
||||
"returncode": 1,
|
||||
"communicate.return_value": ("", "evaluation error"),
|
||||
}
|
||||
)
|
||||
),
|
||||
):
|
||||
def test_openscap_xccdf_eval_evaluation_error(policy_file):
|
||||
mock_popen = MagicMock(
|
||||
return_value=Mock(
|
||||
**{
|
||||
"returncode": 1,
|
||||
"communicate.return_value": ("", "evaluation error"),
|
||||
}
|
||||
)
|
||||
)
|
||||
patch_popen = patch("salt.modules.openscap.subprocess.Popen", mock_popen)
|
||||
with patch_popen:
|
||||
response = openscap.xccdf(f"eval --profile Default {policy_file}")
|
||||
|
||||
assert response == {
|
||||
expected = {
|
||||
"upload_dir": None,
|
||||
"error": "evaluation error",
|
||||
"success": False,
|
||||
"returncode": 1,
|
||||
}
|
||||
assert response == expected
|
||||
|
||||
|
||||
def test_openscap_xccdf_eval_fail_not_implemented_action():
|
||||
def test_openscap_xccdf_eval_fail_not_implemented_action(policy_file):
|
||||
response = openscap.xccdf(f"info {policy_file}")
|
||||
mock_err = "argument action: invalid choice: 'info' (choose from 'eval')"
|
||||
|
||||
assert response == {
|
||||
expected = {
|
||||
"upload_dir": None,
|
||||
"error": mock_err,
|
||||
"success": False,
|
||||
"returncode": None,
|
||||
}
|
||||
assert response == expected
|
||||
|
||||
|
||||
def test_new_openscap_xccdf_eval_success(random_temp_dir):
|
||||
def test_openscap_xccdf_eval_evaluation_unknown_error(policy_file):
|
||||
mock_popen = MagicMock(
|
||||
return_value=Mock(
|
||||
**{
|
||||
"returncode": 255,
|
||||
"communicate.return_value": ("", "unknown error"),
|
||||
}
|
||||
)
|
||||
)
|
||||
patch_popen = patch("salt.modules.openscap.subprocess.Popen", mock_popen)
|
||||
with patch_popen:
|
||||
response = openscap.xccdf(f"eval --profile Default {policy_file}")
|
||||
expected = {
|
||||
"upload_dir": None,
|
||||
"error": "unknown error",
|
||||
"success": False,
|
||||
"returncode": 255,
|
||||
}
|
||||
assert response == expected
|
||||
|
||||
|
||||
def test_new_openscap_xccdf_eval_success(policy_file):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
"salt.modules.openscap.subprocess.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")})
|
||||
),
|
||||
|
@ -215,25 +235,27 @@ def test_new_openscap_xccdf_eval_success(random_temp_dir):
|
|||
"Default",
|
||||
policy_file,
|
||||
]
|
||||
openscap.Popen.assert_called_once_with(
|
||||
openscap.subprocess.Popen.assert_called_once_with(
|
||||
expected_cmd,
|
||||
cwd=openscap.tempfile.mkdtemp.return_value,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(random_temp_dir)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(
|
||||
openscap.tempfile.mkdtemp.return_value
|
||||
)
|
||||
assert openscap.shutil.rmtree.call_count == 1
|
||||
assert response == {
|
||||
"upload_dir": random_temp_dir,
|
||||
"upload_dir": openscap.tempfile.mkdtemp.return_value,
|
||||
"error": "",
|
||||
"success": True,
|
||||
"returncode": 0,
|
||||
}
|
||||
|
||||
|
||||
def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(random_temp_dir):
|
||||
def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(policy_file):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
"salt.modules.openscap.subprocess.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")})
|
||||
),
|
||||
|
@ -247,7 +269,6 @@ def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(random_temp_dir):
|
|||
report="report.html",
|
||||
)
|
||||
|
||||
assert openscap.tempfile.mkdtemp.call_count == 1
|
||||
expected_cmd = [
|
||||
"oscap",
|
||||
"xccdf",
|
||||
|
@ -263,25 +284,27 @@ def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(random_temp_dir):
|
|||
"/usr/share/xml/another-oval.xml",
|
||||
"/usr/share/xml/oval.xml",
|
||||
]
|
||||
openscap.Popen.assert_called_once_with(
|
||||
openscap.subprocess.Popen.assert_called_once_with(
|
||||
expected_cmd,
|
||||
cwd=openscap.tempfile.mkdtemp.return_value,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(random_temp_dir)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(
|
||||
openscap.tempfile.mkdtemp.return_value
|
||||
)
|
||||
assert openscap.shutil.rmtree.call_count == 1
|
||||
assert response == {
|
||||
"upload_dir": random_temp_dir,
|
||||
"upload_dir": openscap.tempfile.mkdtemp.return_value,
|
||||
"error": "",
|
||||
"success": True,
|
||||
"returncode": 0,
|
||||
}
|
||||
|
||||
|
||||
def test_new_openscap_xccdf_eval_success_with_failing_rules(random_temp_dir):
|
||||
def test_new_openscap_xccdf_eval_success_with_failing_rules(policy_file):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
"salt.modules.openscap.subprocess.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(
|
||||
**{"returncode": 2, "communicate.return_value": ("", "some error")}
|
||||
|
@ -310,25 +333,27 @@ def test_new_openscap_xccdf_eval_success_with_failing_rules(random_temp_dir):
|
|||
"Default",
|
||||
policy_file,
|
||||
]
|
||||
openscap.Popen.assert_called_once_with(
|
||||
openscap.subprocess.Popen.assert_called_once_with(
|
||||
expected_cmd,
|
||||
cwd=openscap.tempfile.mkdtemp.return_value,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(random_temp_dir)
|
||||
openscap.__salt__["cp.push_dir"].assert_called_once_with(
|
||||
openscap.tempfile.mkdtemp.return_value
|
||||
)
|
||||
assert openscap.shutil.rmtree.call_count == 1
|
||||
assert response == {
|
||||
"upload_dir": random_temp_dir,
|
||||
"upload_dir": openscap.tempfile.mkdtemp.return_value,
|
||||
"error": "some error",
|
||||
"success": True,
|
||||
"returncode": 2,
|
||||
}
|
||||
|
||||
|
||||
def test_new_openscap_xccdf_eval_evaluation_error():
|
||||
def test_new_openscap_xccdf_eval_evaluation_error(policy_file):
|
||||
with patch(
|
||||
"salt.modules.openscap.Popen",
|
||||
"salt.modules.openscap.subprocess.Popen",
|
||||
MagicMock(
|
||||
return_value=Mock(
|
||||
**{
|
||||
|
|
|
@ -6,7 +6,8 @@ import pytest
|
|||
import salt.modules.ps
|
||||
import salt.modules.ps as ps
|
||||
import salt.utils.data
|
||||
from salt.exceptions import SaltInvocationError
|
||||
import salt.utils.platform
|
||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||
from tests.support.mock import MagicMock, Mock, call, patch
|
||||
|
||||
psutil = pytest.importorskip("salt.utils.psutil_compat")
|
||||
|
@ -14,6 +15,11 @@ psutil = pytest.importorskip("salt.utils.psutil_compat")
|
|||
# TestCase Exceptions are tested in tests/unit/modules/test_ps.py
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {ps: {}}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_process():
|
||||
status = b"fnord"
|
||||
|
@ -135,9 +141,13 @@ def test__status_when_some_matching_processes_then_only_correct_info_should_be_r
|
|||
HAS_PSUTIL_VERSION = False
|
||||
|
||||
|
||||
PSUTIL2 = psutil.version_info >= (2, 0)
|
||||
|
||||
STUB_CPU_TIMES = namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4)
|
||||
STUB_CPU_TIMES_PERCPU = [
|
||||
namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4),
|
||||
namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4),
|
||||
namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4),
|
||||
namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4),
|
||||
]
|
||||
STUB_VIRT_MEM = namedtuple("vmem", "total available percent used free")(
|
||||
1000, 500, 50, 500, 500
|
||||
)
|
||||
|
@ -153,9 +163,39 @@ STUB_NETWORK_IO = namedtuple(
|
|||
"iostat",
|
||||
"bytes_sent, bytes_recv, packets_sent, packets_recv, errin errout dropin dropout",
|
||||
)(1000, 2000, 500, 600, 1, 2, 3, 4)
|
||||
STUB_NETWORK_IO_PERNIC = {
|
||||
"lo": STUB_NETWORK_IO,
|
||||
"eth0": STUB_NETWORK_IO,
|
||||
"eth1": STUB_NETWORK_IO,
|
||||
}
|
||||
STUB_DISK_IO = namedtuple(
|
||||
"iostat", "read_count, write_count, read_bytes, write_bytes, read_time, write_time"
|
||||
)(1000, 2000, 500, 600, 2000, 3000)
|
||||
STUB_DISK_IO_PERDISK = {
|
||||
"nvme0n1": STUB_DISK_IO,
|
||||
"nvme0n1p1": STUB_DISK_IO,
|
||||
"nvme0n1p2": STUB_DISK_IO,
|
||||
"nvme0n1p3": STUB_DISK_IO,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def stub_memory_usage():
|
||||
return namedtuple(
|
||||
"vmem",
|
||||
"total available percent used free active inactive buffers cached shared",
|
||||
)(
|
||||
15722012672,
|
||||
9329594368,
|
||||
40.7,
|
||||
5137018880,
|
||||
4678086656,
|
||||
6991405056,
|
||||
2078953472,
|
||||
1156378624,
|
||||
4750528512,
|
||||
898908160,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
|
@ -180,7 +220,7 @@ except ImportError:
|
|||
|
||||
|
||||
def _get_proc_name(proc):
|
||||
return proc.name() if PSUTIL2 else proc.name
|
||||
return proc.name()
|
||||
|
||||
|
||||
def _get_proc_pid(proc):
|
||||
|
@ -202,6 +242,7 @@ class DummyProcess:
|
|||
status=None,
|
||||
username=None,
|
||||
pid=None,
|
||||
cpu_times=None,
|
||||
):
|
||||
self._cmdline = salt.utils.data.decode(
|
||||
cmdline if cmdline is not None else [], to_str=True
|
||||
|
@ -218,6 +259,25 @@ class DummyProcess:
|
|||
pid if pid is not None else 12345, to_str=True
|
||||
)
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
scputimes = namedtuple(
|
||||
"scputimes", ["user", "system", "children_user", "children_system"]
|
||||
)
|
||||
dummy_cpu_times = scputimes(7713.79, 1278.44, 17114.2, 2023.36)
|
||||
else:
|
||||
scputimes = namedtuple(
|
||||
"scputimes",
|
||||
["user", "system", "children_user", "children_system", "iowait"],
|
||||
)
|
||||
dummy_cpu_times = scputimes(7713.79, 1278.44, 17114.2, 2023.36, 0.0)
|
||||
self._cpu_times = cpu_times if cpu_times is not None else dummy_cpu_times
|
||||
|
||||
def __enter__(self):
|
||||
pass
|
||||
|
||||
def __exit__(self):
|
||||
pass
|
||||
|
||||
def cmdline(self):
|
||||
return self._cmdline
|
||||
|
||||
|
@ -236,16 +296,18 @@ class DummyProcess:
|
|||
def pid(self):
|
||||
return self._pid
|
||||
|
||||
def cpu_times(self):
|
||||
return self._cpu_times
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mocked_proc():
|
||||
mocked_proc = MagicMock("salt.utils.psutil_compat.Process")
|
||||
if PSUTIL2:
|
||||
mocked_proc.name = Mock(return_value="test_mock_proc")
|
||||
mocked_proc.pid = Mock(return_value=9999999999)
|
||||
else:
|
||||
mocked_proc.name = "test_mock_proc"
|
||||
mocked_proc.pid = 9999999999
|
||||
mocked_proc.name = Mock(return_value="test_mock_proc")
|
||||
mocked_proc.pid = Mock(return_value=9999999999)
|
||||
mocked_proc.cmdline = Mock(
|
||||
return_value=["test_mock_proc", "--arg", "--kwarg=value"]
|
||||
)
|
||||
|
||||
with patch("salt.utils.psutil_compat.Process.send_signal"), patch(
|
||||
"salt.utils.psutil_compat.process_iter",
|
||||
|
@ -254,12 +316,115 @@ def mocked_proc():
|
|||
yield mocked_proc
|
||||
|
||||
|
||||
@pytest.mark.skipif(not ps.PSUTIL2, reason="Only run for psutil 2.x")
|
||||
def test__get_proc_cmdline():
|
||||
cmdline = ["echo", "питон"]
|
||||
ret = ps._get_proc_cmdline(DummyProcess(cmdline=cmdline))
|
||||
assert ret == cmdline, ret
|
||||
|
||||
with patch.object(DummyProcess, "cmdline") as mock_cmdline:
|
||||
mock_cmdline.side_effect = psutil.NoSuchProcess(DummyProcess(cmdline=cmdline))
|
||||
ret = ps._get_proc_cmdline(DummyProcess(cmdline=cmdline))
|
||||
assert ret == []
|
||||
|
||||
with patch.object(DummyProcess, "cmdline") as mock_cmdline:
|
||||
mock_cmdline.side_effect = psutil.AccessDenied(DummyProcess(cmdline=cmdline))
|
||||
ret = ps._get_proc_cmdline(DummyProcess(cmdline=cmdline))
|
||||
assert ret == []
|
||||
|
||||
|
||||
def test__get_proc_create_time():
|
||||
cmdline = ["echo", "питон"]
|
||||
create_time = 1694729500.1093624
|
||||
ret = ps._get_proc_create_time(
|
||||
DummyProcess(cmdline=cmdline, create_time=create_time)
|
||||
)
|
||||
assert ret == create_time
|
||||
|
||||
with patch.object(DummyProcess, "create_time") as mock_create_time:
|
||||
mock_create_time.side_effect = psutil.NoSuchProcess(
|
||||
DummyProcess(cmdline=cmdline, create_time=create_time)
|
||||
)
|
||||
ret = ps._get_proc_create_time(
|
||||
DummyProcess(cmdline=cmdline, create_time=create_time)
|
||||
)
|
||||
assert ret is None
|
||||
|
||||
with patch.object(DummyProcess, "create_time") as mock_create_time:
|
||||
mock_create_time.side_effect = psutil.AccessDenied(
|
||||
DummyProcess(cmdline=cmdline, create_time=create_time)
|
||||
)
|
||||
ret = ps._get_proc_create_time(
|
||||
DummyProcess(cmdline=cmdline, create_time=create_time)
|
||||
)
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test__get_proc_name():
|
||||
cmdline = ["echo", "питон"]
|
||||
proc_name = "proc_name"
|
||||
ret = ps._get_proc_name(DummyProcess(cmdline=cmdline, name=proc_name))
|
||||
assert ret == proc_name
|
||||
|
||||
with patch.object(DummyProcess, "name") as mock_name:
|
||||
mock_name.side_effect = psutil.NoSuchProcess(
|
||||
DummyProcess(cmdline=cmdline, name=proc_name)
|
||||
)
|
||||
ret = ps._get_proc_name(DummyProcess(cmdline=cmdline, name=proc_name))
|
||||
assert ret == []
|
||||
|
||||
with patch.object(DummyProcess, "name") as mock_name:
|
||||
mock_name.side_effect = psutil.AccessDenied(
|
||||
DummyProcess(cmdline=cmdline, name=proc_name)
|
||||
)
|
||||
ret = ps._get_proc_name(DummyProcess(cmdline=cmdline, name=proc_name))
|
||||
assert ret == []
|
||||
|
||||
|
||||
def test__get_proc_status():
|
||||
cmdline = ["echo", "питон"]
|
||||
proc_status = "sleeping"
|
||||
ret = ps._get_proc_status(DummyProcess(cmdline=cmdline, status=proc_status))
|
||||
assert ret == proc_status
|
||||
|
||||
with patch.object(DummyProcess, "status") as mock_status:
|
||||
mock_status.side_effect = psutil.NoSuchProcess(
|
||||
DummyProcess(cmdline=cmdline, status=proc_status)
|
||||
)
|
||||
ret = ps._get_proc_status(DummyProcess(cmdline=cmdline, status=proc_status))
|
||||
assert ret is None
|
||||
|
||||
with patch.object(DummyProcess, "status") as mock_status:
|
||||
mock_status.side_effect = psutil.AccessDenied(
|
||||
DummyProcess(cmdline=cmdline, status=proc_status)
|
||||
)
|
||||
ret = ps._get_proc_status(DummyProcess(cmdline=cmdline, status=proc_status))
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test__get_proc_username():
|
||||
cmdline = ["echo", "питон"]
|
||||
proc_username = "root"
|
||||
ret = ps._get_proc_username(DummyProcess(cmdline=cmdline, username=proc_username))
|
||||
assert ret == proc_username
|
||||
|
||||
with patch.object(DummyProcess, "username") as mock_username:
|
||||
mock_username.side_effect = psutil.NoSuchProcess(
|
||||
DummyProcess(cmdline=cmdline, username=proc_username)
|
||||
)
|
||||
ret = ps._get_proc_username(
|
||||
DummyProcess(cmdline=cmdline, username=proc_username)
|
||||
)
|
||||
assert ret is None
|
||||
|
||||
with patch.object(DummyProcess, "username") as mock_username:
|
||||
mock_username.side_effect = psutil.AccessDenied(
|
||||
DummyProcess(cmdline=cmdline, username=proc_username)
|
||||
)
|
||||
ret = ps._get_proc_username(
|
||||
DummyProcess(cmdline=cmdline, username=proc_username)
|
||||
)
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_get_pid_list():
|
||||
with patch("salt.utils.psutil_compat.pids", MagicMock(return_value=STUB_PID_LIST)):
|
||||
|
@ -267,6 +432,14 @@ def test_get_pid_list():
|
|||
|
||||
|
||||
def test_kill_pid():
|
||||
cmdline = ["echo", "питон"]
|
||||
top_proc = DummyProcess(cmdline=cmdline)
|
||||
|
||||
with patch("salt.utils.psutil_compat.Process") as mock_process:
|
||||
mock_process.side_effect = psutil.NoSuchProcess(top_proc)
|
||||
ret = ps.kill_pid(0, signal=999)
|
||||
assert not ret
|
||||
|
||||
with patch("salt.utils.psutil_compat.Process") as send_signal_mock:
|
||||
ps.kill_pid(0, signal=999)
|
||||
assert send_signal_mock.call_args == call(0)
|
||||
|
@ -278,6 +451,19 @@ def test_pkill(mocked_proc):
|
|||
ps.pkill(_get_proc_name(mocked_proc), signal=test_signal)
|
||||
assert mocked_proc.send_signal.call_args == call(test_signal)
|
||||
|
||||
mocked_proc.send_signal = MagicMock(side_effect=psutil.NoSuchProcess(mocked_proc))
|
||||
ret = ps.pkill(_get_proc_name(mocked_proc), signal=test_signal)
|
||||
assert ret is None
|
||||
|
||||
mocked_proc.username = MagicMock(return_value="root")
|
||||
with patch.object(ps, "_get_proc_username", return_value=None):
|
||||
ret = ps.pkill(_get_proc_name(mocked_proc), signal=test_signal, user="root")
|
||||
assert ret is None
|
||||
|
||||
mocked_proc.username = MagicMock(return_value="root")
|
||||
ret = ps.pkill(_get_proc_name(mocked_proc), signal=test_signal, user="root")
|
||||
assert mocked_proc.send_signal.call_args == call(test_signal)
|
||||
|
||||
|
||||
def test_pgrep(mocked_proc):
|
||||
with patch(
|
||||
|
@ -286,6 +472,10 @@ def test_pgrep(mocked_proc):
|
|||
):
|
||||
assert mocked_proc.pid in (ps.pgrep(_get_proc_name(mocked_proc)) or [])
|
||||
|
||||
assert mocked_proc.pid in (
|
||||
ps.pgrep(_get_proc_name(mocked_proc), full=True) or []
|
||||
)
|
||||
|
||||
|
||||
def test_pgrep_regex(mocked_proc):
|
||||
with patch(
|
||||
|
@ -301,6 +491,14 @@ def test_cpu_percent():
|
|||
with patch("salt.utils.psutil_compat.cpu_percent", MagicMock(return_value=1)):
|
||||
assert ps.cpu_percent() == 1
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.cpu_percent", MagicMock(return_value=(1, 1, 1, 1))
|
||||
):
|
||||
assert ps.cpu_percent(per_cpu=True) == [1, 1, 1, 1]
|
||||
|
||||
with patch("salt.utils.psutil_compat.cpu_percent", MagicMock(return_value=1)):
|
||||
assert ps.cpu_percent(per_cpu=False) == 1
|
||||
|
||||
|
||||
def test_cpu_times():
|
||||
with patch(
|
||||
|
@ -308,12 +506,31 @@ def test_cpu_times():
|
|||
):
|
||||
assert {"idle": 4, "nice": 2, "system": 3, "user": 1} == ps.cpu_times()
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.cpu_times",
|
||||
MagicMock(return_value=STUB_CPU_TIMES_PERCPU),
|
||||
):
|
||||
assert [
|
||||
{"idle": 4, "nice": 2, "system": 3, "user": 1},
|
||||
{"idle": 4, "nice": 2, "system": 3, "user": 1},
|
||||
{"idle": 4, "nice": 2, "system": 3, "user": 1},
|
||||
{"idle": 4, "nice": 2, "system": 3, "user": 1},
|
||||
] == ps.cpu_times(per_cpu=True)
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
HAS_PSUTIL_VERSION is False,
|
||||
reason="psutil 0.6.0 or greater is required for this test",
|
||||
)
|
||||
def test_virtual_memory():
|
||||
with patch("salt.modules.ps.psutil.version_info", (0, 5, 9)):
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ps.virtual_memory()
|
||||
assert (
|
||||
exc.value.error
|
||||
== "virtual_memory is only available in psutil 0.6.0 or greater"
|
||||
)
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.virtual_memory",
|
||||
MagicMock(return_value=STUB_VIRT_MEM),
|
||||
|
@ -332,6 +549,15 @@ def test_virtual_memory():
|
|||
reason="psutil 0.6.0 or greater is required for this test",
|
||||
)
|
||||
def test_swap_memory():
|
||||
|
||||
with patch("salt.modules.ps.psutil.version_info", (0, 5, 9)):
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ps.swap_memory()
|
||||
assert (
|
||||
exc.value.error
|
||||
== "swap_memory is only available in psutil 0.6.0 or greater"
|
||||
)
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.swap_memory",
|
||||
MagicMock(return_value=STUB_SWAP_MEM),
|
||||
|
@ -377,12 +603,21 @@ def test_disk_partition_usage():
|
|||
"salt.utils.psutil_compat.disk_partitions",
|
||||
MagicMock(return_value=[STUB_DISK_PARTITION]),
|
||||
):
|
||||
assert {
|
||||
"device": "/dev/disk0s2",
|
||||
"mountpoint": "/",
|
||||
"opts": "rw,local,rootfs,dovolfs,journaled,multilabel",
|
||||
"fstype": "hfs",
|
||||
} == ps.disk_partitions()[0]
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.disk_usage",
|
||||
MagicMock(return_value=STUB_DISK_USAGE),
|
||||
):
|
||||
result = ps.disk_partition_usage()[0]
|
||||
assert {
|
||||
"device": "/dev/disk0s2",
|
||||
"mountpoint": "/",
|
||||
"fstype": "hfs",
|
||||
"opts": "rw,local,rootfs,dovolfs,journaled,multilabel",
|
||||
"total": 1000,
|
||||
"used": 500,
|
||||
"free": 500,
|
||||
"percent": 50,
|
||||
} == result
|
||||
|
||||
|
||||
def test_network_io_counters():
|
||||
|
@ -401,6 +636,23 @@ def test_network_io_counters():
|
|||
"dropin": 3,
|
||||
} == ps.network_io_counters()
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.net_io_counters",
|
||||
MagicMock(return_value=STUB_NETWORK_IO_PERNIC),
|
||||
):
|
||||
assert {
|
||||
"packets_sent": 500,
|
||||
"packets_recv": 600,
|
||||
"bytes_recv": 2000,
|
||||
"dropout": 4,
|
||||
"bytes_sent": 1000,
|
||||
"errout": 2,
|
||||
"errin": 1,
|
||||
"dropin": 3,
|
||||
} == ps.network_io_counters(interface="eth0")
|
||||
|
||||
assert not ps.network_io_counters(interface="eth2")
|
||||
|
||||
|
||||
def test_disk_io_counters():
|
||||
with patch(
|
||||
|
@ -416,6 +668,21 @@ def test_disk_io_counters():
|
|||
"write_count": 2000,
|
||||
} == ps.disk_io_counters()
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.disk_io_counters",
|
||||
MagicMock(return_value=STUB_DISK_IO_PERDISK),
|
||||
):
|
||||
assert {
|
||||
"read_time": 2000,
|
||||
"write_bytes": 600,
|
||||
"read_bytes": 500,
|
||||
"write_time": 3000,
|
||||
"read_count": 1000,
|
||||
"write_count": 2000,
|
||||
} == ps.disk_io_counters(device="nvme0n1p1")
|
||||
|
||||
assert not ps.disk_io_counters(device="nvme0n1p4")
|
||||
|
||||
|
||||
def test_get_users(stub_user):
|
||||
with patch("salt.utils.psutil_compat.users", MagicMock(return_value=[stub_user])):
|
||||
|
@ -438,6 +705,134 @@ def test_top():
|
|||
result = ps.top(num_processes=1, interval=0)
|
||||
assert len(result) == 1
|
||||
|
||||
cmdline = ["echo", "питон"]
|
||||
top_proc = DummyProcess(cmdline=cmdline)
|
||||
|
||||
with patch("salt.utils.psutil_compat.pids", return_value=[1]):
|
||||
with patch("salt.utils.psutil_compat.Process") as mock_process:
|
||||
mock_process.side_effect = psutil.NoSuchProcess(top_proc)
|
||||
ret = ps.top(num_processes=1, interval=0)
|
||||
assert ret == []
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
scputimes = namedtuple(
|
||||
"scputimes", ["user", "system", "children_user", "children_system"]
|
||||
)
|
||||
zombie_cpu_times = scputimes(0, 0, 0, 0)
|
||||
|
||||
smem_info = namedtuple(
|
||||
"pmem",
|
||||
[
|
||||
"rss",
|
||||
"vms",
|
||||
"num_page_faults",
|
||||
"peak_wset",
|
||||
"wset",
|
||||
"peak_paged_pool",
|
||||
"paged_pool",
|
||||
"peak_nonpaged_pool",
|
||||
"nonpaged_pool28144",
|
||||
"pagefile",
|
||||
"peak_pagefile",
|
||||
"private",
|
||||
],
|
||||
)
|
||||
zombie_mem_info = smem_info(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
|
||||
else:
|
||||
scputimes = namedtuple(
|
||||
"scputimes",
|
||||
["user", "system", "children_user", "children_system", "iowait"],
|
||||
)
|
||||
zombie_cpu_times = scputimes(0, 0, 0, 0, 0)
|
||||
|
||||
smem_info = namedtuple(
|
||||
"pmem", ["rss", "vms", "shared", "text", "lib", "data", "dirty"]
|
||||
)
|
||||
zombie_mem_info = smem_info(0, 0, 0, 0, 0, 0, 0)
|
||||
|
||||
with patch("salt.utils.psutil_compat.pids", return_value=[1]):
|
||||
with patch("salt.utils.psutil_compat.Process", return_value=top_proc):
|
||||
with patch.object(top_proc, "cpu_times") as mock_cpu_times:
|
||||
with patch.object(
|
||||
top_proc, "memory_info", return_value=zombie_mem_info, create=True
|
||||
):
|
||||
mock_cpu_times.side_effect = [
|
||||
psutil.ZombieProcess(top_proc),
|
||||
zombie_cpu_times,
|
||||
zombie_cpu_times,
|
||||
]
|
||||
ret = ps.top(num_processes=1, interval=0)
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
expected_mem = {
|
||||
"rss": 0,
|
||||
"vms": 0,
|
||||
"num_page_faults": 0,
|
||||
"peak_wset": 0,
|
||||
"wset": 0,
|
||||
"peak_paged_pool": 0,
|
||||
"paged_pool": 0,
|
||||
"peak_nonpaged_pool": 0,
|
||||
"nonpaged_pool28144": 0,
|
||||
"pagefile": 0,
|
||||
"peak_pagefile": 0,
|
||||
"private": 0,
|
||||
}
|
||||
|
||||
expected_cpu = {
|
||||
"user": 0,
|
||||
"system": 0,
|
||||
"children_user": 0,
|
||||
"children_system": 0,
|
||||
}
|
||||
|
||||
else:
|
||||
expected_mem = {
|
||||
"rss": 0,
|
||||
"vms": 0,
|
||||
"shared": 0,
|
||||
"text": 0,
|
||||
"lib": 0,
|
||||
"data": 0,
|
||||
"dirty": 0,
|
||||
}
|
||||
|
||||
expected_cpu = {
|
||||
"user": 0,
|
||||
"system": 0,
|
||||
"children_user": 0,
|
||||
"children_system": 0,
|
||||
"iowait": 0,
|
||||
}
|
||||
|
||||
assert ret[0]["mem"] == expected_mem
|
||||
assert ret[0]["cpu"] == expected_cpu
|
||||
|
||||
with patch("salt.utils.psutil_compat.pids", return_value=[1]):
|
||||
with patch("salt.utils.psutil_compat.Process", return_value=top_proc):
|
||||
with patch.object(top_proc, "cpu_times") as mock_cpu_times:
|
||||
mock_cpu_times.side_effect = [
|
||||
top_proc._cpu_times,
|
||||
psutil.NoSuchProcess(top_proc),
|
||||
]
|
||||
ret = ps.top(num_processes=1, interval=0)
|
||||
assert ret == []
|
||||
|
||||
with patch("salt.utils.psutil_compat.pids", return_value=[1]):
|
||||
with patch("salt.utils.psutil_compat.Process", return_value=top_proc):
|
||||
with patch.object(top_proc, "cpu_times") as mock_cpu_times:
|
||||
with patch.object(
|
||||
top_proc, "memory_info", create=True
|
||||
) as mock_memory_info:
|
||||
mock_memory_info.side_effect = psutil.NoSuchProcess(top_proc)
|
||||
mock_cpu_times.side_effect = [
|
||||
psutil.ZombieProcess(top_proc),
|
||||
zombie_cpu_times,
|
||||
zombie_cpu_times,
|
||||
]
|
||||
ret = ps.top(num_processes=1, interval=0)
|
||||
assert ret == []
|
||||
|
||||
|
||||
def test_top_zombie_process():
|
||||
# Get 3 pids that are currently running on the system
|
||||
|
@ -506,3 +901,388 @@ def test_status_when_access_denied_from_psutil_then_raise_exception():
|
|||
# @patch('salt.utils.psutil_compat.get_users', new=MagicMock(return_value=None)) # This will force the function to use utmp
|
||||
# def test_get_users_utmp():
|
||||
# pass
|
||||
|
||||
|
||||
def test_psaux():
|
||||
"""
|
||||
Testing psaux function in the ps module
|
||||
"""
|
||||
|
||||
cmd_run_mock = """
|
||||
USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
|
||||
root 1 0.0 0.0 171584 15740 ? Ss Aug09 4:18 /usr/lib/systemd/systemd --system --deserialize=83
|
||||
root 2 0.0 0.0 0 0 ? S Aug09 0:02 [kthreadd]
|
||||
root 2710129 0.0 0.0 18000 7428 pts/4 S+ Aug21 0:33 sudo -E salt-master -l debug
|
||||
root 2710131 0.0 0.0 18000 1196 pts/6 Ss Aug21 0:00 sudo -E salt-master -l debug
|
||||
"""
|
||||
|
||||
with patch.dict(ps.__salt__, {"cmd.run": MagicMock(return_value=cmd_run_mock)}):
|
||||
expected = [
|
||||
"salt-master",
|
||||
[
|
||||
"root 2710129 0.0 0.0 18000 7428 pts/4 S+ Aug21 0:33 sudo -E salt-master -l debug",
|
||||
"root 2710131 0.0 0.0 18000 1196 pts/6 Ss Aug21 0:00 sudo -E salt-master -l debug",
|
||||
],
|
||||
"2 occurrence(s).",
|
||||
]
|
||||
ret = ps.psaux("salt-master")
|
||||
assert ret == expected
|
||||
|
||||
expected = ["salt-minion", [], "0 occurrence(s)."]
|
||||
ret = ps.psaux("salt-minion")
|
||||
assert ret == expected
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows(reason="ss not available in Windows")
|
||||
def test_ss():
|
||||
"""
|
||||
Testing ss function in the ps module
|
||||
"""
|
||||
|
||||
cmd_run_mock = """
|
||||
tcp LISTEN 0 128 0.0.0.0:22 0.0.0.0:* ino:31907 sk:364b cgroup:/system.slice/sshd.service <->
|
||||
|
||||
tcp LISTEN 0 128 [::]:22 [::]:* ino:31916 sk:36c4 cgroup:/system.slice/sshd.service v6only:1 <->
|
||||
"""
|
||||
|
||||
with patch(
|
||||
"salt.utils.path.which", MagicMock(return_value="/usr/sbin/ss")
|
||||
), patch.dict(ps.__salt__, {"cmd.run": MagicMock(return_value=cmd_run_mock)}):
|
||||
expected = [
|
||||
"sshd",
|
||||
[
|
||||
"tcp LISTEN 0 128 0.0.0.0:22 0.0.0.0:* ino:31907 sk:364b cgroup:/system.slice/sshd.service <->",
|
||||
"tcp LISTEN 0 128 [::]:22 [::]:* ino:31916 sk:36c4 cgroup:/system.slice/sshd.service v6only:1 <->",
|
||||
],
|
||||
]
|
||||
ret = ps.ss("sshd")
|
||||
assert ret == expected
|
||||
|
||||
expected = ["apache2", []]
|
||||
ret = ps.ss("apache2")
|
||||
assert ret == expected
|
||||
|
||||
|
||||
def test_netstat():
|
||||
"""
|
||||
Testing netstat function in the ps module
|
||||
"""
|
||||
|
||||
cmd_run_mock = """
|
||||
Active Internet connections (servers and established)
|
||||
Proto Recv-Q Send-Q Local Address Foreign Address State PID/Program name
|
||||
tcp 0 0 0.0.0.0:22 0.0.0.0:* LISTEN 668/sshd: /usr/sbin
|
||||
tcp6 0 0 :::22 :::* LISTEN 668/sshd: /usr/sbin
|
||||
"""
|
||||
|
||||
with patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/netstat")):
|
||||
with patch.dict(ps.__salt__, {"cmd.run": MagicMock(return_value=cmd_run_mock)}):
|
||||
expected = [
|
||||
"sshd",
|
||||
[
|
||||
"tcp 0 0 0.0.0.0:22 0.0.0.0:* LISTEN 668/sshd: /usr/sbin",
|
||||
"tcp6 0 0 :::22 :::* LISTEN 668/sshd: /usr/sbin",
|
||||
],
|
||||
]
|
||||
ret = ps.netstat("sshd")
|
||||
assert ret == expected
|
||||
|
||||
expected = ["apache2", []]
|
||||
ret = ps.netstat("apache2")
|
||||
assert ret == expected
|
||||
|
||||
|
||||
def test_lsof():
|
||||
"""
|
||||
Testing lsof function in the ps module
|
||||
"""
|
||||
|
||||
sshd_cmd_run_mock = """
|
||||
COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME
|
||||
sshd 1743 root cwd DIR 254,0 4096 2 /
|
||||
sshd 1743 root rtd DIR 254,0 4096 2 /
|
||||
sshd 1743 root txt REG 254,0 925000 7533685 /usr/bin/sshd (deleted)
|
||||
sshd 1743 root DEL REG 254,0 7481413 /usr/lib/libc.so.6
|
||||
sshd 1743 root DEL REG 254,0 7477716 /usr/lib/libcrypto.so.3
|
||||
sshd 1743 root mem REG 254,0 26520 7482162 /usr/lib/libcap-ng.so.0.0.0
|
||||
sshd 1743 root DEL REG 254,0 7512187 /usr/lib/libresolv.so.2
|
||||
sshd 1743 root mem REG 254,0 22400 7481786 /usr/lib/libkeyutils.so.1.10
|
||||
sshd 1743 root mem REG 254,0 55352 7480841 /usr/lib/libkrb5support.so.0.1
|
||||
sshd 1743 root mem REG 254,0 18304 7475778 /usr/lib/libcom_err.so.2.1
|
||||
sshd 1743 root mem REG 254,0 182128 7477432 /usr/lib/libk5crypto.so.3.1
|
||||
sshd 1743 root DEL REG 254,0 7485543 /usr/lib/libaudit.so.1.0.0
|
||||
sshd 1743 root DEL REG 254,0 7485432 /usr/lib/libz.so.1.2.13
|
||||
sshd 1743 root mem REG 254,0 882552 7480814 /usr/lib/libkrb5.so.3.3
|
||||
sshd 1743 root mem REG 254,0 344160 7475833 /usr/lib/libgssapi_krb5.so.2.2
|
||||
sshd 1743 root mem REG 254,0 67536 7482132 /usr/lib/libpam.so.0.85.1
|
||||
sshd 1743 root mem REG 254,0 165832 7481746 /usr/lib/libcrypt.so.2.0.0
|
||||
sshd 1743 root DEL REG 254,0 7480993 /usr/lib/ld-linux-x86-64.so.2
|
||||
sshd 1743 root 0r CHR 1,3 0t0 4 /dev/null
|
||||
sshd 1743 root 1u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED)
|
||||
sshd 1743 root 2u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED)
|
||||
sshd 1743 root 3u IPv4 31907 0t0 TCP *:ssh (LISTEN)
|
||||
sshd 1743 root 4u IPv6 31916 0t0 TCP *:ssh (LISTEN)
|
||||
"""
|
||||
|
||||
apache2_cmd_run_mock = ""
|
||||
|
||||
with patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/netstat")):
|
||||
with patch.dict(
|
||||
ps.__salt__, {"cmd.run": MagicMock(return_value=sshd_cmd_run_mock)}
|
||||
):
|
||||
expected = [
|
||||
"sshd",
|
||||
"\nCOMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME\nsshd 1743 root cwd DIR 254,0 4096 2 /\nsshd 1743 root rtd DIR 254,0 4096 2 /\nsshd 1743 root txt REG 254,0 925000 7533685 /usr/bin/sshd (deleted)\nsshd 1743 root DEL REG 254,0 7481413 /usr/lib/libc.so.6\nsshd 1743 root DEL REG 254,0 7477716 /usr/lib/libcrypto.so.3\nsshd 1743 root mem REG 254,0 26520 7482162 /usr/lib/libcap-ng.so.0.0.0\nsshd 1743 root DEL REG 254,0 7512187 /usr/lib/libresolv.so.2\nsshd 1743 root mem REG 254,0 22400 7481786 /usr/lib/libkeyutils.so.1.10\nsshd 1743 root mem REG 254,0 55352 7480841 /usr/lib/libkrb5support.so.0.1\nsshd 1743 root mem REG 254,0 18304 7475778 /usr/lib/libcom_err.so.2.1\nsshd 1743 root mem REG 254,0 182128 7477432 /usr/lib/libk5crypto.so.3.1\nsshd 1743 root DEL REG 254,0 7485543 /usr/lib/libaudit.so.1.0.0\nsshd 1743 root DEL REG 254,0 7485432 /usr/lib/libz.so.1.2.13\nsshd 1743 root mem REG 254,0 882552 7480814 /usr/lib/libkrb5.so.3.3\nsshd 1743 root mem REG 254,0 344160 7475833 /usr/lib/libgssapi_krb5.so.2.2\nsshd 1743 root mem REG 254,0 67536 7482132 /usr/lib/libpam.so.0.85.1\nsshd 1743 root mem REG 254,0 165832 7481746 /usr/lib/libcrypt.so.2.0.0\nsshd 1743 root DEL REG 254,0 7480993 /usr/lib/ld-linux-x86-64.so.2\nsshd 1743 root 0r CHR 1,3 0t0 4 /dev/null\nsshd 1743 root 1u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED)\nsshd 1743 root 2u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED)\nsshd 1743 root 3u IPv4 31907 0t0 TCP *:ssh (LISTEN)\nsshd 1743 root 4u IPv6 31916 0t0 TCP *:ssh (LISTEN)\n",
|
||||
]
|
||||
ret = ps.lsof("sshd")
|
||||
assert ret == expected
|
||||
|
||||
with patch.dict(
|
||||
ps.__salt__, {"cmd.run": MagicMock(return_value=apache2_cmd_run_mock)}
|
||||
):
|
||||
expected = ["apache2", ""]
|
||||
ret = ps.lsof("apache2")
|
||||
assert ret == expected
|
||||
|
||||
|
||||
def test_boot_time():
|
||||
"""
|
||||
Testing boot_time function in the ps module
|
||||
"""
|
||||
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.boot_time", MagicMock(return_value=1691593290.0)
|
||||
):
|
||||
expected = 1691593290
|
||||
ret = ps.boot_time()
|
||||
assert ret == expected
|
||||
|
||||
expected = "08/09/2023"
|
||||
ret = ps.boot_time(time_format="%m/%d/%Y")
|
||||
assert ret == expected
|
||||
|
||||
with patch("salt.utils.psutil_compat.boot_time") as mock_boot_time:
|
||||
mock_boot_time.side_effect = [AttributeError(), 1691593290.0]
|
||||
expected = 1691593290
|
||||
ret = ps.boot_time()
|
||||
assert ret == expected
|
||||
|
||||
|
||||
def test_num_cpus():
|
||||
"""
|
||||
Testing num_cpus function in the ps module
|
||||
"""
|
||||
|
||||
with patch("salt.utils.psutil_compat.cpu_count") as mock_cpu_count:
|
||||
mock_cpu_count.side_effect = AttributeError()
|
||||
with patch("salt.utils.psutil_compat.NUM_CPUS", create=True, new=5):
|
||||
ret = ps.num_cpus()
|
||||
assert ret == 5
|
||||
|
||||
with patch("salt.utils.psutil_compat.cpu_count") as mock_cpu_count:
|
||||
mock_cpu_count.return_value = 5
|
||||
ret = ps.num_cpus()
|
||||
assert ret == 5
|
||||
|
||||
|
||||
def test_total_physical_memory(stub_memory_usage):
|
||||
"""
|
||||
Testing total_physical_memory function in the ps module
|
||||
"""
|
||||
|
||||
with patch("salt.modules.ps.psutil.version_info", (0, 5, 9)):
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
ps.total_physical_memory()
|
||||
assert (
|
||||
exc.value.error
|
||||
== "virtual_memory is only available in psutil 0.6.0 or greater"
|
||||
)
|
||||
|
||||
with patch("salt.utils.psutil_compat.virtual_memory") as mock_total_physical_memory:
|
||||
mock_total_physical_memory.side_effect = AttributeError()
|
||||
with patch(
|
||||
"salt.utils.psutil_compat.TOTAL_PHYMEM",
|
||||
create=True,
|
||||
new=stub_memory_usage.total,
|
||||
):
|
||||
ret = ps.total_physical_memory()
|
||||
assert ret == 15722012672
|
||||
|
||||
with patch("salt.utils.psutil_compat.virtual_memory") as mock_total_physical_memory:
|
||||
mock_total_physical_memory.return_value = stub_memory_usage
|
||||
ret = ps.total_physical_memory()
|
||||
assert ret == 15722012672
|
||||
|
||||
|
||||
def test_proc_info():
|
||||
"""
|
||||
Testing proc_info function in the ps module
|
||||
"""
|
||||
status = b"fnord"
|
||||
extra_data = {
|
||||
"utime": "42",
|
||||
"stime": "42",
|
||||
"children_utime": "42",
|
||||
"children_stime": "42",
|
||||
"ttynr": "42",
|
||||
"cpu_time": "42",
|
||||
"blkio_ticks": "99",
|
||||
"ppid": "99",
|
||||
"cpu_num": "9999999",
|
||||
}
|
||||
important_data = {
|
||||
"name": b"blerp",
|
||||
"status": status,
|
||||
"create_time": "393829200",
|
||||
"username": "root",
|
||||
}
|
||||
important_data.update(extra_data)
|
||||
status_file = b"Name:\tblerp\nUmask:\t0000\nState:\tI (idle)\nTgid:\t99\nNgid:\t0\nPid:\t99\nPPid:\t2\nTracerPid:\t0\nUid:\t0\t0\t0\t0\nGid:\t0\t0\t0\t0\nFDSize:\t64\nGroups:\t \nNStgid:\t99\nNSpid:\t99\nNSpgid:\t0\nNSsid:\t0\nThreads:\t1\nSigQ:\t3/256078\nSigPnd:\t0000000000000000\nShdPnd:\t0000000000000000\nSigBlk:\t0000000000000000\nSigIgn:\tffffffffffffffff\nSigCgt:\t0000000000000000\nCapInh:\t0000000000000000\nCapPrm:\t000001ffffffffff\nCapEff:\t000001ffffffffff\nCapBnd:\t000001ffffffffff\nCapAmb:\t0000000000000000\nNoNewPrivs:\t0\nSeccomp:\t0\nSeccomp_filters:\t0\nSpeculation_Store_Bypass:\tthread vulnerable\nSpeculationIndirectBranch:\tconditional enabled\nCpus_allowed:\tfff\nCpus_allowed_list:\t0-11\nMems_allowed:\t00000001\nMems_allowed_list:\t0\nvoluntary_ctxt_switches:\t2\nnonvoluntary_ctxt_switches:\t0\n"
|
||||
|
||||
patch_stat_file = patch(
|
||||
"psutil._psplatform.Process._parse_stat_file",
|
||||
return_value=important_data,
|
||||
create=True,
|
||||
)
|
||||
patch_exe = patch(
|
||||
"psutil._psplatform.Process.exe",
|
||||
return_value=important_data["name"].decode(),
|
||||
create=True,
|
||||
)
|
||||
patch_oneshot = patch(
|
||||
"psutil._psplatform.Process.oneshot",
|
||||
return_value={
|
||||
# These keys can be found in psutil/_psbsd.py
|
||||
1: important_data["status"].decode(),
|
||||
# create
|
||||
9: float(important_data["create_time"]),
|
||||
# user
|
||||
14: float(important_data["create_time"]),
|
||||
# sys
|
||||
15: float(important_data["create_time"]),
|
||||
# ch_user
|
||||
16: float(important_data["create_time"]),
|
||||
# ch_sys -- we don't really care what they are, obviously
|
||||
17: float(important_data["create_time"]),
|
||||
24: important_data["name"].decode(),
|
||||
},
|
||||
create=True,
|
||||
)
|
||||
patch_kinfo = patch(
|
||||
"psutil._psplatform.Process._get_kinfo_proc",
|
||||
return_value={
|
||||
# These keys can be found in psutil/_psosx.py
|
||||
9: important_data["status"].decode(),
|
||||
8: float(important_data["create_time"]),
|
||||
10: important_data["name"].decode(),
|
||||
},
|
||||
create=True,
|
||||
)
|
||||
patch_status = patch(
|
||||
"psutil._psplatform.Process.status", return_value=status.decode()
|
||||
)
|
||||
patch_create_time = patch(
|
||||
"psutil._psplatform.Process.create_time", return_value=393829200
|
||||
)
|
||||
with patch_stat_file, patch_status, patch_create_time, patch_exe, patch_oneshot, patch_kinfo:
|
||||
if salt.utils.platform.is_windows():
|
||||
with patch("psutil._pswindows.cext") as mock__psutil_windows:
|
||||
with patch("psutil._pswindows.Process.ppid", return_value=99):
|
||||
mock__psutil_windows.proc_username.return_value = (
|
||||
"NT Authority",
|
||||
"System",
|
||||
)
|
||||
|
||||
expected = {"ppid": 99, "username": r"NT Authority\System"}
|
||||
actual_result = salt.modules.ps.proc_info(
|
||||
pid=99, attrs=["username", "ppid"]
|
||||
)
|
||||
assert actual_result == expected
|
||||
|
||||
expected = {"pid": 99, "name": "blerp"}
|
||||
actual_result = salt.modules.ps.proc_info(
|
||||
pid=99, attrs=["pid", "name"]
|
||||
)
|
||||
assert actual_result == expected
|
||||
else:
|
||||
patch_read_status_file = patch(
|
||||
"psutil._psplatform.Process._read_status_file", return_value=status_file
|
||||
)
|
||||
with patch_read_status_file:
|
||||
expected = {"ppid": 99, "username": "root"}
|
||||
actual_result = salt.modules.ps.proc_info(
|
||||
pid=99, attrs=["username", "ppid"]
|
||||
)
|
||||
assert actual_result == expected
|
||||
|
||||
expected = {"pid": 99, "name": "blerp"}
|
||||
actual_result = salt.modules.ps.proc_info(pid=99, attrs=["pid", "name"])
|
||||
assert actual_result == expected
|
||||
|
||||
|
||||
def test_proc_info_access_denied():
|
||||
"""
|
||||
Testing proc_info function in the ps module
|
||||
when an AccessDenied exception occurs
|
||||
"""
|
||||
cmdline = ["echo", "питон"]
|
||||
dummy_proc = DummyProcess(cmdline=cmdline)
|
||||
with patch("salt.utils.psutil_compat.Process") as mock_process:
|
||||
mock_process.side_effect = psutil.AccessDenied(dummy_proc)
|
||||
with pytest.raises(CommandExecutionError):
|
||||
salt.modules.ps.proc_info(pid=99, attrs=["username", "ppid"])
|
||||
|
||||
|
||||
def test_proc_info_no_such_process():
|
||||
"""
|
||||
Testing proc_info function in the ps module
|
||||
when an NoSuchProcess exception occurs
|
||||
"""
|
||||
cmdline = ["echo", "питон"]
|
||||
dummy_proc = DummyProcess(cmdline=cmdline)
|
||||
with patch("salt.utils.psutil_compat.Process") as mock_process:
|
||||
mock_process.side_effect = psutil.NoSuchProcess(dummy_proc)
|
||||
with pytest.raises(CommandExecutionError):
|
||||
salt.modules.ps.proc_info(pid=99, attrs=["username", "ppid"])
|
||||
|
||||
|
||||
def test_proc_info_attribute_error():
|
||||
"""
|
||||
Testing proc_info function in the ps module
|
||||
when an AttributeError exception occurs
|
||||
"""
|
||||
cmdline = ["echo", "питон"]
|
||||
with patch("salt.utils.psutil_compat.Process") as mock_process:
|
||||
mock_process.side_effect = AttributeError()
|
||||
with pytest.raises(CommandExecutionError):
|
||||
salt.modules.ps.proc_info(pid=99, attrs=["username", "ppid"])
|
||||
|
||||
|
||||
def test__virtual__no_psutil():
|
||||
"""
|
||||
Test __virtual__ function
|
||||
"""
|
||||
with patch.object(ps, "HAS_PSUTIL", False):
|
||||
expected = (
|
||||
False,
|
||||
"The ps module cannot be loaded: python module psutil not installed.",
|
||||
)
|
||||
result = ps.__virtual__()
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test__virtual__wrong_version():
|
||||
with patch("salt.modules.ps.psutil.version_info", (0, 2, 9)):
|
||||
expected = (
|
||||
False,
|
||||
"The ps execution module cannot be loaded: the psutil python module version {}"
|
||||
" is less than 0.3.0".format(psutil.version_info),
|
||||
)
|
||||
result = ps.__virtual__()
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test__virtual__correct_version():
|
||||
with patch("salt.modules.ps.psutil.version_info", (0, 3, 0)):
|
||||
result = ps.__virtual__()
|
||||
assert result
|
||||
|
|
483
tests/pytests/unit/modules/test_redismod.py
Normal file
483
tests/pytests/unit/modules/test_redismod.py
Normal file
|
@ -0,0 +1,483 @@
|
|||
"""
|
||||
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
||||
|
||||
Test cases for salt.modules.redismod
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.redismod as redismod
|
||||
from tests.support.mock import MagicMock
|
||||
|
||||
|
||||
class Mockredis:
|
||||
"""
|
||||
Mock redis class
|
||||
"""
|
||||
|
||||
class ConnectionError(Exception):
|
||||
"""
|
||||
Mock ConnectionError class
|
||||
"""
|
||||
|
||||
|
||||
class MockConnect:
|
||||
"""
|
||||
Mock Connect class
|
||||
"""
|
||||
|
||||
counter = 0
|
||||
|
||||
def __init__(self):
|
||||
self.name = None
|
||||
self.pattern = None
|
||||
self.value = None
|
||||
self.key = None
|
||||
self.seconds = None
|
||||
self.timestamp = None
|
||||
self.field = None
|
||||
self.start = None
|
||||
self.stop = None
|
||||
self.master_host = None
|
||||
self.master_port = None
|
||||
|
||||
@staticmethod
|
||||
def bgrewriteaof():
|
||||
"""
|
||||
Mock bgrewriteaof method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def bgsave():
|
||||
"""
|
||||
Mock bgsave method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def config_get(self, pattern):
|
||||
"""
|
||||
Mock config_get method
|
||||
"""
|
||||
self.pattern = pattern
|
||||
return "A"
|
||||
|
||||
def config_set(self, name, value):
|
||||
"""
|
||||
Mock config_set method
|
||||
"""
|
||||
self.name = name
|
||||
self.value = value
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def dbsize():
|
||||
"""
|
||||
Mock dbsize method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def delete():
|
||||
"""
|
||||
Mock delete method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def exists(self, key):
|
||||
"""
|
||||
Mock exists method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
def expire(self, key, seconds):
|
||||
"""
|
||||
Mock expire method
|
||||
"""
|
||||
self.key = key
|
||||
self.seconds = seconds
|
||||
return "A"
|
||||
|
||||
def expireat(self, key, timestamp):
|
||||
"""
|
||||
Mock expireat method
|
||||
"""
|
||||
self.key = key
|
||||
self.timestamp = timestamp
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def flushall():
|
||||
"""
|
||||
Mock flushall method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def flushdb():
|
||||
"""
|
||||
Mock flushdb method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def get(self, key):
|
||||
"""
|
||||
Mock get method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
def hget(self, key, field):
|
||||
"""
|
||||
Mock hget method
|
||||
"""
|
||||
self.key = key
|
||||
self.field = field
|
||||
return "A"
|
||||
|
||||
def hgetall(self, key):
|
||||
"""
|
||||
Mock hgetall method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def info():
|
||||
"""
|
||||
Mock info method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def keys(self, pattern):
|
||||
"""
|
||||
Mock keys method
|
||||
"""
|
||||
self.pattern = pattern
|
||||
return "A"
|
||||
|
||||
def type(self, key):
|
||||
"""
|
||||
Mock type method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def lastsave():
|
||||
"""
|
||||
Mock lastsave method
|
||||
"""
|
||||
return datetime.now()
|
||||
|
||||
def llen(self, key):
|
||||
"""
|
||||
Mock llen method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
def lrange(self, key, start, stop):
|
||||
"""
|
||||
Mock lrange method
|
||||
"""
|
||||
self.key = key
|
||||
self.start = start
|
||||
self.stop = stop
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def ping():
|
||||
"""
|
||||
Mock ping method
|
||||
"""
|
||||
MockConnect.counter = MockConnect.counter + 1
|
||||
if MockConnect.counter == 1:
|
||||
return "A"
|
||||
elif MockConnect.counter in (2, 3, 5):
|
||||
raise Mockredis.ConnectionError("foo")
|
||||
|
||||
@staticmethod
|
||||
def save():
|
||||
"""
|
||||
Mock save method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def set(self, key, value):
|
||||
"""
|
||||
Mock set method
|
||||
"""
|
||||
self.key = key
|
||||
self.value = value
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def shutdown():
|
||||
"""
|
||||
Mock shutdown method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def slaveof(self, master_host, master_port):
|
||||
"""
|
||||
Mock slaveof method
|
||||
"""
|
||||
self.master_host = master_host
|
||||
self.master_port = master_port
|
||||
return "A"
|
||||
|
||||
def smembers(self, key):
|
||||
"""
|
||||
Mock smembers method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
@staticmethod
|
||||
def time():
|
||||
"""
|
||||
Mock time method
|
||||
"""
|
||||
return "A"
|
||||
|
||||
def zcard(self, key):
|
||||
"""
|
||||
Mock zcard method
|
||||
"""
|
||||
self.key = key
|
||||
return "A"
|
||||
|
||||
def zrange(self, key, start, stop):
|
||||
"""
|
||||
Mock zrange method
|
||||
"""
|
||||
self.key = key
|
||||
self.start = start
|
||||
self.stop = stop
|
||||
return "A"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
redismod: {
|
||||
"redis": Mockredis,
|
||||
"_connect": MagicMock(return_value=MockConnect()),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_bgrewriteaof():
    """bgrewriteaof() proxies BGREWRITEAOF and returns the client's reply."""
    assert redismod.bgrewriteaof() == "A"


def test_bgsave():
    """bgsave() proxies BGSAVE and returns the client's reply."""
    assert redismod.bgsave() == "A"


def test_config_get():
    """config_get() returns server configuration values."""
    assert redismod.config_get("*") == "A"


def test_config_set():
    """config_set() sets a server configuration value."""
    assert redismod.config_set("name", "value") == "A"


def test_dbsize():
    """dbsize() returns the key count of the selected database."""
    assert redismod.dbsize() == "A"


def test_delete():
    """delete() removes keys and returns the number deleted."""
    assert redismod.delete() == "A"


def test_exists():
    """exists() reports whether a key is present."""
    assert redismod.exists("key") == "A"


def test_expire():
    """expire() sets a key's time-to-live in seconds."""
    assert redismod.expire("key", "seconds") == "A"


def test_expireat():
    """expireat() sets a key's expiry at a given UNIX time."""
    assert redismod.expireat("key", "timestamp") == "A"


def test_flushall():
    """flushall() removes all keys from every database."""
    assert redismod.flushall() == "A"


def test_flushdb():
    """flushdb() removes all keys from the selected database."""
    assert redismod.flushdb() == "A"


def test_get_key():
    """get_key() fetches a key's value."""
    assert redismod.get_key("key") == "A"


def test_hget():
    """hget() fetches one field from a redis hash."""
    assert redismod.hget("key", "field") == "A"


def test_hgetall():
    """hgetall() fetches all fields and values from a redis hash."""
    assert redismod.hgetall("key") == "A"


def test_info():
    """info() returns server information and statistics."""
    assert redismod.info() == "A"


def test_keys():
    """keys() lists keys matching a glob-style pattern."""
    assert redismod.keys("pattern") == "A"


def test_key_type():
    """key_type() reports a key's redis type."""
    assert redismod.key_type("key") == "A"


def test_lastsave():
    """lastsave() returns the UNIX time of the last successful save."""
    assert redismod.lastsave()


def test_llen():
    """llen() returns the length of a redis list."""
    assert redismod.llen("key") == "A"


def test_lrange():
    """lrange() returns a slice of a redis list."""
    assert redismod.lrange("key", "start", "stop") == "A"


def test_ping():
    """ping() returns the server reply, and False on connection errors."""
    assert redismod.ping() == "A"
    assert not redismod.ping()


def test_save():
    """save() synchronously saves the dataset to disk."""
    assert redismod.save() == "A"


def test_set_key():
    """set_key() stores a value under a key."""
    assert redismod.set_key("key", "value") == "A"


def test_shutdown():
    """shutdown() saves the dataset and stops the server."""
    assert not redismod.shutdown()
    assert redismod.shutdown()
    assert not redismod.shutdown()


def test_slaveof():
    """slaveof() enslaves the server to another instance or promotes it."""
    assert redismod.slaveof("master_host", "master_port") == "A"


def test_smembers():
    """smembers() returns the members of a redis set."""
    assert redismod.smembers("key") == ["A"]


def test_time():
    """time() returns the current server UNIX time in seconds."""
    assert redismod.time() == "A"


def test_zcard():
    """zcard() returns the cardinality of a sorted set."""
    assert redismod.zcard("key") == "A"


def test_zrange():
    """zrange() returns a slice of a sorted set by index."""
    assert redismod.zrange("key", "start", "stop") == "A"
195
tests/pytests/unit/modules/test_serverdensity_device.py
Normal file
195
tests/pytests/unit/modules/test_serverdensity_device.py
Normal file
|
@ -0,0 +1,195 @@
|
|||
"""
|
||||
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
||||
|
||||
TestCase for salt.modules.serverdensity_device
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.serverdensity_device as serverdensity_device
|
||||
import salt.utils.json
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
class MockRequests:
    """
    Mock of the ``requests`` HTTP library.

    (The original docstring said "Mock smtplib class" — copied from the
    smtp test module — which mislabels what this mocks.)

    Each verb method records the call arguments on the instance and
    returns a fresh ``MockRequests`` object standing in for the HTTP
    response.  ``flag == 1`` simulates an authentication failure (401);
    anything else yields a 200.
    """

    # Class-level toggle shared by all instances; tests flip it to 1 to
    # force 401 responses.
    flag = None
    # Canned body mimicking a Server Density "invalid token" error reply.
    content = """{"message": "Invalid token", "errors": [{"type": "invalid_token", "subject": "token"}]}"""
    status_code = None

    def __init__(self):
        self.url = None
        self.data = None
        self.kwargs = None

    def return_request(self, url, data=None, **kwargs):
        """
        Record the request on this instance and build a mock response
        whose ``status_code`` depends on the class-level ``flag``.
        """
        self.url = url
        self.data = data
        self.kwargs = kwargs
        requests = MockRequests()
        if self.flag == 1:
            requests.status_code = 401
        else:
            requests.status_code = 200
        return requests

    def post(self, url, data=None, **kwargs):
        """
        Mock ``requests.post``.
        """
        return self.return_request(url, data, **kwargs)

    def delete(self, url, **kwargs):
        """
        Mock ``requests.delete``.
        """
        return self.return_request(url, **kwargs)

    def get(self, url, **kwargs):
        """
        Mock ``requests.get``.
        """
        return self.return_request(url, **kwargs)

    def put(self, url, data=None, **kwargs):
        """
        Mock ``requests.put``.
        """
        return self.return_request(url, data, **kwargs)
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Inject the mocked ``requests`` library into the module under test."""
    return {serverdensity_device: {"requests": MockRequests()}}


@pytest.fixture
def mock_json_loads():
    """A ``json.loads`` stand-in that always raises ``ValueError``."""
    return MagicMock(side_effect=ValueError())
|
||||
|
||||
|
||||
def test_get_sd_auth():
    """
    get_sd_auth() returns the requested value from pillar['serverdensity']
    and raises CommandExecutionError when the pillar or key is absent.
    """
    with patch.dict(serverdensity_device.__pillar__, {"serverdensity": False}):
        pytest.raises(CommandExecutionError, serverdensity_device.get_sd_auth, "1")

    with patch.dict(serverdensity_device.__pillar__, {"serverdensity": {"1": "salt"}}):
        assert serverdensity_device.get_sd_auth("1") == "salt"

        pytest.raises(CommandExecutionError, serverdensity_device.get_sd_auth, "2")


def test_create(mock_json_loads):
    """
    create() registers a device; a JSON decode failure raises, and a 401
    response yields None.
    """
    with patch.dict(
        serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}}
    ):
        assert serverdensity_device.create("rich_lama", group="lama_band")

        with patch.object(salt.utils.json, "loads", mock_json_loads):
            pytest.raises(
                CommandExecutionError,
                serverdensity_device.create,
                "rich_lama",
                group="lama_band",
            )

        MockRequests.flag = 1
        assert serverdensity_device.create("rich_lama", group="lama_band") is None


def test_delete(mock_json_loads):
    """
    delete() removes a device; a JSON decode failure raises, and a 401
    response yields None.
    """
    with patch.dict(
        serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}}
    ):
        MockRequests.flag = 0
        assert serverdensity_device.delete("51f7eaf")

        with patch.object(salt.utils.json, "loads", mock_json_loads):
            pytest.raises(CommandExecutionError, serverdensity_device.delete, "51f7eaf")

        MockRequests.flag = 1
        assert serverdensity_device.delete("51f7eaf") is None


def test_ls(mock_json_loads):
    """
    ls() lists devices; a JSON decode failure raises, and a 401 response
    yields None.
    """
    with patch.dict(
        serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}}
    ):
        MockRequests.flag = 0
        assert serverdensity_device.ls(name="lama")

        with patch.object(salt.utils.json, "loads", mock_json_loads):
            pytest.raises(CommandExecutionError, serverdensity_device.ls, name="lama")

        MockRequests.flag = 1
        assert serverdensity_device.ls(name="lama") is None


def test_update(mock_json_loads):
    """
    update() changes device information; a JSON decode failure raises,
    and a 401 response yields None.
    """
    with patch.dict(
        serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}}
    ):
        MockRequests.flag = 0
        assert serverdensity_device.update("51f7eaf", name="lama")

        with patch.object(salt.utils.json, "loads", mock_json_loads):
            pytest.raises(
                CommandExecutionError,
                serverdensity_device.update,
                "51f7eaf",
                name="lama",
            )

        MockRequests.flag = 1
        assert serverdensity_device.update("51f7eaf", name="lama") is None


def test_install_agent():
    """
    install_agent() downloads the v1 agent and installs it with the
    given agent key (pillar supplies ``account_url``).
    """
    mock = MagicMock(return_value=True)
    with patch.dict(
        serverdensity_device.__pillar__, {"serverdensity": {"account_url": "salt"}}
    ):
        with patch.dict(serverdensity_device.__salt__, {"cmd.run": mock}):
            with patch.dict(serverdensity_device.__opts__, {"cachedir": "/"}):
                assert serverdensity_device.install_agent("51f7e")


def test_install_agent_v2():
    """
    install_agent() with agent_version=2 uses ``account_name`` from
    pillar instead of ``account_url``.
    """
    mock = MagicMock(return_value=True)
    with patch.dict(
        serverdensity_device.__pillar__, {"serverdensity": {"account_name": "salt"}}
    ):
        with patch.dict(serverdensity_device.__salt__, {"cmd.run": mock}):
            with patch.dict(serverdensity_device.__opts__, {"cachedir": "/"}):
                assert serverdensity_device.install_agent("51f7e", agent_version=2)
|
66
tests/pytests/unit/modules/test_servicenow.py
Normal file
66
tests/pytests/unit/modules/test_servicenow.py
Normal file
|
@ -0,0 +1,66 @@
|
|||
"""
|
||||
:codeauthor: Anthony Shaw <anthonyshaw@apache.org>
|
||||
|
||||
TestCase for salt.modules.servicenow
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.servicenow as servicenow
|
||||
from tests.support.mock import MagicMock
|
||||
|
||||
|
||||
class MockServiceNowClient:
    """Minimal stand-in for ``servicenow_rest.api.Client``."""

    def __init__(self, instance_name, username, password):
        # Credentials are irrelevant for the tests; accept and discard them.
        pass

    def get(self, query):
        """Echo the query back with its length, mimicking a record lookup."""
        return [{"query_size": len(query), "query_value": query}]
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Configure the servicenow module with a mocked client and credentials."""
    creds = {
        "instance_name": "test",
        "username": "mr_test",
        "password": "test123",
    }
    module_globals = {
        "Client": MockServiceNowClient,
        "__salt__": {"config.option": MagicMock(return_value=creds)},
    }
    if servicenow.HAS_LIBS is False:
        # The real servicenow_rest package is absent: fake it so the
        # module under test can still resolve its Client.
        module_globals["sys.modules"] = {"servicenow_rest": MagicMock()}
        module_globals["sys.modules"][
            "servicenow_rest"
        ].api.Client = MockServiceNowClient
    return {servicenow: module_globals}
|
||||
|
||||
|
||||
def test_module_creation():
    """_get_client() yields a usable client object."""
    client = servicenow._get_client()
    assert client is not None


def test_non_structured_query():
    """A positional query string is passed through unchanged."""
    result = servicenow.non_structured_query("tests", "role=web")
    assert result is not None
    assert result[0]["query_size"] == 8
    assert result[0]["query_value"] == "role=web"


def test_non_structured_query_kwarg():
    """A single keyword argument is rendered as ``key=value``."""
    result = servicenow.non_structured_query("tests", role="web")
    assert result is not None
    assert result[0]["query_size"] == 8
    assert result[0]["query_value"] == "role=web"


def test_non_structured_query_kwarg_multi():
    """Multiple keyword arguments are joined into one query string."""
    result = servicenow.non_structured_query("tests", role="web", type="computer")
    assert result is not None
    assert result[0]["query_size"] == 22
|
328
tests/pytests/unit/modules/test_smtp.py
Normal file
328
tests/pytests/unit/modules/test_smtp.py
Normal file
|
@ -0,0 +1,328 @@
|
|||
"""
|
||||
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
||||
|
||||
TestCase for salt.modules.smtp
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.smtp as smtp
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
class SMTPRecipientsRefused(Exception):
    """Mock of ``smtplib.SMTPRecipientsRefused``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg


class SMTPHeloError(Exception):
    """Mock of ``smtplib.SMTPHeloError``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg


class SMTPSenderRefused(Exception):
    """Mock of ``smtplib.SMTPSenderRefused``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg


class SMTPDataError(Exception):
    """Mock of ``smtplib.SMTPDataError``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg


class SMTPException(Exception):
    """Mock of ``smtplib.SMTPException``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg


class SMTPAuthenticationError(Exception):
    """Mock of ``smtplib.SMTPAuthenticationError``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg
|
||||
|
||||
|
||||
class MockSMTPSSL:
|
||||
"""
|
||||
Mock SMTP_SSL class
|
||||
"""
|
||||
|
||||
flag = None
|
||||
|
||||
def __init__(self, server):
|
||||
pass
|
||||
|
||||
def sendmail(self, sender, recipient, msg):
|
||||
"""
|
||||
Mock sendmail method
|
||||
"""
|
||||
if self.flag == 1:
|
||||
raise SMTPRecipientsRefused("All recipients were refused.")
|
||||
elif self.flag == 2:
|
||||
raise SMTPHeloError("Helo error")
|
||||
elif self.flag == 3:
|
||||
raise SMTPSenderRefused("Sender Refused")
|
||||
elif self.flag == 4:
|
||||
raise SMTPDataError("Data error")
|
||||
return (sender, recipient, msg)
|
||||
|
||||
def login(self, username, password):
|
||||
"""
|
||||
Mock login method
|
||||
"""
|
||||
if self.flag == 5:
|
||||
raise SMTPAuthenticationError("SMTP Authentication Failure")
|
||||
return (username, password)
|
||||
|
||||
@staticmethod
|
||||
def quit():
|
||||
"""
|
||||
Mock quit method
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
class MockSMTP:
|
||||
"""
|
||||
Mock SMTP class
|
||||
"""
|
||||
|
||||
flag = None
|
||||
|
||||
def __init__(self, server):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def ehlo():
|
||||
"""
|
||||
Mock ehlo method
|
||||
"""
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def has_extn(name):
|
||||
"""
|
||||
Mock has_extn method
|
||||
"""
|
||||
return name
|
||||
|
||||
def starttls(self):
|
||||
"""
|
||||
Mock starttls method
|
||||
"""
|
||||
if self.flag == 1:
|
||||
raise SMTPHeloError("Helo error")
|
||||
elif self.flag == 2:
|
||||
raise SMTPException("Exception error")
|
||||
elif self.flag == 3:
|
||||
raise RuntimeError
|
||||
return True
|
||||
|
||||
def sendmail(self, sender, recipient, msg):
|
||||
"""
|
||||
Mock sendmail method
|
||||
"""
|
||||
if self.flag == 1:
|
||||
raise SMTPRecipientsRefused("All recipients were refused.")
|
||||
elif self.flag == 2:
|
||||
raise SMTPHeloError("Helo error")
|
||||
elif self.flag == 3:
|
||||
raise SMTPSenderRefused("Sender Refused")
|
||||
elif self.flag == 4:
|
||||
raise SMTPDataError("Data error")
|
||||
return (sender, recipient, msg)
|
||||
|
||||
@staticmethod
|
||||
def quit():
|
||||
"""
|
||||
Mock quit method
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
class MockGaierror(Exception):
    """Mock of ``socket.gaierror``; keeps *msg* as smtp_error."""

    def __init__(self, msg):
        super().__init__(msg)
        self.smtp_error = msg
|
||||
|
||||
|
||||
class MockSocket:
    """Mock of the ``socket`` module exposing only ``gaierror``."""

    def __init__(self):
        # The smtp module catches socket.gaierror on connect failures.
        self.gaierror = MockGaierror
|
||||
|
||||
|
||||
class MockSmtplib:
    """
    Mock of the ``smtplib`` module: exposes the mock exception types and
    the ``SMTP``/``SMTP_SSL`` factories used by salt.modules.smtp.
    Setting the class-level ``flag`` to 1 makes both factories raise a
    mock ``gaierror`` (connection failure).
    """

    flag = None

    def __init__(self):
        # Re-export the mock exception classes under the names the smtp
        # module looks up on ``smtplib``.
        self.SMTPRecipientsRefused = SMTPRecipientsRefused
        self.SMTPHeloError = SMTPHeloError
        self.SMTPSenderRefused = SMTPSenderRefused
        self.SMTPDataError = SMTPDataError
        self.SMTPException = SMTPException
        self.SMTPAuthenticationError = SMTPAuthenticationError
        self.server = None

    def SMTP_SSL(self, server):
        """Mock ``smtplib.SMTP_SSL``; flag == 1 simulates a DNS failure."""
        self.server = server
        if self.flag == 1:
            raise MockGaierror("gaierror")
        return MockSMTPSSL("server")

    def SMTP(self, server):
        """Mock ``smtplib.SMTP``; flag == 1 simulates a DNS failure."""
        self.server = server
        if self.flag == 1:
            raise MockGaierror("gaierror")
        return MockSMTP("server")
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Give the smtp module mocked ``socket`` and ``smtplib`` globals."""
    return {smtp: {"socket": MockSocket(), "smtplib": MockSmtplib()}}
|
||||
|
||||
|
||||
# 'send_msg' function tests: 1


def test_send_msg():
    """
    Exercise smtp.send_msg() through every mocked failure mode.

    First block (TLS profile): the happy path over SMTP_SSL, then the
    four sendmail failures (MockSMTPSSL flags 1-4).  Second block
    (non-TLS): an authentication failure (flag 5), the MockSMTP failures
    (flags 1-3), and a connection-level gaierror (MockSmtplib flag 1).
    """
    recipient = "admin@example.com"
    body = "This is a salt module test"

    config = MagicMock(
        return_value={
            "smtp.server": "",
            "smtp.tls": "True",
            "smtp.sender": "",
            "smtp.username": "",
            "smtp.password": "",
        }
    )
    with patch.dict(smtp.__salt__, {"config.option": config}):
        # Happy path over SMTP_SSL.
        assert smtp.send_msg(recipient, body, profile="my-smtp-account")

        # Each flag makes MockSMTPSSL.sendmail raise; send_msg returns False.
        for failure_flag in (1, 2, 3, 4):
            MockSMTPSSL.flag = failure_flag
            assert not smtp.send_msg(recipient, body, profile="my-smtp-account")

    config = MagicMock(
        return_value={
            "smtp.server": "",
            "smtp.tls": "",
            "smtp.sender": "",
            "smtp.username": "",
            "smtp.password": "",
        }
    )
    with patch.dict(smtp.__salt__, {"config.option": config}):
        # Flag 5: login raises SMTPAuthenticationError.
        MockSMTPSSL.flag = 5
        assert not smtp.send_msg(
            recipient,
            body,
            username="myuser",
            password="verybadpass",
            sender="admin@example.com",
            server="smtp.domain.com",
        )

        # MockSMTP failure modes (starttls/sendmail raise).
        for failure_flag in (1, 2, 3):
            MockSMTP.flag = failure_flag
            assert not smtp.send_msg(recipient, body, profile="my-smtp-account")

        # Connection-level failure: smtplib.SMTP raises a gaierror.
        MockSmtplib.flag = 1
        assert not smtp.send_msg(recipient, body, profile="my-smtp-account")
|
357
tests/pytests/unit/modules/test_syslog_ng.py
Normal file
357
tests/pytests/unit/modules/test_syslog_ng.py
Normal file
|
@ -0,0 +1,357 @@
|
|||
"""
|
||||
Test cases for salt.modules.syslog_ng
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
from textwrap import dedent
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.syslog_ng as syslog_ng
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
def _version():
    """The syslog-ng version embedded in the canned -V output."""
    return "3.6.0alpha0"


@pytest.fixture
def _modules():
    """The module list embedded in the canned -V output."""
    return (
        "syslogformat,json-plugin,basicfuncs,afstomp,afsocket,cryptofuncs,"
        "afmongodb,dbparser,system-source,affile,pseudofile,afamqp,"
        "afsocket-notls,csvparser,linux-kmsg-format,afuser,confgen,afprog"
    )


@pytest.fixture
def version_output(_version, _modules):
    """Canned ``syslog-ng -V`` stdout built from the version and modules."""
    return """syslog-ng {0}
Installer-Version: {0}
Revision:
Compile-Date: Apr 4 2014 20:26:18
Error opening plugin module; module='afsocket-tls', error='/home/tibi/install/syslog-ng/lib/syslog-ng/libafsocket-tls.so: undefined symbol: tls_context_setup_session'
Available-Modules: {1}
Enable-Debug: on
Enable-GProf: off
Enable-Memtrace: off
Enable-IPv6: on
Enable-Spoof-Source: off
Enable-TCP-Wrapper: off
Enable-Linux-Caps: off""".format(
        _version, _modules
    )


@pytest.fixture
def stats_output():
    """Canned ``syslog-ng-ctl stats`` stdout."""
    return """SourceName;SourceId;SourceInstance;State;Type;Number
center;;received;a;processed;0
destination;#anon-destination0;;a;processed;0
destination;#anon-destination1;;a;processed;0
source;s_gsoc2014;;a;processed;0
center;;queued;a;processed;0
global;payload_reallocs;;a;processed;0
global;sdata_updates;;a;processed;0
global;msg_clones;;a;processed;0"""


@pytest.fixture
def orig_env():
    """PATH before the sbin dir is appended."""
    return {"PATH": "/foo:/bar"}


@pytest.fixture
def bin_dir():
    """The syslog-ng sbin dir passed to the module functions."""
    return "/baz"


@pytest.fixture
def mocked_env():
    """PATH after the sbin dir is appended."""
    return {"PATH": "/foo:/bar:/baz"}


@pytest.fixture
def configure_loader_modules():
    return {syslog_ng: {}}
|
||||
|
||||
|
||||
def test_statement_without_options():
    """A Statement with no options renders just the header and closer."""
    statement = syslog_ng.Statement("source", "s_local", options=[])
    built = statement.build()
    assert built == (
        dedent(
            """\
            source s_local {
            };
            """
        )
    )


def test_non_empty_statement():
    """Options render inside the statement body, one block per option."""
    opt_file = syslog_ng.Option("file")
    opt_tcp = syslog_ng.Option("tcp")
    statement = syslog_ng.Statement("source", "s_local", options=[opt_file, opt_tcp])
    built = statement.build()
    assert built == (
        dedent(
            """\
            source s_local {
               file(
               );
               tcp(
               );
            };
            """
        )
    )


def test_option_with_parameters():
    """Simple and typed parameters render inside the option parens."""
    option = syslog_ng.Option("file")
    param_messages = syslog_ng.SimpleParameter('"/var/log/messages"')
    param_syslog = syslog_ng.SimpleParameter()
    param_tls = syslog_ng.TypedParameter()
    param_tls.type = "tls"
    param_syslog.value = '"/var/log/syslog"'
    option.add_parameter(param_messages)
    option.add_parameter(param_syslog)
    option.add_parameter(param_tls)
    built = option.build()
    assert built == (
        dedent(
            """\
            file(
               "/var/log/messages",
               "/var/log/syslog",
               tls(
               )
            );
            """
        )
    )


def test_parameter_with_values():
    """Typed parameter values render nested and comma-separated."""
    param = syslog_ng.TypedParameter()
    param.type = "tls"
    value_key = syslog_ng.TypedParameterValue()
    value_key.type = "key_file"

    value_cert = syslog_ng.TypedParameterValue()
    value_cert.type = "cert_file"

    param.add_value(value_key)
    param.add_value(value_cert)

    built = param.build()
    assert built == (
        dedent(
            """\
            tls(
               key_file(
               ),
               cert_file(
               )
            )"""
        )
    )


def test_value_with_arguments():
    """Arguments of a typed value render one per line, without commas."""
    value = syslog_ng.TypedParameterValue()
    value.type = "key_file"

    arg_one = syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"')
    arg_two = syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"')

    value.add_argument(arg_one)
    value.add_argument(arg_two)

    built = value.build()
    assert built == (
        dedent(
            """\
            key_file(
               "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"
               "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"
            )"""
        )
    )


def test_end_to_end_statement_generation():
    """Build a full TLS source statement and check the rendered config."""
    statement = syslog_ng.Statement("source", "s_tls")

    tcp_option = syslog_ng.Option("tcp")

    ip_param = syslog_ng.TypedParameter("ip")
    ip_param.add_value(syslog_ng.SimpleParameterValue("'192.168.42.2'"))
    tcp_option.add_parameter(ip_param)

    port_param = syslog_ng.TypedParameter("port")
    port_param.add_value(syslog_ng.SimpleParameterValue(514))
    tcp_option.add_parameter(port_param)

    tls_param = syslog_ng.TypedParameter("tls")
    key_file = syslog_ng.TypedParameterValue("key_file")
    key_file.add_argument(
        syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"')
    )
    cert_file = syslog_ng.TypedParameterValue("cert_file")
    cert_file.add_argument(
        syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/cert.d/syslog-ng.cert"')
    )
    peer_verify = syslog_ng.TypedParameterValue("peer_verify")
    peer_verify.add_argument(syslog_ng.Argument("optional-untrusted"))
    tls_param.add_value(key_file)
    tls_param.add_value(cert_file)
    tls_param.add_value(peer_verify)
    tcp_option.add_parameter(tls_param)

    statement.add_child(tcp_option)
    built = statement.build()
    assert built == (
        dedent(
            """\
            source s_tls {
               tcp(
                  ip(
                     '192.168.42.2'
                  ),
                  port(
                     514
                  ),
                  tls(
                     key_file(
                        "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"
                     ),
                     cert_file(
                        "/opt/syslog-ng/etc/syslog-ng/cert.d/syslog-ng.cert"
                     ),
                     peer_verify(
                        optional-untrusted
                     )
                  )
               );
            };
            """
        )
    )
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows(reason="Module not available on Windows")
def test_version(_version, version_output, orig_env, bin_dir, mocked_env):
    """version() parses the version out of ``syslog-ng -V`` output."""
    cmd_ret = {"retcode": 0, "stdout": version_output}
    expected_output = {"retcode": 0, "stdout": _version}
    cmd_args = ["syslog-ng", "-V"]

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.version()
        assert result == expected_output
        run_all_mock.assert_called_once_with(cmd_args, env=None, python_shell=False)

    # With an sbin dir, PATH gets the dir appended in the command env.
    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.version(syslog_ng_sbin_dir=bin_dir)
        assert result == expected_output
        run_all_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False)


@pytest.mark.skip_on_windows(reason="Module not available on Windows")
def test_stats(stats_output, orig_env, bin_dir, mocked_env):
    """stats() returns ``syslog-ng-ctl stats`` output verbatim."""
    cmd_ret = {"retcode": 0, "stdout": stats_output}
    cmd_args = ["syslog-ng-ctl", "stats"]

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.stats()
        assert result == cmd_ret
        run_all_mock.assert_called_once_with(cmd_args, env=None, python_shell=False)

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.stats(syslog_ng_sbin_dir=bin_dir)
        assert result == cmd_ret
        run_all_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False)


@pytest.mark.skip_on_windows(reason="Module not available on Windows")
def test_modules(_modules, version_output, orig_env, bin_dir, mocked_env):
    """modules() extracts the Available-Modules line from ``syslog-ng -V``."""
    cmd_ret = {"retcode": 0, "stdout": version_output}
    expected_output = {"retcode": 0, "stdout": _modules}
    cmd_args = ["syslog-ng", "-V"]

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.modules()
        assert result == expected_output
        run_all_mock.assert_called_once_with(cmd_args, env=None, python_shell=False)

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.modules(syslog_ng_sbin_dir=bin_dir)
        assert result == expected_output
        run_all_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False)


@pytest.mark.skip_on_windows(reason="Module not available on Windows")
def test_config_test(orig_env, bin_dir, mocked_env):
    """config_test() runs ``syslog-ng --syntax-only`` and returns the result."""
    cmd_ret = {"retcode": 0, "stderr": "", "stdout": "Foo"}
    cmd_args = ["syslog-ng", "--syntax-only"]

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.config_test()
        assert result == cmd_ret
        run_all_mock.assert_called_once_with(cmd_args, env=None, python_shell=False)

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        result = syslog_ng.config_test(syslog_ng_sbin_dir=bin_dir)
        assert result == cmd_ret
        run_all_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False)


@pytest.mark.skip_on_windows(reason="Module not available on Windows")
def test_config_test_cfgfile(orig_env, bin_dir, mocked_env):
    """config_test() forwards an explicit --cfgfile to the command line."""
    cfgfile = "/path/to/syslog-ng.conf"
    cmd_ret = {"retcode": 1, "stderr": "Syntax error...", "stdout": ""}
    cmd_args = ["syslog-ng", "--syntax-only", "--cfgfile={}".format(cfgfile)]

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        assert syslog_ng.config_test(cfgfile=cfgfile) == cmd_ret
        run_all_mock.assert_called_once_with(cmd_args, env=None, python_shell=False)

    run_all_mock = MagicMock(return_value=cmd_ret)
    with patch.dict(syslog_ng.__salt__, {"cmd.run_all": run_all_mock}), patch.dict(
        os.environ, orig_env
    ):
        assert (
            syslog_ng.config_test(syslog_ng_sbin_dir=bin_dir, cfgfile=cfgfile)
            == cmd_ret
        )
        run_all_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False)
|
27
tests/pytests/unit/modules/test_uwsgi.py
Normal file
27
tests/pytests/unit/modules/test_uwsgi.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
"""
|
||||
Test cases for salt.modules.uswgi
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.uwsgi as uwsgi
|
||||
from tests.support.mock import MagicMock, Mock, patch
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Load the uwsgi module with a fake ``which`` so __virtual__ passes."""
    with patch("salt.utils.path.which", Mock(return_value="/usr/bin/uwsgi")):
        return {uwsgi: {}}


def test_uwsgi_stats():
    """stats() runs uwsgi --connect-and-read and parses the JSON reply."""
    socket = "127.0.0.1:5050"
    cmd_run_mock = MagicMock(return_value='{"a": 1, "b": 2}')
    with patch.dict(uwsgi.__salt__, {"cmd.run": cmd_run_mock}):
        result = uwsgi.stats(socket)
        cmd_run_mock.assert_called_once_with(
            ["uwsgi", "--connect-and-read", "{}".format(socket)],
            python_shell=False,
        )
        assert result == {"a": 1, "b": 2}
|
167
tests/pytests/unit/modules/test_vagrant.py
Normal file
167
tests/pytests/unit/modules/test_vagrant.py
Normal file
|
@ -0,0 +1,167 @@
|
|||
"""
|
||||
TestCase for the salt.modules.vagrant module.
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.exceptions
|
||||
import salt.modules.vagrant as vagrant
|
||||
import salt.utils.platform
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def local_opts(tmp_path):
|
||||
return {
|
||||
"extension_modules": "",
|
||||
"vagrant_sdb_data": {
|
||||
"driver": "sqlite3",
|
||||
"database": str(tmp_path / "test_vagrant.sqlite"),
|
||||
"table": "sdb",
|
||||
"create_table": True,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(local_opts):
|
||||
return {vagrant: {"__opts__": local_opts}}
|
||||
|
||||
|
||||
def test_vagrant_get_vm_info_not_found():
|
||||
mock_sdb = MagicMock(return_value=None)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}):
|
||||
with pytest.raises(salt.exceptions.SaltInvocationError):
|
||||
vagrant.get_vm_info("thisNameDoesNotExist")
|
||||
|
||||
|
||||
def test_vagrant_init_positional(local_opts, tmp_path):
|
||||
path_nowhere = str(tmp_path / "tmp" / "nowhere")
|
||||
mock_sdb = MagicMock(return_value=None)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}):
|
||||
resp = vagrant.init(
|
||||
"test1",
|
||||
path_nowhere,
|
||||
"onetest",
|
||||
"nobody",
|
||||
False,
|
||||
"french",
|
||||
{"different": "very"},
|
||||
)
|
||||
assert resp.startswith("Name test1 defined")
|
||||
expected = dict(
|
||||
name="test1",
|
||||
cwd=path_nowhere,
|
||||
machine="onetest",
|
||||
runas="nobody",
|
||||
vagrant_provider="french",
|
||||
different="very",
|
||||
)
|
||||
mock_sdb.assert_called_with(
|
||||
"sdb://vagrant_sdb_data/onetest?{}".format(path_nowhere),
|
||||
"test1",
|
||||
local_opts,
|
||||
)
|
||||
mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test1", expected, local_opts)
|
||||
|
||||
|
||||
def test_vagrant_get_vm_info():
|
||||
testdict = {"testone": "one", "machine": "two"}
|
||||
mock_sdb = MagicMock(return_value=testdict)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}):
|
||||
resp = vagrant.get_vm_info("test1")
|
||||
assert resp == testdict
|
||||
|
||||
|
||||
def test_vagrant_init_dict(local_opts):
|
||||
testdict = dict(
|
||||
cwd="/tmp/anywhere",
|
||||
machine="twotest",
|
||||
runas="somebody",
|
||||
vagrant_provider="english",
|
||||
)
|
||||
expected = testdict.copy()
|
||||
expected["name"] = "test2"
|
||||
mock_sdb = MagicMock(return_value=None)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}):
|
||||
vagrant.init("test2", vm=testdict)
|
||||
mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test2", expected, local_opts)
|
||||
|
||||
|
||||
def test_vagrant_init_arg_override(local_opts):
|
||||
testdict = dict(
|
||||
cwd="/tmp/there",
|
||||
machine="treetest",
|
||||
runas="anybody",
|
||||
vagrant_provider="spansh",
|
||||
)
|
||||
mock_sdb = MagicMock(return_value=None)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}):
|
||||
vagrant.init(
|
||||
"test3",
|
||||
cwd="/tmp",
|
||||
machine="threetest",
|
||||
runas="him",
|
||||
vagrant_provider="polish",
|
||||
vm=testdict,
|
||||
)
|
||||
expected = dict(
|
||||
name="test3",
|
||||
cwd="/tmp",
|
||||
machine="threetest",
|
||||
runas="him",
|
||||
vagrant_provider="polish",
|
||||
)
|
||||
mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test3", expected, local_opts)
|
||||
|
||||
|
||||
def test_vagrant_get_ssh_config_fails():
|
||||
mock_sdb = MagicMock(return_value=None)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}):
|
||||
mock_sdb = MagicMock(return_value={})
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}):
|
||||
vagrant.init("test3", cwd="/tmp")
|
||||
with pytest.raises(salt.exceptions.SaltInvocationError):
|
||||
vagrant.get_ssh_config("test3") # has not been started
|
||||
|
||||
|
||||
def test_vagrant_destroy(local_opts, tmp_path):
|
||||
path_mydir = str(tmp_path / "my" / "dir")
|
||||
mock_cmd = MagicMock(return_value={"retcode": 0})
|
||||
with patch.dict(vagrant.__salt__, {"cmd.run_all": mock_cmd}):
|
||||
mock_sdb = MagicMock(return_value=None)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_delete": mock_sdb}):
|
||||
mock_sdb_get = MagicMock(
|
||||
return_value={"machine": "macfour", "cwd": path_mydir}
|
||||
)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb_get}):
|
||||
assert vagrant.destroy("test4")
|
||||
mock_sdb.assert_any_call(
|
||||
"sdb://vagrant_sdb_data/macfour?{}".format(path_mydir),
|
||||
local_opts,
|
||||
)
|
||||
mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test4", local_opts)
|
||||
cmd = "vagrant destroy -f macfour"
|
||||
mock_cmd.assert_called_with(
|
||||
cmd, runas=None, cwd=path_mydir, output_loglevel="info"
|
||||
)
|
||||
|
||||
|
||||
def test_vagrant_start():
|
||||
mock_cmd = MagicMock(return_value={"retcode": 0})
|
||||
with patch.dict(vagrant.__salt__, {"cmd.run_all": mock_cmd}):
|
||||
mock_sdb_get = MagicMock(
|
||||
return_value={
|
||||
"machine": "five",
|
||||
"cwd": "/the/dir",
|
||||
"runas": "me",
|
||||
"vagrant_provider": "him",
|
||||
}
|
||||
)
|
||||
with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb_get}):
|
||||
assert vagrant.start("test5")
|
||||
cmd = "vagrant up five --provider=him"
|
||||
mock_cmd.assert_called_with(
|
||||
cmd, runas="me", cwd="/the/dir", output_loglevel="info"
|
||||
)
|
113
tests/pytests/unit/modules/test_xfs.py
Normal file
113
tests/pytests/unit/modules/test_xfs.py
Normal file
|
@ -0,0 +1,113 @@
|
|||
"""
|
||||
Test cases for salt.modules.xfs
|
||||
"""
|
||||
|
||||
|
||||
import textwrap
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.xfs as xfs
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skip_on_windows(reason="xfs not available on windows"),
|
||||
pytest.mark.skip_on_darwin(reason="xfs not available on darwin."),
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {xfs: {}}
|
||||
|
||||
|
||||
def test_blkid_output():
|
||||
"""
|
||||
Test xfs._blkid_output when there is data
|
||||
"""
|
||||
blkid_export = textwrap.dedent(
|
||||
"""
|
||||
DEVNAME=/dev/sda1
|
||||
UUID=XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
|
||||
TYPE=xfs
|
||||
PARTUUID=YYYYYYYY-YY
|
||||
|
||||
DEVNAME=/dev/sdb1
|
||||
PARTUUID=ZZZZZZZZ-ZZZZ-ZZZZ-ZZZZ-ZZZZZZZZZZZZ
|
||||
"""
|
||||
)
|
||||
# We expect to find only data from /dev/sda1, nothig from
|
||||
# /dev/sdb1
|
||||
assert xfs._blkid_output(blkid_export) == {
|
||||
"/dev/sda1": {
|
||||
"label": None,
|
||||
"partuuid": "YYYYYYYY-YY",
|
||||
"uuid": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_parse_xfs_info():
|
||||
"""
|
||||
Test parsing output from mkfs.xfs.
|
||||
"""
|
||||
data = textwrap.dedent(
|
||||
"""
|
||||
meta-data=/dev/vg00/testvol isize=512 agcount=4, agsize=1310720 blks
|
||||
= sectsz=4096 attr=2, projid32bit=1
|
||||
= crc=1 finobt=1, sparse=1, rmapbt=0
|
||||
= reflink=1
|
||||
data = bsize=4096 blocks=5242880, imaxpct=25
|
||||
= sunit=0 swidth=0 blks
|
||||
naming =version 2 bsize=4096 ascii-ci=0, ftype=1
|
||||
log =internal log bsize=4096 blocks=2560, version=2
|
||||
= sectsz=4096 sunit=1 blks, lazy-count=1
|
||||
realtime =none extsz=4096 blocks=0, rtextents=0
|
||||
Discarding blocks...Done.
|
||||
"""
|
||||
)
|
||||
|
||||
assert xfs._parse_xfs_info(data) == {
|
||||
"meta-data": {
|
||||
"section": "/dev/vg00/testvol",
|
||||
"isize": "512",
|
||||
"agcount": "4",
|
||||
"agsize": "1310720 blks",
|
||||
"sectsz": "4096",
|
||||
"attr": "2",
|
||||
"projid32bit": "1",
|
||||
"crc": "1",
|
||||
"finobt": "1",
|
||||
"sparse": "1",
|
||||
"rmapbt": "0",
|
||||
"reflink": "1",
|
||||
},
|
||||
"data": {
|
||||
"section": "data",
|
||||
"bsize": "4096",
|
||||
"blocks": "5242880",
|
||||
"imaxpct": "25",
|
||||
"sunit": "0",
|
||||
"swidth": "0 blks",
|
||||
},
|
||||
"naming": {
|
||||
"section": "version 2",
|
||||
"bsize": "4096",
|
||||
"ascii-ci": "0",
|
||||
"ftype": "1",
|
||||
},
|
||||
"log": {
|
||||
"section": "internal log",
|
||||
"bsize": "4096",
|
||||
"blocks": "2560",
|
||||
"version": "2",
|
||||
"sectsz": "4096",
|
||||
"sunit": "1 blks",
|
||||
"lazy-count": "1",
|
||||
},
|
||||
"realtime": {
|
||||
"section": "none",
|
||||
"extsz": "4096",
|
||||
"blocks": "0",
|
||||
"rtextents": "0",
|
||||
},
|
||||
}
|
|
@ -10,6 +10,7 @@ import salt.serializers.plist as plistserializer
|
|||
import salt.serializers.python as pythonserializer
|
||||
import salt.serializers.yaml as yamlserializer
|
||||
import salt.states.file as filestate
|
||||
import salt.utils.platform
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -136,3 +137,56 @@ def test_rename(tmp_path):
|
|||
}
|
||||
)
|
||||
assert filestate.rename(name, source) == ret
|
||||
|
||||
mock_lex = MagicMock(side_effect=[True, False, False])
|
||||
with patch.object(os.path, "isabs", mock_t), patch.object(
|
||||
os.path, "lexists", mock_lex
|
||||
), patch.object(os.path, "isdir", mock_f), patch.dict(
|
||||
filestate.__salt__,
|
||||
{"file.makedirs": MagicMock(side_effect=filestate.CommandExecutionError())},
|
||||
), patch.object(
|
||||
os.path, "islink", mock_f
|
||||
), patch.dict(
|
||||
filestate.__opts__, {"test": False}
|
||||
), patch.object(
|
||||
shutil, "move", MagicMock()
|
||||
):
|
||||
if salt.utils.platform.is_windows():
|
||||
comt = "Drive C: is not mapped"
|
||||
else:
|
||||
comt = "Drive is not mapped"
|
||||
ret.update(
|
||||
{
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"result": False,
|
||||
"changes": {},
|
||||
}
|
||||
)
|
||||
assert filestate.rename(name, source, makedirs=True) == ret
|
||||
|
||||
mock_lex = MagicMock(side_effect=[True, False, False])
|
||||
mock_link = str(tmp_path / "salt" / "link")
|
||||
with patch.object(os.path, "isabs", mock_t), patch.object(
|
||||
os.path, "lexists", mock_lex
|
||||
), patch.object(os.path, "isdir", mock_t), patch.object(
|
||||
os.path, "islink", mock_t
|
||||
), patch(
|
||||
"salt.utils.path.readlink", MagicMock(return_value=mock_link)
|
||||
), patch.dict(
|
||||
filestate.__opts__, {"test": False}
|
||||
), patch.object(
|
||||
os, "symlink", MagicMock()
|
||||
), patch.object(
|
||||
os, "unlink", MagicMock()
|
||||
):
|
||||
comt = 'Moved "{}" to "{}"'.format(source, name)
|
||||
ret.update(
|
||||
{
|
||||
"name": name,
|
||||
"comment": comt,
|
||||
"result": True,
|
||||
"changes": {name: source},
|
||||
}
|
||||
)
|
||||
assert filestate.rename(name, source) == ret
|
||||
|
|
137
tests/pytests/unit/states/test_gem.py
Normal file
137
tests/pytests/unit/states/test_gem.py
Normal file
|
@ -0,0 +1,137 @@
|
|||
"""
|
||||
Tests of salt.states.gem
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.states.gem as gem
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {gem: {"__opts__": {"test": False}}}
|
||||
|
||||
|
||||
def test_installed():
|
||||
gems = {"foo": ["1.0"], "bar": ["2.0"]}
|
||||
gem_list = MagicMock(return_value=gems)
|
||||
gem_install_succeeds = MagicMock(return_value=True)
|
||||
gem_install_fails = MagicMock(return_value=False)
|
||||
|
||||
with patch.dict(gem.__salt__, {"gem.list": gem_list}):
|
||||
with patch.dict(gem.__salt__, {"gem.install": gem_install_succeeds}):
|
||||
ret = gem.installed("foo")
|
||||
assert ret["result"] is True
|
||||
ret = gem.installed("quux")
|
||||
assert ret["result"] is True
|
||||
gem_install_succeeds.assert_called_once_with(
|
||||
"quux",
|
||||
pre_releases=False,
|
||||
ruby=None,
|
||||
runas=None,
|
||||
version=None,
|
||||
proxy=None,
|
||||
rdoc=False,
|
||||
source=None,
|
||||
ri=False,
|
||||
gem_bin=None,
|
||||
)
|
||||
|
||||
with patch.dict(gem.__salt__, {"gem.install": gem_install_fails}):
|
||||
ret = gem.installed("quux")
|
||||
assert ret["result"] is False
|
||||
gem_install_fails.assert_called_once_with(
|
||||
"quux",
|
||||
pre_releases=False,
|
||||
ruby=None,
|
||||
runas=None,
|
||||
version=None,
|
||||
proxy=None,
|
||||
rdoc=False,
|
||||
source=None,
|
||||
ri=False,
|
||||
gem_bin=None,
|
||||
)
|
||||
|
||||
|
||||
def test_installed_version():
|
||||
gems = {"foo": ["1.0"], "bar": ["2.0"]}
|
||||
gem_list = MagicMock(return_value=gems)
|
||||
gem_install_succeeds = MagicMock(return_value=True)
|
||||
|
||||
with patch.dict(gem.__salt__, {"gem.list": gem_list}):
|
||||
with patch.dict(gem.__salt__, {"gem.install": gem_install_succeeds}):
|
||||
ret = gem.installed("foo", version=">= 1.0")
|
||||
assert ret["result"] is True
|
||||
assert ret["comment"] == "Installed Gem meets version requirements."
|
||||
|
||||
|
||||
def test_removed():
|
||||
gems = ["foo", "bar"]
|
||||
gem_list = MagicMock(return_value=gems)
|
||||
gem_uninstall_succeeds = MagicMock(return_value=True)
|
||||
gem_uninstall_fails = MagicMock(return_value=False)
|
||||
with patch.dict(gem.__salt__, {"gem.list": gem_list}):
|
||||
with patch.dict(gem.__salt__, {"gem.uninstall": gem_uninstall_succeeds}):
|
||||
ret = gem.removed("quux")
|
||||
assert ret["result"] is True
|
||||
ret = gem.removed("foo")
|
||||
assert ret["result"] is True
|
||||
gem_uninstall_succeeds.assert_called_once_with(
|
||||
"foo", None, runas=None, gem_bin=None
|
||||
)
|
||||
|
||||
with patch.dict(gem.__salt__, {"gem.uninstall": gem_uninstall_fails}):
|
||||
ret = gem.removed("bar")
|
||||
assert ret["result"] is False
|
||||
gem_uninstall_fails.assert_called_once_with(
|
||||
"bar", None, runas=None, gem_bin=None
|
||||
)
|
||||
|
||||
|
||||
def test_sources_add():
|
||||
gem_sources = ["http://foo", "http://bar"]
|
||||
gem_sources_list = MagicMock(return_value=gem_sources)
|
||||
gem_sources_add_succeeds = MagicMock(return_value=True)
|
||||
gem_sources_add_fails = MagicMock(return_value=False)
|
||||
with patch.dict(gem.__salt__, {"gem.sources_list": gem_sources_list}):
|
||||
with patch.dict(gem.__salt__, {"gem.sources_add": gem_sources_add_succeeds}):
|
||||
ret = gem.sources_add("http://foo")
|
||||
assert ret["result"] is True
|
||||
ret = gem.sources_add("http://fui")
|
||||
assert ret["result"] is True
|
||||
gem_sources_add_succeeds.assert_called_once_with(
|
||||
source_uri="http://fui", ruby=None, runas=None
|
||||
)
|
||||
with patch.dict(gem.__salt__, {"gem.sources_add": gem_sources_add_fails}):
|
||||
ret = gem.sources_add("http://fui")
|
||||
assert ret["result"] is False
|
||||
gem_sources_add_fails.assert_called_once_with(
|
||||
source_uri="http://fui", ruby=None, runas=None
|
||||
)
|
||||
|
||||
|
||||
def test_sources_remove():
|
||||
gem_sources = ["http://foo", "http://bar"]
|
||||
gem_sources_list = MagicMock(return_value=gem_sources)
|
||||
gem_sources_remove_succeeds = MagicMock(return_value=True)
|
||||
gem_sources_remove_fails = MagicMock(return_value=False)
|
||||
with patch.dict(gem.__salt__, {"gem.sources_list": gem_sources_list}):
|
||||
with patch.dict(
|
||||
gem.__salt__, {"gem.sources_remove": gem_sources_remove_succeeds}
|
||||
):
|
||||
ret = gem.sources_remove("http://fui")
|
||||
assert ret["result"] is True
|
||||
ret = gem.sources_remove("http://foo")
|
||||
assert ret["result"] is True
|
||||
gem_sources_remove_succeeds.assert_called_once_with(
|
||||
source_uri="http://foo", ruby=None, runas=None
|
||||
)
|
||||
with patch.dict(gem.__salt__, {"gem.sources_remove": gem_sources_remove_fails}):
|
||||
ret = gem.sources_remove("http://bar")
|
||||
assert ret["result"] is False
|
||||
gem_sources_remove_fails.assert_called_once_with(
|
||||
source_uri="http://bar", ruby=None, runas=None
|
||||
)
|
418
tests/pytests/unit/states/test_glusterfs.py
Normal file
418
tests/pytests/unit/states/test_glusterfs.py
Normal file
|
@ -0,0 +1,418 @@
|
|||
"""
|
||||
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
|
||||
|
||||
Test cases for salt.states.glusterfs
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.states.glusterfs as glusterfs
|
||||
import salt.utils.cloud
|
||||
import salt.utils.network
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {glusterfs: {}}
|
||||
|
||||
|
||||
def test_peered():
|
||||
"""
|
||||
Test to verify if node is peered.
|
||||
"""
|
||||
name = "server1"
|
||||
|
||||
ret = {"name": name, "result": True, "comment": "", "changes": {}}
|
||||
|
||||
mock_ip = MagicMock(return_value=["1.2.3.4", "1.2.3.5"])
|
||||
mock_ip6 = MagicMock(return_value=["2001:db8::1"])
|
||||
mock_host_ips = MagicMock(return_value=["1.2.3.5"])
|
||||
mock_peer = MagicMock(return_value=True)
|
||||
mock_status = MagicMock(return_value={"uuid1": {"hostnames": [name]}})
|
||||
|
||||
with patch.dict(
|
||||
glusterfs.__salt__,
|
||||
{"glusterfs.peer_status": mock_status, "glusterfs.peer": mock_peer},
|
||||
):
|
||||
with patch.object(salt.utils.network, "ip_addrs", mock_ip), patch.object(
|
||||
salt.utils.network, "ip_addrs6", mock_ip6
|
||||
), patch.object(salt.utils.network, "host_to_ips", mock_host_ips):
|
||||
comt = "Peering with localhost is not needed"
|
||||
ret.update({"comment": comt})
|
||||
assert glusterfs.peered(name) == ret
|
||||
|
||||
mock_host_ips.return_value = ["127.0.1.1"]
|
||||
comt = "Peering with localhost is not needed"
|
||||
ret.update({"comment": comt})
|
||||
assert glusterfs.peered(name) == ret
|
||||
|
||||
mock_host_ips.return_value = ["2001:db8::1"]
|
||||
assert glusterfs.peered(name) == ret
|
||||
|
||||
mock_host_ips.return_value = ["1.2.3.42"]
|
||||
comt = "Host {} already peered".format(name)
|
||||
ret.update({"comment": comt})
|
||||
assert glusterfs.peered(name) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": False}):
|
||||
old = {"uuid1": {"hostnames": ["other1"]}}
|
||||
new = {
|
||||
"uuid1": {"hostnames": ["other1"]},
|
||||
"uuid2": {"hostnames": ["someAlias", name]},
|
||||
}
|
||||
mock_status.side_effect = [old, new]
|
||||
comt = "Host {} successfully peered".format(name)
|
||||
ret.update({"comment": comt, "changes": {"old": old, "new": new}})
|
||||
assert glusterfs.peered(name) == ret
|
||||
mock_status.side_effect = None
|
||||
|
||||
mock_status.return_value = {"uuid1": {"hostnames": ["other"]}}
|
||||
mock_peer.return_value = False
|
||||
|
||||
ret.update({"result": False})
|
||||
|
||||
comt = "Failed to peer with {}, please check logs for errors".format(
|
||||
name
|
||||
)
|
||||
ret.update({"comment": comt, "changes": {}})
|
||||
assert glusterfs.peered(name) == ret
|
||||
|
||||
comt = "Invalid characters in peer name."
|
||||
ret.update({"comment": comt, "name": ":/"})
|
||||
assert glusterfs.peered(":/") == ret
|
||||
ret.update({"name": name})
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": True}):
|
||||
comt = "Peer {} will be added.".format(name)
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert glusterfs.peered(name) == ret
|
||||
|
||||
|
||||
def test_volume_present():
|
||||
"""
|
||||
Test to ensure that a volume exists
|
||||
"""
|
||||
name = "salt"
|
||||
bricks = ["host1:/brick1"]
|
||||
ret = {"name": name, "result": True, "comment": "", "changes": {}}
|
||||
|
||||
started_info = {name: {"status": "1"}}
|
||||
stopped_info = {name: {"status": "0"}}
|
||||
|
||||
mock_info = MagicMock()
|
||||
mock_list = MagicMock()
|
||||
mock_create = MagicMock()
|
||||
mock_start = MagicMock(return_value=True)
|
||||
|
||||
with patch.dict(
|
||||
glusterfs.__salt__,
|
||||
{
|
||||
"glusterfs.info": mock_info,
|
||||
"glusterfs.list_volumes": mock_list,
|
||||
"glusterfs.create_volume": mock_create,
|
||||
"glusterfs.start_volume": mock_start,
|
||||
},
|
||||
):
|
||||
with patch.dict(glusterfs.__opts__, {"test": False}):
|
||||
mock_list.return_value = [name]
|
||||
mock_info.return_value = started_info
|
||||
comt = "Volume {} already exists and is started".format(name)
|
||||
ret.update({"comment": comt})
|
||||
assert glusterfs.volume_present(name, bricks, start=True) == ret
|
||||
|
||||
mock_info.return_value = stopped_info
|
||||
comt = "Volume {} already exists and is now started".format(name)
|
||||
ret.update(
|
||||
{"comment": comt, "changes": {"old": "stopped", "new": "started"}}
|
||||
)
|
||||
assert glusterfs.volume_present(name, bricks, start=True) == ret
|
||||
|
||||
comt = "Volume {} already exists".format(name)
|
||||
ret.update({"comment": comt, "changes": {}})
|
||||
assert glusterfs.volume_present(name, bricks, start=False) == ret
|
||||
with patch.dict(glusterfs.__opts__, {"test": True}):
|
||||
comt = "Volume {} already exists".format(name)
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert glusterfs.volume_present(name, bricks, start=False) == ret
|
||||
|
||||
comt = "Volume {} already exists and will be started".format(name)
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert glusterfs.volume_present(name, bricks, start=True) == ret
|
||||
|
||||
mock_list.return_value = []
|
||||
comt = "Volume {} will be created".format(name)
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert glusterfs.volume_present(name, bricks, start=False) == ret
|
||||
|
||||
comt = "Volume {} will be created and started".format(name)
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert glusterfs.volume_present(name, bricks, start=True) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": False}):
|
||||
mock_list.side_effect = [[], [name]]
|
||||
comt = "Volume {} is created".format(name)
|
||||
ret.update(
|
||||
{
|
||||
"comment": comt,
|
||||
"result": True,
|
||||
"changes": {"old": [], "new": [name]},
|
||||
}
|
||||
)
|
||||
assert glusterfs.volume_present(name, bricks, start=False) == ret
|
||||
|
||||
mock_list.side_effect = [[], [name]]
|
||||
comt = "Volume {} is created and is now started".format(name)
|
||||
ret.update({"comment": comt, "result": True})
|
||||
assert glusterfs.volume_present(name, bricks, start=True) == ret
|
||||
|
||||
mock_list.side_effect = None
|
||||
mock_list.return_value = []
|
||||
mock_create.return_value = False
|
||||
comt = "Creation of volume {} failed".format(name)
|
||||
ret.update({"comment": comt, "result": False, "changes": {}})
|
||||
assert glusterfs.volume_present(name, bricks) == ret
|
||||
|
||||
with patch.object(salt.utils.cloud, "check_name", MagicMock(return_value=True)):
|
||||
comt = "Invalid characters in volume name."
|
||||
ret.update({"comment": comt, "result": False})
|
||||
assert glusterfs.volume_present(name, bricks) == ret
|
||||
|
||||
|
||||
def test_started():
|
||||
"""
|
||||
Test to check if volume has been started
|
||||
"""
|
||||
name = "salt"
|
||||
|
||||
ret = {"name": name, "result": False, "comment": "", "changes": {}}
|
||||
|
||||
started_info = {name: {"status": "1"}}
|
||||
stopped_info = {name: {"status": "0"}}
|
||||
mock_info = MagicMock(return_value={})
|
||||
mock_start = MagicMock(return_value=True)
|
||||
|
||||
with patch.dict(
|
||||
glusterfs.__salt__,
|
||||
{"glusterfs.info": mock_info, "glusterfs.start_volume": mock_start},
|
||||
):
|
||||
comt = "Volume {} does not exist".format(name)
|
||||
ret.update({"comment": comt})
|
||||
assert glusterfs.started(name) == ret
|
||||
|
||||
mock_info.return_value = started_info
|
||||
comt = "Volume {} is already started".format(name)
|
||||
ret.update({"comment": comt, "result": True})
|
||||
assert glusterfs.started(name) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": True}):
|
||||
mock_info.return_value = stopped_info
|
||||
comt = "Volume {} will be started".format(name)
|
||||
ret.update({"comment": comt, "result": None})
|
||||
assert glusterfs.started(name) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": False}):
|
||||
comt = "Volume {} is started".format(name)
|
||||
ret.update(
|
||||
{
|
||||
"comment": comt,
|
||||
"result": True,
|
||||
"change": {"new": "started", "old": "stopped"},
|
||||
}
|
||||
)
|
||||
assert glusterfs.started(name) == ret
|
||||
|
||||
|
||||
def test_add_volume_bricks():
|
||||
"""
|
||||
Test to add brick(s) to an existing volume
|
||||
"""
|
||||
name = "salt"
|
||||
bricks = ["host1:/drive1"]
|
||||
old_bricks = ["host1:/drive2"]
|
||||
|
||||
ret = {"name": name, "result": False, "comment": "", "changes": {}}
|
||||
|
||||
stopped_volinfo = {"salt": {"status": "0"}}
|
||||
volinfo = {"salt": {"status": "1", "bricks": {"brick1": {"path": old_bricks[0]}}}}
|
||||
new_volinfo = {
|
||||
"salt": {
|
||||
"status": "1",
|
||||
"bricks": {
|
||||
"brick1": {"path": old_bricks[0]},
|
||||
"brick2": {"path": bricks[0]},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
mock_info = MagicMock(return_value={})
|
||||
mock_add = MagicMock(side_effect=[False, True])
|
||||
|
||||
with patch.dict(
|
||||
glusterfs.__salt__,
|
||||
{"glusterfs.info": mock_info, "glusterfs.add_volume_bricks": mock_add},
|
||||
):
|
||||
ret.update({"comment": "Volume salt does not exist"})
|
||||
assert glusterfs.add_volume_bricks(name, bricks) == ret
|
||||
|
||||
mock_info.return_value = stopped_volinfo
|
||||
ret.update({"comment": "Volume salt is not started"})
|
||||
assert glusterfs.add_volume_bricks(name, bricks) == ret
|
||||
|
||||
mock_info.return_value = volinfo
|
||||
ret.update({"comment": "Adding bricks to volume salt failed"})
|
||||
assert glusterfs.add_volume_bricks(name, bricks) == ret
|
||||
|
||||
ret.update({"result": True})
|
||||
ret.update({"comment": "Bricks already added in volume salt"})
|
||||
assert glusterfs.add_volume_bricks(name, old_bricks) == ret
|
||||
|
||||
mock_info.side_effect = [volinfo, new_volinfo]
|
||||
ret.update(
|
||||
{
|
||||
"comment": "Bricks successfully added to volume salt",
|
||||
"changes": {"new": bricks + old_bricks, "old": old_bricks},
|
||||
}
|
||||
)
|
||||
# Let's sort ourselves because the test under python 3 sometimes fails
|
||||
# just because of the new changes list order
|
||||
result = glusterfs.add_volume_bricks(name, bricks)
|
||||
ret["changes"]["new"] = sorted(ret["changes"]["new"])
|
||||
result["changes"]["new"] = sorted(result["changes"]["new"])
|
||||
assert result == ret
|
||||
|
||||
|
||||
def test_op_version():
|
||||
"""
|
||||
Test setting the Glusterfs op-version
|
||||
"""
|
||||
name = "salt"
|
||||
current = 30707
|
||||
new = 31200
|
||||
|
||||
ret = {"name": name, "result": False, "comment": "", "changes": {}}
|
||||
|
||||
mock_get_version = MagicMock(return_value={})
|
||||
mock_set_version = MagicMock(return_value={})
|
||||
|
||||
with patch.dict(
|
||||
glusterfs.__salt__,
|
||||
{
|
||||
"glusterfs.get_op_version": mock_get_version,
|
||||
"glusterfs.set_op_version": mock_set_version,
|
||||
},
|
||||
):
|
||||
mock_get_version.return_value = [False, "some error message"]
|
||||
ret.update({"result": False})
|
||||
ret.update({"comment": "some error message"})
|
||||
assert glusterfs.op_version(name, current) == ret
|
||||
|
||||
mock_get_version.return_value = current
|
||||
ret.update({"result": True})
|
||||
ret.update(
|
||||
{
|
||||
"comment": (
|
||||
"Glusterfs cluster.op-version for {} already set to {}".format(
|
||||
name, current
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
assert glusterfs.op_version(name, current) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": True}):
|
||||
mock_set_version.return_value = [False, "Failed to set version"]
|
||||
ret.update({"result": None})
|
||||
ret.update(
|
||||
{
|
||||
"comment": (
|
||||
"An attempt would be made to set the cluster.op-version for"
|
||||
" {} to {}.".format(name, new)
|
||||
)
|
||||
}
|
||||
)
|
||||
assert glusterfs.op_version(name, new) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": False}):
|
||||
mock_set_version.return_value = [False, "Failed to set version"]
|
||||
ret.update({"result": False})
|
||||
ret.update({"comment": "Failed to set version"})
|
||||
assert glusterfs.op_version(name, new) == ret
|
||||
|
||||
mock_set_version.return_value = "some success message"
|
||||
ret.update({"comment": "some success message"})
|
||||
ret.update({"changes": {"old": current, "new": new}})
|
||||
ret.update({"result": True})
|
||||
assert glusterfs.op_version(name, new) == ret
|
||||
|
||||
|
||||
def test_max_op_version():
|
||||
"""
|
||||
Test setting the Glusterfs to its self reported max-op-version
|
||||
"""
|
||||
name = "salt"
|
||||
current = 30707
|
||||
new = 31200
|
||||
|
||||
ret = {"name": name, "result": False, "comment": "", "changes": {}}
|
||||
|
||||
mock_get_version = MagicMock(return_value={})
|
||||
mock_get_max_op_version = MagicMock(return_value={})
|
||||
mock_set_version = MagicMock(return_value={})
|
||||
|
||||
with patch.dict(
|
||||
glusterfs.__salt__,
|
||||
{
|
||||
"glusterfs.get_op_version": mock_get_version,
|
||||
"glusterfs.set_op_version": mock_set_version,
|
||||
"glusterfs.get_max_op_version": mock_get_max_op_version,
|
||||
},
|
||||
):
|
||||
mock_get_version.return_value = [False, "some error message"]
|
||||
ret.update({"result": False})
|
||||
ret.update({"comment": "some error message"})
|
||||
assert glusterfs.max_op_version(name) == ret
|
||||
|
||||
mock_get_version.return_value = current
|
||||
mock_get_max_op_version.return_value = [False, "some error message"]
|
||||
ret.update({"result": False})
|
||||
ret.update({"comment": "some error message"})
|
||||
assert glusterfs.max_op_version(name) == ret
|
||||
|
||||
mock_get_version.return_value = current
|
||||
mock_get_max_op_version.return_value = current
|
||||
ret.update({"result": True})
|
||||
ret.update(
|
||||
{
|
||||
"comment": (
|
||||
"The cluster.op-version is already set to the"
|
||||
" cluster.max-op-version of {}".format(current)
|
||||
)
|
||||
}
|
||||
)
|
||||
assert glusterfs.max_op_version(name) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": True}):
|
||||
mock_get_max_op_version.return_value = new
|
||||
ret.update({"result": None})
|
||||
ret.update(
|
||||
{
|
||||
"comment": (
|
||||
"An attempt would be made to set the cluster.op-version"
|
||||
" to {}.".format(new)
|
||||
)
|
||||
}
|
||||
)
|
||||
assert glusterfs.max_op_version(name) == ret
|
||||
|
||||
with patch.dict(glusterfs.__opts__, {"test": False}):
|
||||
mock_set_version.return_value = [False, "Failed to set version"]
|
||||
ret.update({"result": False})
|
||||
ret.update({"comment": "Failed to set version"})
|
||||
assert glusterfs.max_op_version(name) == ret
|
||||
|
||||
mock_set_version.return_value = "some success message"
|
||||
ret.update({"comment": "some success message"})
|
||||
ret.update({"changes": {"old": current, "new": new}})
|
||||
ret.update({"result": True})
|
||||
assert glusterfs.max_op_version(name) == ret
|
1508
tests/support/pkg.py
Normal file
1508
tests/support/pkg.py
Normal file
File diff suppressed because it is too large
Load diff
|
@ -171,6 +171,31 @@ def remove_stale_minion_key(master, minion_id):
|
|||
log.debug("The minion(id=%r) key was not found at %s", minion_id, key_path)
|
||||
|
||||
|
||||
@pytest.helpers.register
|
||||
def remove_stale_master_key(master):
|
||||
keys_path = os.path.join(master.config["pki_dir"], "master")
|
||||
for key_name in ("master.pem", "master.pub"):
|
||||
key_path = os.path.join(keys_path, key_name)
|
||||
if os.path.exists(key_path):
|
||||
os.unlink(key_path)
|
||||
else:
|
||||
log.debug(
|
||||
"The master(id=%r) %s key was not found at %s",
|
||||
master.id,
|
||||
key_name,
|
||||
key_path,
|
||||
)
|
||||
key_path = os.path.join(master.config["pki_dir"], "minion", "minion_master.pub")
|
||||
if os.path.exists(key_path):
|
||||
os.unlink(key_path)
|
||||
else:
|
||||
log.debug(
|
||||
"The master(id=%r) minion_master.pub key was not found at %s",
|
||||
master.id,
|
||||
key_path,
|
||||
)
|
||||
|
||||
|
||||
@pytest.helpers.register
|
||||
def remove_stale_proxy_minion_cache_file(proxy_minion, minion_id=None):
|
||||
cachefile = os.path.join(
|
||||
|
|
Loading…
Add table
Reference in a new issue