mirror of
https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00
Merge pull request #62877 from MKLeb/pytest/runners-unit-tests
Migrate runners unit tests to pytest
This commit is contained in:
commit
9d7307332f
25 changed files with 1284 additions and 1309 deletions
110
tests/pytests/unit/runners/test_asam.py
Normal file
110
tests/pytests/unit/runners/test_asam.py
Normal file
|
@ -0,0 +1,110 @@
|
|||
"""
|
||||
tests.unit.runners.test_asam
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Unit tests for the asam runner
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.asam as asam
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = {
|
||||
"asam": {
|
||||
"prov1.domain.com": {
|
||||
"username": "TheUsername",
|
||||
"password": "ThePassword",
|
||||
}
|
||||
}
|
||||
}
|
||||
return {asam: {"__opts__": opts}}
|
||||
|
||||
|
||||
def test_add_platform():
|
||||
parse_html_content = MagicMock()
|
||||
get_platform_set_name = MagicMock(return_value="plat-foo")
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platformset_name", get_platform_set_name
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.add_platform("plat-foo-2", "plat-foo", "prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformSetConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={"manual": "false"},
|
||||
verify=True,
|
||||
)
|
||||
|
||||
|
||||
def test_remove_platform():
|
||||
parse_html_content = MagicMock()
|
||||
get_platform_set_name = MagicMock(return_value="plat-foo")
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platformset_name", get_platform_set_name
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.remove_platform("plat-foo", "prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={
|
||||
"manual": "false",
|
||||
"platformName": "plat-foo",
|
||||
"platformSetName": "plat-foo",
|
||||
"postType": "platformRemove",
|
||||
"Submit": "Yes",
|
||||
},
|
||||
verify=True,
|
||||
)
|
||||
|
||||
|
||||
def test_list_platforms():
|
||||
parse_html_content = MagicMock()
|
||||
get_platforms = MagicMock(return_value=["plat-foo", "plat-bar"])
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platforms", get_platforms
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.list_platforms("prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={"manual": "false"},
|
||||
verify=True,
|
||||
)
|
||||
|
||||
|
||||
def test_list_platform_sets():
|
||||
parse_html_content = MagicMock()
|
||||
get_platform_sets = MagicMock(return_value=["plat-foo", "plat-bar"])
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platforms", get_platform_sets
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.list_platform_sets("prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformSetConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={"manual": "false"},
|
||||
verify=True,
|
||||
)
|
31
tests/pytests/unit/runners/test_bgp.py
Normal file
31
tests/pytests/unit/runners/test_bgp.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
"""
|
||||
Test the bgp runner
|
||||
"""
|
||||
import pytest
|
||||
|
||||
import salt.runners.bgp as bgp
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(
|
||||
not bgp.HAS_NAPALM, reason="napalm module required for this test"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
bgp: {
|
||||
"__opts__": {
|
||||
"optimization_order": [0, 1, 2],
|
||||
"renderer": "yaml",
|
||||
"renderer_blacklist": [],
|
||||
"renderer_whitelist": [],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_neighbors():
|
||||
ret = bgp.neighbors()
|
||||
assert ret == []
|
43
tests/pytests/unit/runners/test_cache.py
Normal file
43
tests/pytests/unit/runners/test_cache.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
"""
|
||||
unit tests for the cache runner
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.cache as cache
|
||||
import salt.utils.master
|
||||
from tests.support.mock import patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(tmp_path):
|
||||
return {
|
||||
cache: {
|
||||
"__opts__": {
|
||||
"cache": "localfs",
|
||||
"pki_dir": str(tmp_path),
|
||||
"key_cache": True,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_grains():
|
||||
"""
|
||||
test cache.grains runner
|
||||
"""
|
||||
mock_minion = ["Larry"]
|
||||
mock_ret = {}
|
||||
assert cache.grains(tgt="*", minion=mock_minion) == mock_ret
|
||||
|
||||
mock_data = "grain stuff"
|
||||
|
||||
class MockMaster:
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def get_minion_grains(self):
|
||||
return mock_data
|
||||
|
||||
with patch.object(salt.utils.master, "MasterPillarUtil", MockMaster):
|
||||
assert cache.grains(tgt="*") == mock_data
|
135
tests/pytests/unit/runners/test_fileserver.py
Normal file
135
tests/pytests/unit/runners/test_fileserver.py
Normal file
|
@ -0,0 +1,135 @@
|
|||
"""
|
||||
unit tests for the fileserver runner
|
||||
"""
|
||||
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.loader
|
||||
import salt.runners.fileserver as fileserver
|
||||
import salt.utils.files
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
class DummyFS:
|
||||
"""
|
||||
Dummy object to provide the attributes needed to run unit tests
|
||||
"""
|
||||
|
||||
def __init__(self, backends):
|
||||
self.backends = backends
|
||||
|
||||
def keys(self):
|
||||
return ["{}.envs".format(x) for x in self.backends]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cachedir(tmp_path):
|
||||
return tmp_path / "cache"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {fileserver: {"__opts__": {"extension_modules": ""}}}
|
||||
|
||||
|
||||
def _make_file_lists_cache(cachedir, backends):
|
||||
"""
|
||||
Create some dummy files to represent file list caches, as well as other
|
||||
files that aren't file list caches, so that we can confirm that *only*
|
||||
the cache files are touched. Create a dir for each configured backend,
|
||||
as well as for the roots backend (which is *not* configured as a
|
||||
backend in this test), so that we can ensure that its cache is left
|
||||
alone.
|
||||
"""
|
||||
for back in backends:
|
||||
back_cachedir = cachedir / "file_lists" / back
|
||||
back_cachedir.mkdir(parents=True, exist_ok=True)
|
||||
for filename in ("base.p", "dev.p", "foo.txt"):
|
||||
(back_cachedir / filename).touch()
|
||||
|
||||
|
||||
def test_clear_file_list_cache_vcs(cachedir):
|
||||
"""
|
||||
Test that VCS backends are cleared irrespective of whether they are
|
||||
configured as gitfs/git, hgfs/hg, svnfs/svn.
|
||||
"""
|
||||
# Mixture of VCS backends specified with and without "fs" at the end,
|
||||
# to confirm that the correct dirs are cleared.
|
||||
backends = ["gitfs", "hg", "svnfs"]
|
||||
opts = {
|
||||
"fileserver_backend": backends,
|
||||
"cachedir": str(cachedir),
|
||||
}
|
||||
mock_fs = DummyFS(backends)
|
||||
|
||||
_make_file_lists_cache(cachedir, backends + ["roots"])
|
||||
|
||||
with patch.dict(fileserver.__opts__, opts), patch.object(
|
||||
salt.loader, "fileserver", MagicMock(return_value=mock_fs)
|
||||
):
|
||||
cleared = fileserver.clear_file_list_cache()
|
||||
|
||||
# Make sure the return data matches what you'd expect
|
||||
expected = {
|
||||
"gitfs": ["base", "dev"],
|
||||
"hg": ["base", "dev"],
|
||||
"svnfs": ["base", "dev"],
|
||||
}
|
||||
assert cleared == expected, cleared
|
||||
|
||||
# Trust, but verify! Check that the correct files are actually gone
|
||||
assert not (cachedir / "file_lists" / "gitfs" / "base.p").exists()
|
||||
assert not (cachedir / "file_lists" / "gitfs" / "dev.p").exists()
|
||||
assert not (cachedir / "file_lists" / "hg" / "base.p").exists()
|
||||
assert not (cachedir / "file_lists" / "gitfs" / "dev.p").exists()
|
||||
assert not (cachedir / "file_lists" / "hg" / "base.p").exists()
|
||||
assert not (cachedir / "file_lists" / "svnfs" / "dev.p").exists()
|
||||
|
||||
# These files *should* exist and shouldn't have been cleaned
|
||||
assert (cachedir / "file_lists" / "gitfs" / "foo.txt").exists()
|
||||
assert (cachedir / "file_lists" / "hg" / "foo.txt").exists()
|
||||
assert (cachedir / "file_lists" / "svnfs" / "foo.txt").exists()
|
||||
assert (cachedir / "file_lists" / "roots" / "base.p").exists()
|
||||
assert (cachedir / "file_lists" / "roots" / "dev.p").exists()
|
||||
assert (cachedir / "file_lists" / "roots" / "foo.txt").exists()
|
||||
|
||||
|
||||
def test_clear_file_list_cache_vcs_limited(cachedir):
|
||||
"""
|
||||
Test the arguments to limit what is cleared
|
||||
"""
|
||||
# Mixture of VCS backends specified with and without "fs" at the end,
|
||||
# to confirm that the correct dirs are cleared.
|
||||
backends = ["gitfs", "hg", "svnfs"]
|
||||
opts = {
|
||||
"fileserver_backend": backends,
|
||||
"cachedir": str(cachedir),
|
||||
}
|
||||
mock_fs = DummyFS(backends)
|
||||
|
||||
_make_file_lists_cache(cachedir, backends + ["roots"])
|
||||
|
||||
with patch.dict(fileserver.__opts__, opts), patch.object(
|
||||
salt.loader, "fileserver", MagicMock(return_value=mock_fs)
|
||||
):
|
||||
cleared = fileserver.clear_file_list_cache(saltenv="base", backend="gitfs")
|
||||
|
||||
expected = {"gitfs": ["base"]}
|
||||
assert cleared == expected, cleared
|
||||
|
||||
# Trust, but verify! Check that the correct files are actually gone
|
||||
assert not (cachedir / "file_lists" / "gitfs" / "base.p").exists()
|
||||
|
||||
# These files *should* exist and shouldn't have been cleaned
|
||||
assert (cachedir / "file_lists" / "gitfs" / "dev.p").exists()
|
||||
assert (cachedir / "file_lists" / "gitfs" / "foo.txt").exists()
|
||||
assert (cachedir / "file_lists" / "hg" / "base.p").exists()
|
||||
assert (cachedir / "file_lists" / "hg" / "dev.p").exists()
|
||||
assert (cachedir / "file_lists" / "hg" / "foo.txt").exists()
|
||||
assert (cachedir / "file_lists" / "svnfs" / "base.p").exists()
|
||||
assert (cachedir / "file_lists" / "svnfs" / "dev.p").exists()
|
||||
assert (cachedir / "file_lists" / "svnfs" / "foo.txt").exists()
|
||||
assert (cachedir / "file_lists" / "roots" / "base.p").exists()
|
||||
assert (cachedir / "file_lists" / "roots" / "dev.p").exists()
|
||||
assert (cachedir / "file_lists" / "roots" / "foo.txt").exists()
|
71
tests/pytests/unit/runners/test_git_pillar.py
Normal file
71
tests/pytests/unit/runners/test_git_pillar.py
Normal file
|
@ -0,0 +1,71 @@
|
|||
"""
|
||||
unit tests for the git_pillar runner
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.git_pillar as git_pillar
|
||||
import salt.utils.files
|
||||
import salt.utils.gitfs
|
||||
from tests.support.gitfs import _OPTS
|
||||
from tests.support.mock import patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cachedir(tmp_path):
|
||||
return tmp_path / "cache"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(cachedir):
|
||||
opts = _OPTS.copy()
|
||||
opts["cachedir"] = str(cachedir)
|
||||
opts["verified_git_pillar_provider"] = "gitfoo"
|
||||
opts["ext_pillar"] = [
|
||||
{
|
||||
"git": [
|
||||
"master https://someurl/some",
|
||||
{"dev https://otherurl/other": [{"name": "somename"}]},
|
||||
]
|
||||
}
|
||||
]
|
||||
return {git_pillar: {"__opts__": opts}}
|
||||
|
||||
|
||||
def test_update():
|
||||
"""
|
||||
test git_pillar.update
|
||||
"""
|
||||
|
||||
class MockGitProvider(
|
||||
salt.utils.gitfs.GitProvider
|
||||
): # pylint: disable=abstract-method
|
||||
def init_remote(self):
|
||||
new = False
|
||||
self.repo = True
|
||||
return new
|
||||
|
||||
def fetch(self):
|
||||
return True
|
||||
|
||||
def clear_lock(self, lock_type="update"):
|
||||
pass # return success, failed
|
||||
|
||||
git_providers = {"gitfoo": MockGitProvider}
|
||||
|
||||
repo1 = {"master https://someurl/some": True}
|
||||
repo2 = {"dev https://otherurl/other": True}
|
||||
all_repos = {
|
||||
"master https://someurl/some": True,
|
||||
"dev https://otherurl/other": True,
|
||||
}
|
||||
with patch.object(salt.utils.gitfs, "GIT_PROVIDERS", git_providers):
|
||||
assert git_pillar.update() == all_repos
|
||||
assert git_pillar.update(branch="master") == repo1
|
||||
assert git_pillar.update(branch="dev") == repo2
|
||||
assert git_pillar.update(repo="somename") == repo2
|
71
tests/pytests/unit/runners/test_jobs.py
Normal file
71
tests/pytests/unit/runners/test_jobs.py
Normal file
|
@ -0,0 +1,71 @@
|
|||
"""
|
||||
unit tests for the jobs runner
|
||||
"""
|
||||
import pytest
|
||||
|
||||
import salt.minion
|
||||
import salt.runners.jobs as jobs
|
||||
from tests.support.mock import patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
jobs: {"__opts__": {"ext_job_cache": None, "master_job_cache": "local_cache"}}
|
||||
}
|
||||
|
||||
|
||||
def test_list_jobs_with_search_target():
|
||||
"""
|
||||
test jobs.list_jobs runner with search_target args
|
||||
"""
|
||||
mock_jobs_cache = {
|
||||
"20160524035503086853": {
|
||||
"Arguments": [],
|
||||
"Function": "test.ping",
|
||||
"StartTime": "2016, May 24 03:55:03.086853",
|
||||
"Target": "node-1-1.com",
|
||||
"Target-type": "glob",
|
||||
"User": "root",
|
||||
},
|
||||
"20160524035524895387": {
|
||||
"Arguments": [],
|
||||
"Function": "test.ping",
|
||||
"StartTime": "2016, May 24 03:55:24.895387",
|
||||
"Target": ["node-1-2.com", "node-1-1.com"],
|
||||
"Target-type": "list",
|
||||
"User": "sudo_ubuntu",
|
||||
},
|
||||
}
|
||||
|
||||
def return_mock_jobs():
|
||||
return mock_jobs_cache
|
||||
|
||||
class MockMasterMinion:
|
||||
|
||||
returners = {"local_cache.get_jids": return_mock_jobs}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
returns = {
|
||||
"all": mock_jobs_cache,
|
||||
"node-1-1.com": mock_jobs_cache,
|
||||
"node-1-2.com": {
|
||||
"20160524035524895387": mock_jobs_cache["20160524035524895387"]
|
||||
},
|
||||
"non-existant": {},
|
||||
}
|
||||
|
||||
with patch.object(salt.minion, "MasterMinion", MockMasterMinion):
|
||||
assert jobs.list_jobs() == returns["all"]
|
||||
|
||||
assert (
|
||||
jobs.list_jobs(search_target=["node-1-1*", "node-1-2*"]) == returns["all"]
|
||||
)
|
||||
|
||||
assert jobs.list_jobs(search_target="node-1-1.com") == returns["node-1-1.com"]
|
||||
|
||||
assert jobs.list_jobs(search_target="node-1-2.com") == returns["node-1-2.com"]
|
||||
|
||||
assert jobs.list_jobs(search_target="non-existant") == returns["non-existant"]
|
17
tests/pytests/unit/runners/test_manage.py
Normal file
17
tests/pytests/unit/runners/test_manage.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
import pytest
|
||||
|
||||
from salt.runners import manage
|
||||
|
||||
|
||||
def test_deprecation_58638():
|
||||
# check that type error will be raised
|
||||
pytest.raises(TypeError, manage.list_state, show_ipv4="data")
|
||||
|
||||
# check that show_ipv4 will raise an error
|
||||
try:
|
||||
manage.list_state(show_ipv4="data") # pylint: disable=unexpected-keyword-arg
|
||||
except TypeError as no_show_ipv4:
|
||||
assert (
|
||||
str(no_show_ipv4)
|
||||
== "list_state() got an unexpected keyword argument 'show_ipv4'"
|
||||
)
|
57
tests/pytests/unit/runners/test_net.py
Normal file
57
tests/pytests/unit/runners/test_net.py
Normal file
|
@ -0,0 +1,57 @@
|
|||
import pytest
|
||||
|
||||
import salt.runners.net as net
|
||||
from tests.support.mock import MagicMock
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(
|
||||
not net.HAS_NAPALM, reason="napalm module required for this test"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(tmp_path):
|
||||
mock_get = MagicMock(return_value={})
|
||||
return {
|
||||
net: {
|
||||
"__opts__": {
|
||||
"optimization_order": [0, 1, 2],
|
||||
"renderer": "yaml",
|
||||
"renderer_blacklist": [],
|
||||
"renderer_whitelist": [],
|
||||
"extension_modules": str(tmp_path),
|
||||
},
|
||||
"__salt__": {"mine.get": mock_get},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_interfaces():
|
||||
ret = net.interfaces()
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_findarp():
|
||||
ret = net.findarp()
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_findmac():
|
||||
ret = net.findmac()
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_lldp():
|
||||
ret = net.lldp()
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_find():
|
||||
ret = net.find("")
|
||||
assert {} == ret
|
||||
|
||||
|
||||
def test_multi_find():
|
||||
ret = net.multi_find()
|
||||
assert ret is None
|
50
tests/pytests/unit/runners/test_queue.py
Normal file
50
tests/pytests/unit/runners/test_queue.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
"""
|
||||
unit tests for the cache runner
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.queue as queue_mod
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(tmp_path):
|
||||
return {
|
||||
queue_mod: {
|
||||
"__opts__": {
|
||||
"sock_dir": str(tmp_path / "queue-runner-sock-dir"),
|
||||
"transport": "zeromq",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_insert_runner():
|
||||
queue_insert = MagicMock(return_value=True)
|
||||
with patch.object(queue_mod, "insert", queue_insert):
|
||||
queue_mod.insert_runner("test.stdout_print", queue="salt")
|
||||
expected_call = {
|
||||
"queue": "salt",
|
||||
"items": {"fun": "test.stdout_print", "args": [], "kwargs": {}},
|
||||
"backend": "pgjsonb",
|
||||
}
|
||||
queue_insert.assert_called_once_with(**expected_call)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_process_runner():
|
||||
ret = [{"fun": "test.stdout_print", "args": [], "kwargs": {}}]
|
||||
|
||||
queue_pop = MagicMock(return_value=ret)
|
||||
test_stdout_print = MagicMock(return_value=True)
|
||||
with patch.dict(queue_mod.__salt__, {"test.stdout_print": test_stdout_print}):
|
||||
with patch.object(queue_mod, "pop", queue_pop):
|
||||
queue_mod.process_runner(queue="salt")
|
||||
queue_pop.assert_called_once_with(
|
||||
is_runner=True, queue="salt", quantity=1, backend="pgjsonb"
|
||||
)
|
||||
test_stdout_print.assert_called_once_with()
|
||||
queue_pop.assert_called_once_with(
|
||||
is_runner=True, queue="salt", quantity=1, backend="pgjsonb"
|
||||
)
|
303
tests/pytests/unit/runners/test_reactor.py
Normal file
303
tests/pytests/unit/runners/test_reactor.py
Normal file
|
@ -0,0 +1,303 @@
|
|||
"""
|
||||
unit tests for the reactor runner
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.reactor as reactor
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.utils.event import SaltEvent
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MockEvent:
|
||||
"""
|
||||
Mock event class
|
||||
"""
|
||||
|
||||
flag = None
|
||||
|
||||
def __init__(self):
|
||||
self.full = None
|
||||
|
||||
def get_event(self, wait, tag):
|
||||
"""
|
||||
Mock get_event method
|
||||
"""
|
||||
data = []
|
||||
return {"tag": tag, "data": data}
|
||||
|
||||
def fire_event(self, data, tag):
|
||||
"""
|
||||
Mock get_event method
|
||||
"""
|
||||
return {"tag": tag, "data": data}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
reactor: {
|
||||
"__opts__": {
|
||||
"reactor": [],
|
||||
"engines": [],
|
||||
"id": "master_id",
|
||||
"sock_dir": "/var/run/salt/master",
|
||||
"transport": "zeromq",
|
||||
},
|
||||
"__jid_event__": MockEvent(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_list():
|
||||
"""
|
||||
test reactor.list runner
|
||||
"""
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.list_()
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts = {"engines": []}
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.list_()
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts["engines"] = [
|
||||
{
|
||||
"reactor": {
|
||||
"refresh_interval": 60,
|
||||
"worker_threads": 10,
|
||||
"worker_hwm": 10000,
|
||||
}
|
||||
}
|
||||
]
|
||||
event_returns = {
|
||||
"reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
|
||||
"_stamp": "2020-09-04T16:51:52.577711",
|
||||
}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.list_()
|
||||
assert {"test_event/*": ["/srv/reactors/reactor.sls"]} in ret
|
||||
|
||||
event_returns = {
|
||||
"_stamp": "2020-09-04T16:51:52.577711",
|
||||
}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.list_()
|
||||
assert ret is None
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts["reactor"] = [{"test_event/*": ["/srv/reactors/reactor.sls"]}]
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
event_returns = {
|
||||
"reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
|
||||
"_stamp": "2020-09-04T16:51:52.577711",
|
||||
}
|
||||
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.list_()
|
||||
assert {"test_event/*": ["/srv/reactors/reactor.sls"]} in ret
|
||||
|
||||
|
||||
def test_add():
|
||||
"""
|
||||
test reactor.add runner
|
||||
"""
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.add(
|
||||
"salt/cloud/*/destroyed", reactors="/srv/reactor/destroy/*.sls"
|
||||
)
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts = {"engines": []}
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.add(
|
||||
"salt/cloud/*/destroyed", reactors="/srv/reactor/destroy/*.sls"
|
||||
)
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts["engines"] = [
|
||||
{
|
||||
"reactor": {
|
||||
"refresh_interval": 60,
|
||||
"worker_threads": 10,
|
||||
"worker_hwm": 10000,
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
event_returns = {
|
||||
"reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
|
||||
"result": {"status": True, "comment": "Reactor added."},
|
||||
"_stamp": "2020-09-04T17:45:33.206408",
|
||||
}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.add("test_event/*", "/srv/reactor/reactor.sls")
|
||||
assert "status" in ret
|
||||
assert ret["status"]
|
||||
assert "Reactor added." == ret["comment"]
|
||||
|
||||
event_returns = {
|
||||
"reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
|
||||
"_stamp": "2020-09-04T17:45:33.206408",
|
||||
}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.add("test_event/*", "/srv/reactor/reactor.sls")
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_delete():
|
||||
"""
|
||||
test reactor.delete runner
|
||||
"""
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.delete("salt/cloud/*/destroyed")
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts = {"engines": []}
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.delete("salt/cloud/*/destroyed")
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts["engines"] = [
|
||||
{
|
||||
"reactor": {
|
||||
"refresh_interval": 60,
|
||||
"worker_threads": 10,
|
||||
"worker_hwm": 10000,
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
event_returns = {
|
||||
"reactors": [{"bot/*": ["/srv/reactors/bot.sls"]}],
|
||||
"result": {"status": True, "comment": "Reactor deleted."},
|
||||
"_stamp": "2020-09-04T18:15:41.586552",
|
||||
}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.delete("test_event/*")
|
||||
assert "status" in ret
|
||||
assert ret["status"]
|
||||
assert "Reactor deleted." == ret["comment"]
|
||||
|
||||
event_returns = {
|
||||
"reactors": [{"bot/*": ["/srv/reactors/bot.sls"]}],
|
||||
"_stamp": "2020-09-04T18:15:41.586552",
|
||||
}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.delete("test_event/*")
|
||||
assert ret is None
|
||||
|
||||
|
||||
def test_is_leader():
|
||||
"""
|
||||
test reactor.is_leader runner
|
||||
"""
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.is_leader()
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts = {"engines": []}
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.is_leader()
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts["engines"] = [
|
||||
{
|
||||
"reactor": {
|
||||
"refresh_interval": 60,
|
||||
"worker_threads": 10,
|
||||
"worker_hwm": 10000,
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
event_returns = {"result": True, "_stamp": "2020-09-04T18:32:10.004490"}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.is_leader()
|
||||
assert ret
|
||||
|
||||
|
||||
def test_set_leader():
|
||||
"""
|
||||
test reactor.set_leader runner
|
||||
"""
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.set_leader()
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts = {"engines": []}
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with pytest.raises(CommandExecutionError) as excinfo:
|
||||
ret = reactor.set_leader()
|
||||
assert excinfo.value.error == "Reactor system is not running."
|
||||
|
||||
mock_opts = {}
|
||||
mock_opts["engines"] = [
|
||||
{
|
||||
"reactor": {
|
||||
"refresh_interval": 60,
|
||||
"worker_threads": 10,
|
||||
"worker_hwm": 10000,
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
event_returns = {"result": True, "_stamp": "2020-09-04T18:32:10.004490"}
|
||||
|
||||
with patch.dict(reactor.__opts__, mock_opts):
|
||||
with patch.object(SaltEvent, "get_event", return_value=event_returns):
|
||||
with patch("salt.utils.master.get_master_key") as get_master_key:
|
||||
get_master_key.retun_value = MagicMock(retun_value="master_key")
|
||||
ret = reactor.set_leader()
|
||||
assert ret
|
76
tests/pytests/unit/runners/vault/test_app_role_auth.py
Normal file
76
tests/pytests/unit/runners/vault/test_app_role_auth.py
Normal file
|
@ -0,0 +1,76 @@
|
|||
"""
|
||||
Unit tests for the Vault runner
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.vault as vault
|
||||
from tests.support.mock import ANY, MagicMock, Mock, call, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _mock_json_response(data, status_code=200, reason=""):
|
||||
"""
|
||||
Mock helper for http response
|
||||
"""
|
||||
response = MagicMock()
|
||||
response.json = MagicMock(return_value=data)
|
||||
response.status_code = status_code
|
||||
response.reason = reason
|
||||
return Mock(return_value=response)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
sig_valid_mock = patch(
|
||||
"salt.runners.vault._validate_signature", MagicMock(return_value=None)
|
||||
)
|
||||
token_url_mock = patch(
|
||||
"salt.runners.vault._get_token_create_url",
|
||||
MagicMock(return_value="http://fake_url"),
|
||||
)
|
||||
with sig_valid_mock, token_url_mock:
|
||||
yield {
|
||||
vault: {
|
||||
"__opts__": {
|
||||
"vault": {
|
||||
"url": "http://127.0.0.1",
|
||||
"auth": {
|
||||
"method": "approle",
|
||||
"role_id": "role",
|
||||
"secret_id": "secret",
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_generate_token():
|
||||
"""
|
||||
Basic test for test_generate_token with approle (two vault calls)
|
||||
"""
|
||||
mock = _mock_json_response(
|
||||
{"auth": {"client_token": "test", "renewable": False, "lease_duration": 0}}
|
||||
)
|
||||
with patch("requests.post", mock):
|
||||
result = vault.generate_token("test-minion", "signature")
|
||||
log.debug("generate_token result: %s", result)
|
||||
assert isinstance(result, dict)
|
||||
assert "error" not in result
|
||||
assert "token" in result
|
||||
assert result["token"] == "test"
|
||||
calls = [
|
||||
call(
|
||||
"http://127.0.0.1/v1/auth/approle/login",
|
||||
headers=ANY,
|
||||
json=ANY,
|
||||
verify=ANY,
|
||||
),
|
||||
call("http://fake_url", headers=ANY, json=ANY, verify=ANY),
|
||||
]
|
||||
mock.assert_has_calls(calls)
|
151
tests/pytests/unit/runners/vault/test_token_auth.py
Normal file
151
tests/pytests/unit/runners/vault/test_token_auth.py
Normal file
|
@ -0,0 +1,151 @@
|
|||
"""
|
||||
Unit tests for the Vault runner
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.vault as vault
|
||||
from tests.support.mock import ANY, MagicMock, Mock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _mock_json_response(data, status_code=200, reason=""):
|
||||
"""
|
||||
Mock helper for http response
|
||||
"""
|
||||
response = MagicMock()
|
||||
response.json = MagicMock(return_value=data)
|
||||
response.status_code = status_code
|
||||
response.reason = reason
|
||||
return Mock(return_value=response)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
sig_valid_mock = patch(
|
||||
"salt.runners.vault._validate_signature", MagicMock(return_value=None)
|
||||
)
|
||||
token_url_mock = patch(
|
||||
"salt.runners.vault._get_token_create_url",
|
||||
MagicMock(return_value="http://fake_url"),
|
||||
)
|
||||
with sig_valid_mock, token_url_mock:
|
||||
yield {
|
||||
vault: {
|
||||
"__opts__": {
|
||||
"vault": {
|
||||
"url": "http://127.0.0.1",
|
||||
"auth": {
|
||||
"token": "test",
|
||||
"method": "token",
|
||||
"allow_minion_override": True,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def test_generate_token():
|
||||
"""
|
||||
Basic tests for test_generate_token: all exits
|
||||
"""
|
||||
mock = _mock_json_response(
|
||||
{"auth": {"client_token": "test", "renewable": False, "lease_duration": 0}}
|
||||
)
|
||||
with patch("requests.post", mock):
|
||||
result = vault.generate_token("test-minion", "signature")
|
||||
log.debug("generate_token result: %s", result)
|
||||
assert isinstance(result, dict)
|
||||
assert "error" not in result
|
||||
assert "token" in result
|
||||
assert result["token"] == "test"
|
||||
mock.assert_called_with("http://fake_url", headers=ANY, json=ANY, verify=ANY)
|
||||
|
||||
# Test uses
|
||||
num_uses = 6
|
||||
result = vault.generate_token("test-minion", "signature", uses=num_uses)
|
||||
assert "uses" in result
|
||||
assert result["uses"] == num_uses
|
||||
json_request = {
|
||||
"policies": ["saltstack/minion/test-minion", "saltstack/minions"],
|
||||
"num_uses": num_uses,
|
||||
"meta": {
|
||||
"saltstack-jid": "<no jid set>",
|
||||
"saltstack-minion": "test-minion",
|
||||
"saltstack-user": "<no user set>",
|
||||
},
|
||||
}
|
||||
mock.assert_called_with(
|
||||
"http://fake_url", headers=ANY, json=json_request, verify=ANY
|
||||
)
|
||||
|
||||
# Test ttl
|
||||
expected_ttl = "6h"
|
||||
result = vault.generate_token("test-minion", "signature", ttl=expected_ttl)
|
||||
assert result["uses"] == 1
|
||||
json_request = {
|
||||
"policies": ["saltstack/minion/test-minion", "saltstack/minions"],
|
||||
"num_uses": 1,
|
||||
"explicit_max_ttl": expected_ttl,
|
||||
"meta": {
|
||||
"saltstack-jid": "<no jid set>",
|
||||
"saltstack-minion": "test-minion",
|
||||
"saltstack-user": "<no user set>",
|
||||
},
|
||||
}
|
||||
mock.assert_called_with(
|
||||
"http://fake_url", headers=ANY, json=json_request, verify=ANY
|
||||
)
|
||||
|
||||
mock = _mock_json_response({}, status_code=403, reason="no reason")
|
||||
with patch("requests.post", mock):
|
||||
result = vault.generate_token("test-minion", "signature")
|
||||
assert isinstance(result, dict)
|
||||
assert "error" in result
|
||||
assert result["error"] == "no reason"
|
||||
|
||||
with patch("salt.runners.vault._get_policies", MagicMock(return_value=[])):
|
||||
result = vault.generate_token("test-minion", "signature")
|
||||
assert isinstance(result, dict)
|
||||
assert "error" in result
|
||||
assert result["error"] == "No policies matched minion"
|
||||
|
||||
with patch(
|
||||
"requests.post", MagicMock(side_effect=Exception("Test Exception Reason"))
|
||||
):
|
||||
result = vault.generate_token("test-minion", "signature")
|
||||
assert isinstance(result, dict)
|
||||
assert "error" in result
|
||||
assert result["error"] == "Test Exception Reason"
|
||||
|
||||
|
||||
def test_generate_token_with_namespace():
|
||||
"""
|
||||
Basic tests for test_generate_token: all exits
|
||||
"""
|
||||
mock = _mock_json_response(
|
||||
{"auth": {"client_token": "test", "renewable": False, "lease_duration": 0}}
|
||||
)
|
||||
supplied_config = {"namespace": "test_namespace"}
|
||||
with patch("requests.post", mock):
|
||||
with patch.dict(vault.__opts__["vault"], supplied_config):
|
||||
result = vault.generate_token("test-minion", "signature")
|
||||
log.debug("generate_token result: %s", result)
|
||||
assert isinstance(result, dict)
|
||||
assert "error" not in result
|
||||
assert "token" in result
|
||||
assert result["token"] == "test"
|
||||
mock.assert_called_with(
|
||||
"http://fake_url",
|
||||
headers={
|
||||
"X-Vault-Token": "test",
|
||||
"X-Vault-Namespace": "test_namespace",
|
||||
},
|
||||
json=ANY,
|
||||
verify=ANY,
|
||||
)
|
169
tests/pytests/unit/runners/vault/test_vault.py
Normal file
169
tests/pytests/unit/runners/vault/test_vault.py
Normal file
|
@ -0,0 +1,169 @@
|
|||
"""
|
||||
Unit tests for the Vault runner
|
||||
"""
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.runners.vault as vault
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {vault: {}}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def grains():
|
||||
return {
|
||||
"id": "test-minion",
|
||||
"roles": ["web", "database"],
|
||||
"aux": ["foo", "bar"],
|
||||
"deep": {"foo": {"bar": {"baz": ["hello", "world"]}}},
|
||||
"mixedcase": "UP-low-UP",
|
||||
}
|
||||
|
||||
|
||||
def test_pattern_list_expander(grains):
|
||||
"""
|
||||
Ensure _expand_pattern_lists works as intended:
|
||||
- Expand list-valued patterns
|
||||
- Do not change non-list-valued tokens
|
||||
"""
|
||||
cases = {
|
||||
"no-tokens-to-replace": ["no-tokens-to-replace"],
|
||||
"single-dict:{minion}": ["single-dict:{minion}"],
|
||||
"single-list:{grains[roles]}": ["single-list:web", "single-list:database"],
|
||||
"multiple-lists:{grains[roles]}+{grains[aux]}": [
|
||||
"multiple-lists:web+foo",
|
||||
"multiple-lists:web+bar",
|
||||
"multiple-lists:database+foo",
|
||||
"multiple-lists:database+bar",
|
||||
],
|
||||
"single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}": [
|
||||
"single-list-with-dicts:{grains[id]}+web+{grains[id]}",
|
||||
"single-list-with-dicts:{grains[id]}+database+{grains[id]}",
|
||||
],
|
||||
"deeply-nested-list:{grains[deep][foo][bar][baz]}": [
|
||||
"deeply-nested-list:hello",
|
||||
"deeply-nested-list:world",
|
||||
],
|
||||
}
|
||||
|
||||
# The mappings dict is assembled in _get_policies, so emulate here
|
||||
mappings = {"minion": grains["id"], "grains": grains}
|
||||
for case, correct_output in cases.items():
|
||||
output = vault._expand_pattern_lists(
|
||||
case, **mappings
|
||||
) # pylint: disable=protected-access
|
||||
diff = set(output).symmetric_difference(set(correct_output))
|
||||
if diff:
|
||||
log.debug("Test %s failed", case)
|
||||
log.debug("Expected:\n\t%s\nGot\n\t%s", output, correct_output)
|
||||
log.debug("Difference:\n\t%s", diff)
|
||||
assert output == correct_output
|
||||
|
||||
|
||||
def test_get_policies_for_nonexisting_minions():
|
||||
minion_id = "salt_master"
|
||||
# For non-existing minions, or the master-minion, grains will be None
|
||||
cases = {
|
||||
"no-tokens-to-replace": ["no-tokens-to-replace"],
|
||||
"single-dict:{minion}": ["single-dict:{}".format(minion_id)],
|
||||
"single-list:{grains[roles]}": [],
|
||||
}
|
||||
with patch(
|
||||
"salt.utils.minions.get_minion_data",
|
||||
MagicMock(return_value=(None, None, None)),
|
||||
):
|
||||
for case, correct_output in cases.items():
|
||||
test_config = {"policies": [case]}
|
||||
output = vault._get_policies(
|
||||
minion_id, test_config
|
||||
) # pylint: disable=protected-access
|
||||
diff = set(output).symmetric_difference(set(correct_output))
|
||||
if diff:
|
||||
log.debug("Test %s failed", case)
|
||||
log.debug("Expected:\n\t%s\nGot\n\t%s", output, correct_output)
|
||||
log.debug("Difference:\n\t%s", diff)
|
||||
assert output == correct_output
|
||||
|
||||
|
||||
def test_get_policies(grains):
|
||||
"""
|
||||
Ensure _get_policies works as intended, including expansion of lists
|
||||
"""
|
||||
cases = {
|
||||
"no-tokens-to-replace": ["no-tokens-to-replace"],
|
||||
"single-dict:{minion}": ["single-dict:test-minion"],
|
||||
"single-list:{grains[roles]}": ["single-list:web", "single-list:database"],
|
||||
"multiple-lists:{grains[roles]}+{grains[aux]}": [
|
||||
"multiple-lists:web+foo",
|
||||
"multiple-lists:web+bar",
|
||||
"multiple-lists:database+foo",
|
||||
"multiple-lists:database+bar",
|
||||
],
|
||||
"single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}": [
|
||||
"single-list-with-dicts:test-minion+web+test-minion",
|
||||
"single-list-with-dicts:test-minion+database+test-minion",
|
||||
],
|
||||
"deeply-nested-list:{grains[deep][foo][bar][baz]}": [
|
||||
"deeply-nested-list:hello",
|
||||
"deeply-nested-list:world",
|
||||
],
|
||||
"should-not-cause-an-exception,but-result-empty:{foo}": [],
|
||||
"Case-Should-Be-Lowered:{grains[mixedcase]}": [
|
||||
"case-should-be-lowered:up-low-up"
|
||||
],
|
||||
}
|
||||
|
||||
with patch(
|
||||
"salt.utils.minions.get_minion_data",
|
||||
MagicMock(return_value=(None, grains, None)),
|
||||
):
|
||||
for case, correct_output in cases.items():
|
||||
test_config = {"policies": [case]}
|
||||
output = vault._get_policies(
|
||||
"test-minion", test_config
|
||||
) # pylint: disable=protected-access
|
||||
diff = set(output).symmetric_difference(set(correct_output))
|
||||
if diff:
|
||||
log.debug("Test %s failed", case)
|
||||
log.debug("Expected:\n\t%s\nGot\n\t%s", output, correct_output)
|
||||
log.debug("Difference:\n\t%s", diff)
|
||||
assert output == correct_output
|
||||
|
||||
|
||||
def test_get_token_create_url():
|
||||
"""
|
||||
Ensure _get_token_create_url parses config correctly
|
||||
"""
|
||||
assert (
|
||||
vault._get_token_create_url( # pylint: disable=protected-access
|
||||
{"url": "http://127.0.0.1"}
|
||||
)
|
||||
== "http://127.0.0.1/v1/auth/token/create"
|
||||
)
|
||||
assert (
|
||||
vault._get_token_create_url( # pylint: disable=protected-access
|
||||
{"url": "https://127.0.0.1/"}
|
||||
)
|
||||
== "https://127.0.0.1/v1/auth/token/create"
|
||||
)
|
||||
assert (
|
||||
vault._get_token_create_url( # pylint: disable=protected-access
|
||||
{"url": "http://127.0.0.1:8200", "role_name": "therole"}
|
||||
)
|
||||
== "http://127.0.0.1:8200/v1/auth/token/create/therole"
|
||||
)
|
||||
assert (
|
||||
vault._get_token_create_url( # pylint: disable=protected-access
|
||||
{"url": "https://127.0.0.1/test", "role_name": "therole"}
|
||||
)
|
||||
== "https://127.0.0.1/test/v1/auth/token/create/therole"
|
||||
)
|
|
@ -1,106 +0,0 @@
|
|||
"""
|
||||
tests.unit.runners.test_asam
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Unit tests for the asam runner
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
import salt.runners.asam as asam
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AsamRunnerVerifySslTest(TestCase, LoaderModuleMockMixin):
|
||||
def setup_loader_modules(self):
|
||||
opts = {
|
||||
"asam": {
|
||||
"prov1.domain.com": {
|
||||
"username": "TheUsername",
|
||||
"password": "ThePassword",
|
||||
}
|
||||
}
|
||||
}
|
||||
return {asam: {"__opts__": opts}}
|
||||
|
||||
def test_add_platform(self):
|
||||
parse_html_content = MagicMock()
|
||||
get_platform_set_name = MagicMock(return_value="plat-foo")
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platformset_name", get_platform_set_name
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.add_platform("plat-foo-2", "plat-foo", "prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformSetConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={"manual": "false"},
|
||||
verify=True,
|
||||
)
|
||||
|
||||
def test_remove_platform(self):
|
||||
parse_html_content = MagicMock()
|
||||
get_platform_set_name = MagicMock(return_value="plat-foo")
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platformset_name", get_platform_set_name
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.remove_platform("plat-foo", "prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={
|
||||
"manual": "false",
|
||||
"platformName": "plat-foo",
|
||||
"platformSetName": "plat-foo",
|
||||
"postType": "platformRemove",
|
||||
"Submit": "Yes",
|
||||
},
|
||||
verify=True,
|
||||
)
|
||||
|
||||
def test_list_platforms(self):
|
||||
parse_html_content = MagicMock()
|
||||
get_platforms = MagicMock(return_value=["plat-foo", "plat-bar"])
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platforms", get_platforms
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.list_platforms("prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={"manual": "false"},
|
||||
verify=True,
|
||||
)
|
||||
|
||||
def test_list_platform_sets(self):
|
||||
parse_html_content = MagicMock()
|
||||
get_platform_sets = MagicMock(return_value=["plat-foo", "plat-bar"])
|
||||
requests_mock = MagicMock()
|
||||
|
||||
# remove_platform
|
||||
with patch("salt.runners.asam._parse_html_content", parse_html_content), patch(
|
||||
"salt.runners.asam._get_platforms", get_platform_sets
|
||||
), patch("salt.runners.asam.requests.post", requests_mock):
|
||||
asam.list_platform_sets("prov1.domain.com")
|
||||
|
||||
requests_mock.assert_called_with(
|
||||
"https://prov1.domain.com:3451/config/PlatformSetConfig.html",
|
||||
auth=("TheUsername", "ThePassword"),
|
||||
data={"manual": "false"},
|
||||
verify=True,
|
||||
)
|
|
@ -1,26 +0,0 @@
|
|||
import salt.runners.bgp as bgp
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
|
||||
|
||||
@skipIf(not bgp.HAS_NAPALM, "napalm module required for this test")
|
||||
class BGPTest(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Test the bgp runner
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {
|
||||
bgp: {
|
||||
"__opts__": {
|
||||
"optimization_order": [0, 1, 2],
|
||||
"renderer": "yaml",
|
||||
"renderer_blacklist": [],
|
||||
"renderer_whitelist": [],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def test_neighbors(self):
|
||||
ret = bgp.neighbors()
|
||||
self.assertEqual(ret, [])
|
|
@ -1,48 +0,0 @@
|
|||
"""
|
||||
unit tests for the cache runner
|
||||
"""
|
||||
|
||||
|
||||
import salt.runners.cache as cache
|
||||
import salt.utils.master
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import patch
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class CacheTest(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Validate the cache runner
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {
|
||||
cache: {
|
||||
"__opts__": {
|
||||
"cache": "localfs",
|
||||
"pki_dir": RUNTIME_VARS.TMP,
|
||||
"key_cache": True,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def test_grains(self):
|
||||
"""
|
||||
test cache.grains runner
|
||||
"""
|
||||
mock_minion = ["Larry"]
|
||||
mock_ret = {}
|
||||
self.assertEqual(cache.grains(tgt="*", minion=mock_minion), mock_ret)
|
||||
|
||||
mock_data = "grain stuff"
|
||||
|
||||
class MockMaster:
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def get_minion_grains(self):
|
||||
return mock_data
|
||||
|
||||
with patch.object(salt.utils.master, "MasterPillarUtil", MockMaster):
|
||||
self.assertEqual(cache.grains(tgt="*"), mock_data)
|
|
@ -1,149 +0,0 @@
|
|||
"""
|
||||
unit tests for the fileserver runner
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
|
||||
import salt.loader
|
||||
import salt.runners.fileserver as fileserver
|
||||
import salt.utils.files
|
||||
from tests.support.helpers import with_tempdir
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class DummyFS:
|
||||
"""
|
||||
Dummy object to provide the attributes needed to run unit tests
|
||||
"""
|
||||
|
||||
def __init__(self, backends):
|
||||
self.backends = backends
|
||||
|
||||
def keys(self):
|
||||
return ["{}.envs".format(x) for x in self.backends]
|
||||
|
||||
|
||||
class FileserverTest(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Validate the cache runner
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {fileserver: {"__opts__": {"extension_modules": ""}}}
|
||||
|
||||
def _make_file_lists_cache(self, cachedir, backends):
|
||||
"""
|
||||
Create some dummy files to represent file list caches, as well as other
|
||||
files that aren't file list caches, so that we can confirm that *only*
|
||||
the cache files are touched. Create a dir for each configured backend,
|
||||
as well as for the roots backend (which is *not* configured as a
|
||||
backend in this test), so that we can ensure that its cache is left
|
||||
alone.
|
||||
"""
|
||||
for back in backends:
|
||||
back_cachedir = os.path.join(cachedir, "file_lists", back)
|
||||
# Make file_lists cachedir
|
||||
os.makedirs(os.path.join(back_cachedir))
|
||||
# Touch a couple files
|
||||
for filename in ("base.p", "dev.p", "foo.txt"):
|
||||
with salt.utils.files.fopen(os.path.join(back_cachedir, filename), "w"):
|
||||
pass
|
||||
|
||||
@with_tempdir()
|
||||
def test_clear_file_list_cache_vcs(self, cachedir):
|
||||
"""
|
||||
Test that VCS backends are cleared irrespective of whether they are
|
||||
configured as gitfs/git, hgfs/hg, svnfs/svn.
|
||||
"""
|
||||
# Mixture of VCS backends specified with and without "fs" at the end,
|
||||
# to confirm that the correct dirs are cleared.
|
||||
backends = ["gitfs", "hg", "svnfs"]
|
||||
opts = {
|
||||
"fileserver_backend": backends,
|
||||
"cachedir": cachedir,
|
||||
}
|
||||
mock_fs = DummyFS(backends)
|
||||
|
||||
self._make_file_lists_cache(cachedir, backends + ["roots"])
|
||||
|
||||
with patch.dict(fileserver.__opts__, opts), patch.object(
|
||||
salt.loader, "fileserver", MagicMock(return_value=mock_fs)
|
||||
):
|
||||
cleared = fileserver.clear_file_list_cache()
|
||||
|
||||
# Make sure the return data matches what you'd expect
|
||||
expected = {
|
||||
"gitfs": ["base", "dev"],
|
||||
"hg": ["base", "dev"],
|
||||
"svnfs": ["base", "dev"],
|
||||
}
|
||||
assert cleared == expected, cleared
|
||||
|
||||
# Trust, but verify! Check that the correct files are actually gone
|
||||
assert not os.path.exists(
|
||||
os.path.join(cachedir, "file_lists", "gitfs", "base.p")
|
||||
)
|
||||
assert not os.path.exists(
|
||||
os.path.join(cachedir, "file_lists", "gitfs", "dev.p")
|
||||
)
|
||||
assert not os.path.exists(os.path.join(cachedir, "file_lists", "hg", "base.p"))
|
||||
assert not os.path.exists(
|
||||
os.path.join(cachedir, "file_lists", "gitfs", "dev.p")
|
||||
)
|
||||
assert not os.path.exists(os.path.join(cachedir, "file_lists", "hg", "base.p"))
|
||||
assert not os.path.exists(
|
||||
os.path.join(cachedir, "file_lists", "svnfs", "dev.p")
|
||||
)
|
||||
|
||||
# These files *should* exist and shouldn't have been cleaned
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "gitfs", "foo.txt"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "hg", "foo.txt"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "svnfs", "foo.txt"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "roots", "base.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "roots", "dev.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "roots", "foo.txt"))
|
||||
|
||||
@with_tempdir()
|
||||
def test_clear_file_list_cache_vcs_limited(self, cachedir):
|
||||
"""
|
||||
Test the arguments to limit what is cleared
|
||||
"""
|
||||
# Mixture of VCS backends specified with and without "fs" at the end,
|
||||
# to confirm that the correct dirs are cleared.
|
||||
backends = ["gitfs", "hg", "svnfs"]
|
||||
opts = {
|
||||
"fileserver_backend": backends,
|
||||
"cachedir": cachedir,
|
||||
}
|
||||
mock_fs = DummyFS(backends)
|
||||
|
||||
self._make_file_lists_cache(cachedir, backends + ["roots"])
|
||||
|
||||
with patch.dict(fileserver.__opts__, opts), patch.object(
|
||||
salt.loader, "fileserver", MagicMock(return_value=mock_fs)
|
||||
):
|
||||
cleared = fileserver.clear_file_list_cache(saltenv="base", backend="gitfs")
|
||||
|
||||
expected = {"gitfs": ["base"]}
|
||||
assert cleared == expected, cleared
|
||||
|
||||
# Trust, but verify! Check that the correct files are actually gone
|
||||
assert not os.path.exists(
|
||||
os.path.join(cachedir, "file_lists", "gitfs", "base.p")
|
||||
)
|
||||
|
||||
# These files *should* exist and shouldn't have been cleaned
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "gitfs", "dev.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "gitfs", "foo.txt"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "hg", "base.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "hg", "dev.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "hg", "foo.txt"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "svnfs", "base.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "svnfs", "dev.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "svnfs", "foo.txt"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "roots", "base.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "roots", "dev.p"))
|
||||
assert os.path.exists(os.path.join(cachedir, "file_lists", "roots", "foo.txt"))
|
|
@@ -1,86 +0,0 @@
"""
unit tests for the git_pillar runner
"""


import errno
import logging
import tempfile

import salt.runners.git_pillar as git_pillar
import salt.utils.files
import salt.utils.gitfs
from tests.support.gitfs import _OPTS
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import patch
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase

log = logging.getLogger(__name__)


class GitPillarTest(TestCase, LoaderModuleMockMixin):
    """
    Validate the git_pillar runner
    """

    @classmethod
    def setUpClass(cls):
        cls.tmp_cachedir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)

    @classmethod
    def tearDownClass(cls):
        try:
            salt.utils.files.rm_rf(cls.tmp_cachedir)
        except OSError as exc:
            if exc.errno == errno.EACCES:
                log.error("Access error removing file %s", cls.tmp_cachedir)
            elif exc.errno != errno.EEXIST:
                raise

    def setup_loader_modules(self):
        opts = _OPTS.copy()
        opts["cachedir"] = self.tmp_cachedir
        opts["verified_git_pillar_provider"] = "gitfoo"
        opts["ext_pillar"] = [
            {
                "git": [
                    "master https://someurl/some",
                    {"dev https://otherurl/other": [{"name": "somename"}]},
                ]
            }
        ]
        return {git_pillar: {"__opts__": opts}}

    def test_update(self):
        """
        test git_pillar.update
        """

        class MockGitProvider(
            salt.utils.gitfs.GitProvider
        ):  # pylint: disable=abstract-method
            def init_remote(self):
                new = False
                self.repo = True
                return new

            def fetch(self):
                return True

            def clear_lock(self, lock_type="update"):
                pass  # return success, failed

        git_providers = {"gitfoo": MockGitProvider}

        repo1 = {"master https://someurl/some": True}
        repo2 = {"dev https://otherurl/other": True}
        all_repos = {
            "master https://someurl/some": True,
            "dev https://otherurl/other": True,
        }
        with patch.object(salt.utils.gitfs, "GIT_PROVIDERS", git_providers):
            self.assertEqual(git_pillar.update(), all_repos)
            self.assertEqual(git_pillar.update(branch="master"), repo1)
            self.assertEqual(git_pillar.update(branch="dev"), repo2)
            self.assertEqual(git_pillar.update(repo="somename"), repo2)
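The class-level tmp_cachedir and setup_loader_modules above map onto a module-level fixture under the pytest conventions used elsewhere in this PR. A minimal sketch, assuming the configure_loader_modules fixture name and a tmp_path-backed cachedir rather than the exact contents of the migrated file:

import pytest

import salt.runners.git_pillar as git_pillar
import salt.utils.gitfs
from tests.support.gitfs import _OPTS
from tests.support.mock import patch


@pytest.fixture
def configure_loader_modules(tmp_path):
    # Same opts the old setup_loader_modules built, minus the manual tempdir
    opts = _OPTS.copy()
    opts["cachedir"] = str(tmp_path)
    opts["verified_git_pillar_provider"] = "gitfoo"
    opts["ext_pillar"] = [{"git": ["master https://someurl/some"]}]
    return {git_pillar: {"__opts__": opts}}


def test_update():
    class MockGitProvider(
        salt.utils.gitfs.GitProvider
    ):  # pylint: disable=abstract-method
        def init_remote(self):
            self.repo = True
            return False

        def fetch(self):
            return True

        def clear_lock(self, lock_type="update"):
            pass

    with patch.object(salt.utils.gitfs, "GIT_PROVIDERS", {"gitfoo": MockGitProvider}):
        assert git_pillar.update() == {"master https://someurl/some": True}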
@@ -1,84 +0,0 @@
"""
unit tests for the jobs runner
"""


import salt.minion
import salt.runners.jobs as jobs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import patch
from tests.support.unit import TestCase


class JobsTest(TestCase, LoaderModuleMockMixin):
    """
    Validate the jobs runner
    """

    def setup_loader_modules(self):
        return {
            jobs: {
                "__opts__": {"ext_job_cache": None, "master_job_cache": "local_cache"}
            }
        }

    def test_list_jobs_with_search_target(self):
        """
        test jobs.list_jobs runner with search_target args
        """
        mock_jobs_cache = {
            "20160524035503086853": {
                "Arguments": [],
                "Function": "test.ping",
                "StartTime": "2016, May 24 03:55:03.086853",
                "Target": "node-1-1.com",
                "Target-type": "glob",
                "User": "root",
            },
            "20160524035524895387": {
                "Arguments": [],
                "Function": "test.ping",
                "StartTime": "2016, May 24 03:55:24.895387",
                "Target": ["node-1-2.com", "node-1-1.com"],
                "Target-type": "list",
                "User": "sudo_ubuntu",
            },
        }

        def return_mock_jobs():
            return mock_jobs_cache

        class MockMasterMinion:

            returners = {"local_cache.get_jids": return_mock_jobs}

            def __init__(self, *args, **kwargs):
                pass

        returns = {
            "all": mock_jobs_cache,
            "node-1-1.com": mock_jobs_cache,
            "node-1-2.com": {
                "20160524035524895387": mock_jobs_cache["20160524035524895387"]
            },
            "non-existant": {},
        }

        with patch.object(salt.minion, "MasterMinion", MockMasterMinion):
            self.assertEqual(jobs.list_jobs(), returns["all"])

            self.assertEqual(
                jobs.list_jobs(search_target=["node-1-1*", "node-1-2*"]), returns["all"]
            )

            self.assertEqual(
                jobs.list_jobs(search_target="node-1-1.com"), returns["node-1-1.com"]
            )

            self.assertEqual(
                jobs.list_jobs(search_target="node-1-2.com"), returns["node-1-2.com"]
            )

            self.assertEqual(
                jobs.list_jobs(search_target="non-existant"), returns["non-existant"]
            )
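Migrated to pytest, the loader setup above becomes a configure_loader_modules fixture and the test a plain function. A condensed sketch with a single cached job; the fixture and function names are assumptions following the PR's conventions, not the exact migrated code:

import pytest

import salt.minion
import salt.runners.jobs as jobs
from tests.support.mock import patch


@pytest.fixture
def configure_loader_modules():
    return {
        jobs: {"__opts__": {"ext_job_cache": None, "master_job_cache": "local_cache"}}
    }


def test_list_jobs_with_search_target():
    mock_jobs_cache = {
        "20160524035503086853": {
            "Arguments": [],
            "Function": "test.ping",
            "StartTime": "2016, May 24 03:55:03.086853",
            "Target": "node-1-1.com",
            "Target-type": "glob",
            "User": "root",
        }
    }

    class MockMasterMinion:
        # Stand-in for the local_cache returner backing list_jobs
        returners = {"local_cache.get_jids": lambda: mock_jobs_cache}

        def __init__(self, *args, **kwargs):
            pass

    with patch.object(salt.minion, "MasterMinion", MockMasterMinion):
        assert jobs.list_jobs(search_target="node-1-1.com") == mock_jobs_cache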
@@ -1,19 +0,0 @@
from salt.runners import manage
from tests.support.unit import TestCase


class ManageTest(TestCase):
    def test_deprecation_58638(self):
        # check that type error will be raised
        self.assertRaises(TypeError, manage.list_state, show_ipv4="data")

        # check that show_ipv4 will raise an error
        try:
            manage.list_state(  # pylint: disable=unexpected-keyword-arg
                show_ipv4="data"
            )
        except TypeError as no_show_ipv4:
            self.assertEqual(
                str(no_show_ipv4),
                "list_state() got an unexpected keyword argument 'show_ipv4'",
            )
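In pytest style the same deprecation check collapses to a single pytest.raises block; a minimal sketch:

import pytest

from salt.runners import manage


def test_deprecation_58638():
    # show_ipv4 has been removed, so passing it must raise TypeError
    with pytest.raises(TypeError, match="unexpected keyword argument 'show_ipv4'"):
        manage.list_state(show_ipv4="data")  # pylint: disable=unexpected-keyword-arg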
@@ -1,56 +0,0 @@
import shutil
import tempfile

import salt.runners.net as net
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase, skipIf


@skipIf(not net.HAS_NAPALM, "napalm module required for this test")
class NetTest(TestCase, LoaderModuleMockMixin):
    """
    Test the net runner
    """

    def setup_loader_modules(self):
        mock_get = MagicMock(return_value={})
        self.extmods_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
        self.addCleanup(shutil.rmtree, self.extmods_dir, ignore_errors=True)
        return {
            net: {
                "__opts__": {
                    "optimization_order": [0, 1, 2],
                    "renderer": "yaml",
                    "renderer_blacklist": [],
                    "renderer_whitelist": [],
                    "extension_modules": self.extmods_dir,
                },
                "__salt__": {"mine.get": mock_get},
            }
        }

    def test_interfaces(self):
        ret = net.interfaces()
        self.assertEqual(None, ret)

    def test_findarp(self):
        ret = net.findarp()
        self.assertEqual(None, ret)

    def test_findmac(self):
        ret = net.findmac()
        self.assertEqual(None, ret)

    def test_lldp(self):
        ret = net.lldp()
        self.assertEqual(None, ret)

    def test_find(self):
        ret = net.find("")
        self.assertEqual({}, ret)

    def test_multi_find(self):
        ret = net.multi_find()
        self.assertEqual(None, ret)
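The skipIf guard and setup_loader_modules above translate to a module-level pytestmark and a fixture in pytest. A minimal sketch with one representative test; the fixture name follows the loader conventions used in this PR, and tmp_path replaces the manual temp dir:

import pytest

import salt.runners.net as net
from tests.support.mock import MagicMock

pytestmark = pytest.mark.skipif(
    not net.HAS_NAPALM, reason="napalm module required for this test"
)


@pytest.fixture
def configure_loader_modules(tmp_path):
    return {
        net: {
            "__opts__": {
                "optimization_order": [0, 1, 2],
                "renderer": "yaml",
                "renderer_blacklist": [],
                "renderer_whitelist": [],
                "extension_modules": str(tmp_path),
            },
            "__salt__": {"mine.get": MagicMock(return_value={})},
        }
    }


def test_interfaces():
    assert net.interfaces() is None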
@@ -1,57 +0,0 @@
"""
unit tests for the cache runner
"""


import os

import pytest

import salt.runners.queue as queue_mod
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase


class QueueTest(TestCase, LoaderModuleMockMixin):
    """
    Validate the queue runner
    """

    def setup_loader_modules(self):
        return {
            queue_mod: {
                "__opts__": {
                    "sock_dir": os.path.join(RUNTIME_VARS.TMP, "queue-runner-sock-dir"),
                    "transport": "zeromq",
                }
            }
        }

    def test_insert_runner(self):
        queue_insert = MagicMock(return_value=True)
        with patch.object(queue_mod, "insert", queue_insert):
            queue_mod.insert_runner("test.stdout_print", queue="salt")
            expected_call = {
                "queue": "salt",
                "items": {"fun": "test.stdout_print", "args": [], "kwargs": {}},
                "backend": "pgjsonb",
            }
            queue_insert.assert_called_once_with(**expected_call)

    @pytest.mark.slow_test
    def test_process_runner(self):
        ret = [{"fun": "test.stdout_print", "args": [], "kwargs": {}}]

        queue_pop = MagicMock(return_value=ret)
        test_stdout_print = MagicMock(return_value=True)
        with patch.dict(queue_mod.__salt__, {"test.stdout_print": test_stdout_print}):
            with patch.object(queue_mod, "pop", queue_pop):
                queue_mod.process_runner(queue="salt")
                queue_pop.assert_called_once_with(
                    is_runner=True, queue="salt", quantity=1, backend="pgjsonb"
                )
                test_stdout_print.assert_called_once_with()
                queue_pop.assert_called_once_with(
                    is_runner=True, queue="salt", quantity=1, backend="pgjsonb"
                )
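These checks read naturally as pytest functions backed by a configure_loader_modules fixture. A minimal sketch of the insert path only, with a tmp_path-based sock_dir; names are assumptions, not the exact migrated code:

import pytest

import salt.runners.queue as queue_mod
from tests.support.mock import MagicMock, patch


@pytest.fixture
def configure_loader_modules(tmp_path):
    return {
        queue_mod: {
            "__opts__": {"sock_dir": str(tmp_path / "sock"), "transport": "zeromq"}
        }
    }


def test_insert_runner():
    queue_insert = MagicMock(return_value=True)
    with patch.object(queue_mod, "insert", queue_insert):
        queue_mod.insert_runner("test.stdout_print", queue="salt")
        queue_insert.assert_called_once_with(
            queue="salt",
            items={"fun": "test.stdout_print", "args": [], "kwargs": {}},
            backend="pgjsonb",
        )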
@@ -1,312 +0,0 @@
"""
unit tests for the reactor runner
"""


import logging

import salt.runners.reactor as reactor
from salt.exceptions import CommandExecutionError
from salt.utils.event import SaltEvent
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.unit import TestCase

log = logging.getLogger(__name__)


class MockEvent:
    """
    Mock event class
    """

    flag = None

    def __init__(self):
        self.full = None

    def get_event(self, wait, tag):
        """
        Mock get_event method
        """
        data = []
        return {"tag": tag, "data": data}

    def fire_event(self, data, tag):
        """
        Mock get_event method
        """
        return {"tag": tag, "data": data}


class ReactorTest(TestCase, LoaderModuleMockMixin):
    """
    Validate the reactor runner
    """

    def setup_loader_modules(self):
        return {
            reactor: {
                "__opts__": {
                    "reactor": [],
                    "engines": [],
                    "id": "master_id",
                    "sock_dir": "/var/run/salt/master",
                    "transport": "zeromq",
                },
                "__jid_event__": MockEvent(),
            }
        }

    def test_list(self):
        """
        test reactor.list runner
        """
        with self.assertRaises(CommandExecutionError) as excinfo:
            ret = reactor.list_()
        self.assertEqual(excinfo.exception.strerror, "Reactor system is not running.")

        mock_opts = {}
        mock_opts = {"engines": []}
        with patch.dict(reactor.__opts__, mock_opts):
            with self.assertRaises(CommandExecutionError) as excinfo:
                ret = reactor.list_()
            self.assertEqual(
                excinfo.exception.strerror, "Reactor system is not running."
            )

        mock_opts = {}
        mock_opts["engines"] = [
            {
                "reactor": {
                    "refresh_interval": 60,
                    "worker_threads": 10,
                    "worker_hwm": 10000,
                }
            }
        ]
        event_returns = {
            "reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
            "_stamp": "2020-09-04T16:51:52.577711",
        }

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.list_()
                    self.assertIn({"test_event/*": ["/srv/reactors/reactor.sls"]}, ret)

        event_returns = {
            "_stamp": "2020-09-04T16:51:52.577711",
        }

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.list_()
                    assert ret is None

        mock_opts = {}
        mock_opts["reactor"] = [{"test_event/*": ["/srv/reactors/reactor.sls"]}]
        with patch.dict(reactor.__opts__, mock_opts):
            with patch.dict(reactor.__opts__, mock_opts):
                event_returns = {
                    "reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
                    "_stamp": "2020-09-04T16:51:52.577711",
                }

                with patch.object(SaltEvent, "get_event", return_value=event_returns):
                    with patch("salt.utils.master.get_master_key") as get_master_key:
                        get_master_key.retun_value = MagicMock(retun_value="master_key")
                        ret = reactor.list_()
                        self.assertIn({"test_event/*": ["/srv/reactors/reactor.sls"]}, ret)

    def test_add(self):
        """
        test reactor.add runner
        """
        with self.assertRaises(CommandExecutionError) as excinfo:
            ret = reactor.add(
                "salt/cloud/*/destroyed", reactors="/srv/reactor/destroy/*.sls"
            )
        self.assertEqual(excinfo.exception.strerror, "Reactor system is not running.")

        mock_opts = {}
        mock_opts = {"engines": []}
        with patch.dict(reactor.__opts__, mock_opts):
            with self.assertRaises(CommandExecutionError) as excinfo:
                ret = reactor.add(
                    "salt/cloud/*/destroyed", reactors="/srv/reactor/destroy/*.sls"
                )
            self.assertEqual(
                excinfo.exception.strerror, "Reactor system is not running."
            )

        mock_opts = {}
        mock_opts["engines"] = [
            {
                "reactor": {
                    "refresh_interval": 60,
                    "worker_threads": 10,
                    "worker_hwm": 10000,
                }
            }
        ]

        event_returns = {
            "reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
            "result": {"status": True, "comment": "Reactor added."},
            "_stamp": "2020-09-04T17:45:33.206408",
        }

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.add("test_event/*", "/srv/reactor/reactor.sls")
                    self.assertIn("status", ret)
                    self.assertTrue(ret["status"])
                    self.assertEqual("Reactor added.", ret["comment"])

        event_returns = {
            "reactors": [{"test_event/*": ["/srv/reactors/reactor.sls"]}],
            "_stamp": "2020-09-04T17:45:33.206408",
        }

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.add("test_event/*", "/srv/reactor/reactor.sls")
                    assert ret is None

    def test_delete(self):
        """
        test reactor.delete runner
        """
        with self.assertRaises(CommandExecutionError) as excinfo:
            ret = reactor.delete("salt/cloud/*/destroyed")
        self.assertEqual(excinfo.exception.strerror, "Reactor system is not running.")

        mock_opts = {}
        mock_opts = {"engines": []}
        with patch.dict(reactor.__opts__, mock_opts):
            with self.assertRaises(CommandExecutionError) as excinfo:
                ret = reactor.delete("salt/cloud/*/destroyed")
            self.assertEqual(
                excinfo.exception.strerror, "Reactor system is not running."
            )

        mock_opts = {}
        mock_opts["engines"] = [
            {
                "reactor": {
                    "refresh_interval": 60,
                    "worker_threads": 10,
                    "worker_hwm": 10000,
                }
            }
        ]

        event_returns = {
            "reactors": [{"bot/*": ["/srv/reactors/bot.sls"]}],
            "result": {"status": True, "comment": "Reactor deleted."},
            "_stamp": "2020-09-04T18:15:41.586552",
        }

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.delete("test_event/*")
                    self.assertIn("status", ret)
                    self.assertTrue(ret["status"])
                    self.assertEqual("Reactor deleted.", ret["comment"])

        event_returns = {
            "reactors": [{"bot/*": ["/srv/reactors/bot.sls"]}],
            "_stamp": "2020-09-04T18:15:41.586552",
        }

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.delete("test_event/*")
                    assert ret is None

    def test_is_leader(self):
        """
        test reactor.is_leader runner
        """
        with self.assertRaises(CommandExecutionError) as excinfo:
            ret = reactor.is_leader()
        self.assertEqual(excinfo.exception.strerror, "Reactor system is not running.")

        mock_opts = {}
        mock_opts = {"engines": []}
        with patch.dict(reactor.__opts__, mock_opts):
            with self.assertRaises(CommandExecutionError) as excinfo:
                ret = reactor.is_leader()
            self.assertEqual(
                excinfo.exception.strerror, "Reactor system is not running."
            )

        mock_opts = {}
        mock_opts["engines"] = [
            {
                "reactor": {
                    "refresh_interval": 60,
                    "worker_threads": 10,
                    "worker_hwm": 10000,
                }
            }
        ]

        event_returns = {"result": True, "_stamp": "2020-09-04T18:32:10.004490"}

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.is_leader()
                    self.assertTrue(ret)

    def test_set_leader(self):
        """
        test reactor.set_leader runner
        """
        with self.assertRaises(CommandExecutionError) as excinfo:
            ret = reactor.set_leader()
        self.assertEqual(excinfo.exception.strerror, "Reactor system is not running.")

        mock_opts = {}
        mock_opts = {"engines": []}
        with patch.dict(reactor.__opts__, mock_opts):
            with self.assertRaises(CommandExecutionError) as excinfo:
                ret = reactor.set_leader()
            self.assertEqual(
                excinfo.exception.strerror, "Reactor system is not running."
            )

        mock_opts = {}
        mock_opts["engines"] = [
            {
                "reactor": {
                    "refresh_interval": 60,
                    "worker_threads": 10,
                    "worker_hwm": 10000,
                }
            }
        ]

        event_returns = {"result": True, "_stamp": "2020-09-04T18:32:10.004490"}

        with patch.dict(reactor.__opts__, mock_opts):
            with patch.object(SaltEvent, "get_event", return_value=event_returns):
                with patch("salt.utils.master.get_master_key") as get_master_key:
                    get_master_key.retun_value = MagicMock(retun_value="master_key")
                    ret = reactor.set_leader()
                    self.assertTrue(ret)
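A pytest version of the reactor checks follows the same fixture pattern as the other migrated runners. A minimal sketch of the "reactor not running" path only; the MagicMock-based __jid_event__ here is an assumption standing in for the old MockEvent class:

import pytest

import salt.runners.reactor as reactor
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock


@pytest.fixture
def configure_loader_modules():
    return {
        reactor: {
            "__opts__": {
                "reactor": [],
                "engines": [],
                "id": "master_id",
                "sock_dir": "/var/run/salt/master",
                "transport": "zeromq",
            },
            "__jid_event__": MagicMock(),
        }
    }


def test_list_raises_when_reactor_is_not_running():
    # With no reactor engine configured, list_ must refuse to run
    with pytest.raises(CommandExecutionError):
        reactor.list_()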
@@ -1,366 +0,0 @@
"""
Unit tests for the Vault runner
"""


import logging

import salt.runners.vault as vault
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import ANY, MagicMock, Mock, call, patch
from tests.support.unit import TestCase

log = logging.getLogger(__name__)


class VaultTest(TestCase, LoaderModuleMockMixin):
    """
    Tests for the runner module of the Vault integration
    """

    def setup_loader_modules(self):
        return {vault: {}}

    def setUp(self):
        self.grains = {
            "id": "test-minion",
            "roles": ["web", "database"],
            "aux": ["foo", "bar"],
            "deep": {"foo": {"bar": {"baz": ["hello", "world"]}}},
            "mixedcase": "UP-low-UP",
        }

    def tearDown(self):
        del self.grains

    def test_pattern_list_expander(self):
        """
        Ensure _expand_pattern_lists works as intended:
        - Expand list-valued patterns
        - Do not change non-list-valued tokens
        """
        cases = {
            "no-tokens-to-replace": ["no-tokens-to-replace"],
            "single-dict:{minion}": ["single-dict:{minion}"],
            "single-list:{grains[roles]}": ["single-list:web", "single-list:database"],
            "multiple-lists:{grains[roles]}+{grains[aux]}": [
                "multiple-lists:web+foo",
                "multiple-lists:web+bar",
                "multiple-lists:database+foo",
                "multiple-lists:database+bar",
            ],
            "single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}": [
                "single-list-with-dicts:{grains[id]}+web+{grains[id]}",
                "single-list-with-dicts:{grains[id]}+database+{grains[id]}",
            ],
            "deeply-nested-list:{grains[deep][foo][bar][baz]}": [
                "deeply-nested-list:hello",
                "deeply-nested-list:world",
            ],
        }

        # The mappings dict is assembled in _get_policies, so emulate here
        mappings = {"minion": self.grains["id"], "grains": self.grains}
        for case, correct_output in cases.items():
            output = vault._expand_pattern_lists(
                case, **mappings
            )  # pylint: disable=protected-access
            diff = set(output).symmetric_difference(set(correct_output))
            if diff:
                log.debug("Test %s failed", case)
                log.debug("Expected:\n\t%s\nGot\n\t%s", output, correct_output)
                log.debug("Difference:\n\t%s", diff)
            self.assertEqual(output, correct_output)

    def test_get_policies_for_nonexisting_minions(self):
        minion_id = "salt_master"
        # For non-existing minions, or the master-minion, grains will be None
        cases = {
            "no-tokens-to-replace": ["no-tokens-to-replace"],
            "single-dict:{minion}": ["single-dict:{}".format(minion_id)],
            "single-list:{grains[roles]}": [],
        }
        with patch(
            "salt.utils.minions.get_minion_data",
            MagicMock(return_value=(None, None, None)),
        ):
            for case, correct_output in cases.items():
                test_config = {"policies": [case]}
                output = vault._get_policies(
                    minion_id, test_config
                )  # pylint: disable=protected-access
                diff = set(output).symmetric_difference(set(correct_output))
                if diff:
                    log.debug("Test %s failed", case)
                    log.debug("Expected:\n\t%s\nGot\n\t%s", output, correct_output)
                    log.debug("Difference:\n\t%s", diff)
                self.assertEqual(output, correct_output)

    def test_get_policies(self):
        """
        Ensure _get_policies works as intended, including expansion of lists
        """
        cases = {
            "no-tokens-to-replace": ["no-tokens-to-replace"],
            "single-dict:{minion}": ["single-dict:test-minion"],
            "single-list:{grains[roles]}": ["single-list:web", "single-list:database"],
            "multiple-lists:{grains[roles]}+{grains[aux]}": [
                "multiple-lists:web+foo",
                "multiple-lists:web+bar",
                "multiple-lists:database+foo",
                "multiple-lists:database+bar",
            ],
            "single-list-with-dicts:{grains[id]}+{grains[roles]}+{grains[id]}": [
                "single-list-with-dicts:test-minion+web+test-minion",
                "single-list-with-dicts:test-minion+database+test-minion",
            ],
            "deeply-nested-list:{grains[deep][foo][bar][baz]}": [
                "deeply-nested-list:hello",
                "deeply-nested-list:world",
            ],
            "should-not-cause-an-exception,but-result-empty:{foo}": [],
            "Case-Should-Be-Lowered:{grains[mixedcase]}": [
                "case-should-be-lowered:up-low-up"
            ],
        }

        with patch(
            "salt.utils.minions.get_minion_data",
            MagicMock(return_value=(None, self.grains, None)),
        ):
            for case, correct_output in cases.items():
                test_config = {"policies": [case]}
                output = vault._get_policies(
                    "test-minion", test_config
                )  # pylint: disable=protected-access
                diff = set(output).symmetric_difference(set(correct_output))
                if diff:
                    log.debug("Test %s failed", case)
                    log.debug("Expected:\n\t%s\nGot\n\t%s", output, correct_output)
                    log.debug("Difference:\n\t%s", diff)
                self.assertEqual(output, correct_output)

    def test_get_token_create_url(self):
        """
        Ensure _get_token_create_url parses config correctly
        """
        self.assertEqual(
            vault._get_token_create_url(  # pylint: disable=protected-access
                {"url": "http://127.0.0.1"}
            ),
            "http://127.0.0.1/v1/auth/token/create",
        )
        self.assertEqual(
            vault._get_token_create_url(  # pylint: disable=protected-access
                {"url": "https://127.0.0.1/"}
            ),
            "https://127.0.0.1/v1/auth/token/create",
        )
        self.assertEqual(
            vault._get_token_create_url(  # pylint: disable=protected-access
                {"url": "http://127.0.0.1:8200", "role_name": "therole"}
            ),
            "http://127.0.0.1:8200/v1/auth/token/create/therole",
        )
        self.assertEqual(
            vault._get_token_create_url(  # pylint: disable=protected-access
                {"url": "https://127.0.0.1/test", "role_name": "therole"}
            ),
            "https://127.0.0.1/test/v1/auth/token/create/therole",
        )


def _mock_json_response(data, status_code=200, reason=""):
    """
    Mock helper for http response
    """
    response = MagicMock()
    response.json = MagicMock(return_value=data)
    response.status_code = status_code
    response.reason = reason
    return Mock(return_value=response)


class VaultTokenAuthTest(TestCase, LoaderModuleMockMixin):
    """
    Tests for the runner module of the Vault with token setup
    """

    def setup_loader_modules(self):
        return {
            vault: {
                "__opts__": {
                    "vault": {
                        "url": "http://127.0.0.1",
                        "auth": {
                            "token": "test",
                            "method": "token",
                            "allow_minion_override": True,
                        },
                    }
                }
            }
        }

    @patch("salt.runners.vault._validate_signature", MagicMock(return_value=None))
    @patch(
        "salt.runners.vault._get_token_create_url",
        MagicMock(return_value="http://fake_url"),
    )
    def test_generate_token(self):
        """
        Basic tests for test_generate_token: all exits
        """
        mock = _mock_json_response(
            {"auth": {"client_token": "test", "renewable": False, "lease_duration": 0}}
        )
        with patch("requests.post", mock):
            result = vault.generate_token("test-minion", "signature")
            log.debug("generate_token result: %s", result)
            self.assertTrue(isinstance(result, dict))
            self.assertFalse("error" in result)
            self.assertTrue("token" in result)
            self.assertEqual(result["token"], "test")
            mock.assert_called_with(
                "http://fake_url", headers=ANY, json=ANY, verify=ANY
            )

            # Test uses
            num_uses = 6
            result = vault.generate_token("test-minion", "signature", uses=num_uses)
            self.assertTrue("uses" in result)
            self.assertEqual(result["uses"], num_uses)
            json_request = {
                "policies": ["saltstack/minion/test-minion", "saltstack/minions"],
                "num_uses": num_uses,
                "meta": {
                    "saltstack-jid": "<no jid set>",
                    "saltstack-minion": "test-minion",
                    "saltstack-user": "<no user set>",
                },
            }
            mock.assert_called_with(
                "http://fake_url", headers=ANY, json=json_request, verify=ANY
            )

            # Test ttl
            expected_ttl = "6h"
            result = vault.generate_token("test-minion", "signature", ttl=expected_ttl)
            self.assertTrue(result["uses"] == 1)
            json_request = {
                "policies": ["saltstack/minion/test-minion", "saltstack/minions"],
                "num_uses": 1,
                "explicit_max_ttl": expected_ttl,
                "meta": {
                    "saltstack-jid": "<no jid set>",
                    "saltstack-minion": "test-minion",
                    "saltstack-user": "<no user set>",
                },
            }
            mock.assert_called_with(
                "http://fake_url", headers=ANY, json=json_request, verify=ANY
            )

        mock = _mock_json_response({}, status_code=403, reason="no reason")
        with patch("requests.post", mock):
            result = vault.generate_token("test-minion", "signature")
            self.assertTrue(isinstance(result, dict))
            self.assertTrue("error" in result)
            self.assertEqual(result["error"], "no reason")

        with patch("salt.runners.vault._get_policies", MagicMock(return_value=[])):
            result = vault.generate_token("test-minion", "signature")
            self.assertTrue(isinstance(result, dict))
            self.assertTrue("error" in result)
            self.assertEqual(result["error"], "No policies matched minion")

        with patch(
            "requests.post", MagicMock(side_effect=Exception("Test Exception Reason"))
        ):
            result = vault.generate_token("test-minion", "signature")
            self.assertTrue(isinstance(result, dict))
            self.assertTrue("error" in result)
            self.assertEqual(result["error"], "Test Exception Reason")

    @patch("salt.runners.vault._validate_signature", MagicMock(return_value=None))
    @patch(
        "salt.runners.vault._get_token_create_url",
        MagicMock(return_value="http://fake_url"),
    )
    def test_generate_token_with_namespace(self):
        """
        Basic tests for test_generate_token: all exits
        """
        mock = _mock_json_response(
            {"auth": {"client_token": "test", "renewable": False, "lease_duration": 0}}
        )
        supplied_config = {"namespace": "test_namespace"}
        with patch("requests.post", mock):
            with patch.dict(vault.__opts__["vault"], supplied_config):
                result = vault.generate_token("test-minion", "signature")
                log.debug("generate_token result: %s", result)
                self.assertIsInstance(result, dict)
                self.assertNotIn("error", result)
                self.assertIn("token", result)
                self.assertEqual(result["token"], "test")
                mock.assert_called_with(
                    "http://fake_url",
                    headers={
                        "X-Vault-Token": "test",
                        "X-Vault-Namespace": "test_namespace",
                    },
                    json=ANY,
                    verify=ANY,
                )


class VaultAppRoleAuthTest(TestCase, LoaderModuleMockMixin):
    """
    Tests for the runner module of the Vault with approle setup
    """

    def setup_loader_modules(self):
        return {
            vault: {
                "__opts__": {
                    "vault": {
                        "url": "http://127.0.0.1",
                        "auth": {
                            "method": "approle",
                            "role_id": "role",
                            "secret_id": "secret",
                        },
                    }
                }
            }
        }

    @patch("salt.runners.vault._validate_signature", MagicMock(return_value=None))
    @patch(
        "salt.runners.vault._get_token_create_url",
        MagicMock(return_value="http://fake_url"),
    )
    def test_generate_token(self):
        """
        Basic test for test_generate_token with approle (two vault calls)
        """
        mock = _mock_json_response(
            {"auth": {"client_token": "test", "renewable": False, "lease_duration": 0}}
        )
        with patch("requests.post", mock):
            result = vault.generate_token("test-minion", "signature")
            log.debug("generate_token result: %s", result)
            self.assertTrue(isinstance(result, dict))
            self.assertFalse("error" in result)
            self.assertTrue("token" in result)
            self.assertEqual(result["token"], "test")
            calls = [
                call(
                    "http://127.0.0.1/v1/auth/approle/login",
                    headers=ANY,
                    json=ANY,
                    verify=ANY,
                ),
                call("http://fake_url", headers=ANY, json=ANY, verify=ANY),
            ]
            mock.assert_has_calls(calls)