mirror of
https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00
migrate pillar unit tests to pytest (#62745)
This commit is contained in:
parent
b13e585d30
commit
8696b2859a
27 changed files with 3531 additions and 3461 deletions
333
tests/pytests/unit/pillar/test_azureblob.py
Normal file
333
tests/pytests/unit/pillar/test_azureblob.py
Normal file
|
@ -0,0 +1,333 @@
|
|||
"""
|
||||
Tests for the Azure Blob External Pillar.
|
||||
"""
|
||||
|
||||
import pickle
|
||||
import time
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.pillar.azureblob as azureblob
|
||||
import salt.utils.files
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
try:
    # pylint: disable=no-name-in-module
    from azure.storage.blob import BlobServiceClient

    # pylint: enable=no-name-in-module

    HAS_LIBS = True
except ImportError:
    # The azure SDK is optional; tests below are skipped when it is absent.
    HAS_LIBS = False


pytestmark = [
    pytest.mark.skipif(
        HAS_LIBS is False,
        reason="The azure.storage.blob module must be installed.",
    )
]
|
||||
|
||||
|
||||
class MockBlob(dict):
    """
    Mock of a single Azure blob listing entry, exposed as a dict.
    """

    # Attribute counterpart of the entry name, as on real blob objects.
    name = ""

    def __init__(self):
        # Pre-populate with the fields a real blob listing entry carries.
        entry = {
            "container": None,
            "name": "test.sls",
            "prefix": None,
            "delimiter": "/",
            "results_per_page": None,
            "location_mode": None,
        }
        super().__init__(entry)
|
||||
|
||||
|
||||
class MockContainerClient:
    """
    Mock of an Azure ContainerClient.
    """

    def __init__(self):
        pass

    def walk_blobs(self, *args, **kwargs):
        # Mimic the paged blob iterator by yielding one fake entry.
        yield MockBlob()

    def get_blob_client(self, *args, **kwargs):
        # No real blob client is required by these tests.
        pass
|
||||
|
||||
|
||||
class MockBlobServiceClient:
    """
    Mock of an Azure BlobServiceClient.
    """

    def __init__(self):
        pass

    def get_container_client(self, *args, **kwargs):
        # Hand back a fresh mock container client on every call.
        return MockContainerClient()
|
||||
|
||||
|
||||
@pytest.fixture
def cachedir(tmp_path):
    """Provide a dedicated cache directory beneath the test's tmp_path."""
    cache_path = tmp_path / "cachedir"
    cache_path.mkdir(parents=True, exist_ok=True)
    return cache_path
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules(cachedir, tmp_path):
    """Wire up azureblob's ``__opts__`` with temp pillar roots and cachedir."""
    pillar_roots = {}
    for saltenv in ("base", "prod"):
        env_dir = tmp_path / saltenv
        env_dir.mkdir(parents=True, exist_ok=True)
        pillar_roots[saltenv] = [str(env_dir)]
    return {
        azureblob: {
            "__opts__": {
                "cachedir": cachedir,
                "pillar_roots": pillar_roots,
            }
        },
    }
|
||||
|
||||
|
||||
def test__init_expired(tmp_path):
    """
    Tests the result of _init when the cache is expired.
    """
    blob_client = MockBlobServiceClient()
    cache_file = tmp_path / "cache_file"
    # An expiry of 0 forces _init to treat the cache as stale and refresh it.
    blob_cache_expire = 0
    # Point the cache-filename helper at our temp file, and make
    # BlobServiceClient.from_connection_string return the mock client so no
    # real connection string is needed; the mock supplies example blob data.
    with patch.object(
        azureblob,
        "_get_containers_cache_filename",
        MagicMock(return_value=str(cache_file)),
    ), patch.object(
        BlobServiceClient,
        "from_connection_string",
        MagicMock(return_value=blob_client),
    ):
        ret = azureblob._init("", "test", False, "base", blob_cache_expire)

    expected = {
        "base": {
            "test": [
                {
                    "container": None,
                    "name": "test.sls",
                    "prefix": None,
                    "delimiter": "/",
                    "results_per_page": None,
                    "location_mode": None,
                }
            ]
        }
    }
    assert ret == expected
|
||||
|
||||
|
||||
def test__init_not_expired(tmp_path):
    """
    Tests the result of _init when the cache is not expired.
    """
    metadata = {
        "base": {
            "test": [
                {"name": "base/secret.sls", "relevant": "include.sls"},
                {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
            ]
        }
    }
    # A huge expiry timestamp guarantees the cache is considered fresh.
    blob_cache_expire = (time.time()) * (time.time())
    cache_file = tmp_path / "cache_file"
    # Persist the metadata the way the real cache writer would (pickled).
    with salt.utils.files.fopen(str(cache_file), "wb") as fp_:
        pickle.dump(metadata, fp_)
    # Read the cache back once so the patched reader returns realistic data.
    plugged = azureblob._read_containers_cache_file(str(cache_file))
    with patch.object(
        azureblob,
        "_get_containers_cache_filename",
        MagicMock(return_value=str(cache_file)),
    ), patch.object(
        azureblob,
        "_read_containers_cache_file",
        MagicMock(return_value=plugged),
    ):
        ret = azureblob._init("", "test", False, "base", blob_cache_expire)
    assert ret == metadata
|
||||
|
||||
|
||||
def test__get_cache_dir(cachedir):
    """
    Tests the result of _get_cache_dir.
    """
    expected = str(cachedir / "pillar_azureblob")
    assert azureblob._get_cache_dir() == expected
|
||||
|
||||
|
||||
def test__get_cached_file_name(cachedir):
    """
    Tests the result of _get_cached_file_name.
    """
    container, saltenv, path = "test", "base", "base/secret.sls"
    ret = azureblob._get_cached_file_name(container, saltenv, path)
    # The cached name nests under pillar_azureblob/<saltenv>/<container>/<path>.
    assert ret == str(cachedir / "pillar_azureblob" / saltenv / container / path)
|
||||
|
||||
|
||||
def test__get_containers_cache_filename(cachedir):
    """
    Tests the result of _get_containers_cache_filename.
    """
    ret = azureblob._get_containers_cache_filename("test")
    assert ret == str(cachedir / "pillar_azureblob" / "test-files.cache")
|
||||
|
||||
|
||||
def test__refresh_containers_cache_file(tmp_path):
    """
    _refresh_containers_cache_file must copy the blob listing into the cache
    file and return the resulting metadata structure.
    """
    cache_file = tmp_path / "cache_file"
    # The mock service client yields a single MockBlob per container walk.
    with patch.object(
        BlobServiceClient,
        "from_connection_string",
        MagicMock(return_value=MockBlobServiceClient()),
    ):
        ret = azureblob._refresh_containers_cache_file("", "test", str(cache_file))
    expected = {
        "base": {
            "test": [
                {
                    "container": None,
                    "name": "test.sls",
                    "prefix": None,
                    "delimiter": "/",
                    "results_per_page": None,
                    "location_mode": None,
                }
            ]
        }
    }
    assert ret == expected
|
||||
|
||||
|
||||
def test__read_containers_cache_file(tmp_path):
    """
    _read_containers_cache_file must load pickled metadata back intact.
    """
    metadata = {
        "base": {
            "test": [
                {"name": "base/secret.sls", "relevant": "include.sls"},
                {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
            ]
        }
    }
    cache_file = tmp_path / "cache_file"
    # Write the pickled metadata exactly as the cache writer would.
    with salt.utils.files.fopen(str(cache_file), "wb") as fp_:
        pickle.dump(metadata, fp_)
    # Round-trip: the reader must reproduce what was pickled.
    assert azureblob._read_containers_cache_file(str(cache_file)) == metadata
|
||||
|
||||
|
||||
def test__find_files():
    """
    _find_files must return file names only, skipping directory entries
    (names ending in "/") and ignoring unrelated metadata keys.
    """
    metadata = {
        "test": [
            {"name": "base/secret.sls"},
            {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
            {"name": "base/"},
        ]
    }
    found = azureblob._find_files(metadata)
    assert found == {"test": ["base/secret.sls", "blobtest.sls"]}
|
||||
|
||||
|
||||
def test__find_file_meta1():
    """
    _find_file_meta returns the matching blob entry when the metadata holds
    one blob with the requested path and one without.
    """
    metadata = {
        "base": {
            "test": [
                {"name": "base/secret.sls", "relevant": "include.sls"},
                {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
            ]
        }
    }
    ret = azureblob._find_file_meta(metadata, "test", "base", "base/secret.sls")
    assert ret == {"name": "base/secret.sls", "relevant": "include.sls"}
|
||||
|
||||
|
||||
def test__find_file_meta2():
    """
    _find_file_meta returns None when the metadata's saltenv does not match
    the requested saltenv.
    """
    metadata = {"wrong": {"test": [{"name": "base/secret.sls"}]}}
    ret = azureblob._find_file_meta(metadata, "test", "base", "base/secret.sls")
    assert ret is None
|
||||
|
||||
|
||||
def test__find_file_meta3():
    """
    _find_file_meta returns None when the metadata's container does not match
    the requested container.
    """
    metadata = {"base": {"wrong": [{"name": "base/secret.sls"}]}}
    ret = azureblob._find_file_meta(metadata, "test", "base", "base/secret.sls")
    assert ret is None
|
164
tests/pytests/unit/pillar/test_consul_pillar.py
Normal file
164
tests/pytests/unit/pillar/test_consul_pillar.py
Normal file
|
@ -0,0 +1,164 @@
|
|||
import pytest
|
||||
|
||||
import salt.pillar.consul_pillar as consul_pillar
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(
|
||||
not consul_pillar.consul, reason="python-consul module not installed"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
def base_pillar_data():
    """Raw consul KV listing (Key/Value pairs) used by the ext_pillar tests."""
    entries = [
        ("test-shared/sites/testsite1/ssl/certs/SSLCertificateFile", "/path/to/certs/testsite1.crt"),
        ("test-shared/sites/testsite1/ssl/certs/SSLCertificateKeyFile", "/path/to/certs/testsite1.key"),
        ("test-shared/sites/testsite1/ssl/certs/", None),
        ("test-shared/sites/testsite1/ssl/force", "True"),
        ("test-shared/sites/testsite1/ssl/", None),
        ("test-shared/sites/testsite1/template", "salt://sites/testsite1.tmpl"),
        ("test-shared/sites/testsite1/uri", "test.example.com"),
        ("test-shared/sites/testsite1/", None),
        ("test-shared/sites/", None),
        ("test-shared/user/full_name", "Test User"),
        ("test-shared/user/groups", "adm\nwww-data\nmlocate"),
        ("test-shared/user/dontsplit", '"adm\nwww-data\nmlocate"'),
        ("test-shared/user/dontexpand", "yaml:\n key: value\n"),
        ("test-shared/user/blankvalue", None),
        ("test-shared/user/login", "test"),
        ("test-shared/user/", None),
    ]
    # consul returns a list of {"Value": ..., "Key": ...} mappings; entries
    # with a None Value and a trailing "/" are directory markers.
    return [{"Value": value, "Key": key} for key, value in entries]
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Provide consul connection opts and stub out get_conn for the module."""
    opts = {"consul_config": {"consul.port": 8500, "consul.host": "172.17.0.15"}}
    return {
        consul_pillar: {
            "__opts__": opts,
            "get_conn": MagicMock(return_value="consul_connection"),
        }
    }
|
||||
|
||||
|
||||
def test_connection(base_pillar_data):
    """ext_pillar must obtain its connection via get_conn with the profile name."""
    grains_stub = {"grains.get": MagicMock(return_value=({}))}
    fetch_stub = MagicMock(return_value=("2232", base_pillar_data))
    with patch.dict(consul_pillar.__salt__, grains_stub), patch.object(
        consul_pillar, "consul_fetch", fetch_stub
    ):
        consul_pillar.ext_pillar("testminion", {}, "consul_config root=test-shared/")
        consul_pillar.get_conn.assert_called_once_with(
            consul_pillar.__opts__, "consul_config"
        )
|
||||
|
||||
|
||||
def test_pillar_data(base_pillar_data):
    """Fetched KV data is nested under its top-level keys; blank values drop."""
    grains_stub = {"grains.get": MagicMock(return_value=({}))}
    fetch_stub = MagicMock(return_value=("2232", base_pillar_data))
    with patch.dict(consul_pillar.__salt__, grains_stub), patch.object(
        consul_pillar, "consul_fetch", fetch_stub
    ):
        pillar_data = consul_pillar.ext_pillar(
            "testminion", {}, "consul_config root=test-shared/"
        )
        # The root prefix is stripped before the fetch call.
        consul_pillar.consul_fetch.assert_called_once_with(
            "consul_connection", "test-shared"
        )
        assert sorted(pillar_data) == ["sites", "user"]
        # Keys with a None value must not appear in the pillar.
        assert "blankvalue" not in pillar_data["user"]
|
||||
|
||||
|
||||
def test_blank_root(base_pillar_data):
    """Without a root= option, the whole KV tree lands under its prefix."""
    grains_stub = {"grains.get": MagicMock(return_value=({}))}
    fetch_stub = MagicMock(return_value=("2232", base_pillar_data))
    with patch.dict(consul_pillar.__salt__, grains_stub), patch.object(
        consul_pillar, "consul_fetch", fetch_stub
    ):
        pillar_data = consul_pillar.ext_pillar("testminion", {}, "consul_config")
        consul_pillar.consul_fetch.assert_called_once_with("consul_connection", "")
        assert sorted(pillar_data) == ["test-shared"]
|
||||
|
||||
|
||||
def test_pillar_nest(base_pillar_data):
    """pillar_root= nests the fetched data under the requested key."""
    grains_stub = {"grains.get": MagicMock(return_value=({}))}
    fetch_stub = MagicMock(return_value=("2232", base_pillar_data))
    with patch.dict(consul_pillar.__salt__, grains_stub), patch.object(
        consul_pillar, "consul_fetch", fetch_stub
    ):
        pillar_data = consul_pillar.ext_pillar(
            "testminion",
            {},
            "consul_config pillar_root=nested-key/ root=test-shared/ ",
        )
        assert sorted(pillar_data["nested-key"]) == ["sites", "user"]
        assert "blankvalue" not in pillar_data["nested-key"]["user"]
|
||||
|
||||
|
||||
def test_value_parsing(base_pillar_data):
    """Quoted multi-line values must stay a single string, not be split."""
    grains_stub = {"grains.get": MagicMock(return_value=({}))}
    fetch_stub = MagicMock(return_value=("2232", base_pillar_data))
    with patch.dict(consul_pillar.__salt__, grains_stub), patch.object(
        consul_pillar, "consul_fetch", fetch_stub
    ):
        pillar_data = consul_pillar.ext_pillar(
            "testminion", {}, "consul_config root=test-shared/"
        )
        assert isinstance(pillar_data["user"]["dontsplit"], str)
|
||||
|
||||
|
||||
def test_non_expansion(base_pillar_data):
    """With expand_keys=false, YAML-looking values stay raw strings."""
    grains_stub = {"grains.get": MagicMock(return_value=({}))}
    fetch_stub = MagicMock(return_value=("2232", base_pillar_data))
    with patch.dict(consul_pillar.__salt__, grains_stub), patch.object(
        consul_pillar, "consul_fetch", fetch_stub
    ):
        pillar_data = consul_pillar.ext_pillar(
            "testminion",
            {},
            "consul_config root=test-shared/ expand_keys=false",
        )
        assert isinstance(pillar_data["user"]["dontexpand"], str)
|
||||
|
||||
|
||||
def test_dict_merge():
    """dict_merge must recursively combine nested dictionaries."""
    test_dict = {}
    simple_dict = {"key1": {"key2": "val1"}}
    # Merging into an identical dict is a no-op.
    with patch.dict(test_dict, simple_dict):
        assert consul_pillar.dict_merge(test_dict, simple_dict) == simple_dict
    # Sibling subkeys under the same top-level key are combined, not replaced.
    with patch.dict(test_dict, {"key1": {"key3": {"key4": "value"}}}):
        assert consul_pillar.dict_merge(test_dict, simple_dict) == {
            "key1": {"key2": "val1", "key3": {"key4": "value"}}
        }
|
35
tests/pytests/unit/pillar/test_csvpillar.py
Normal file
35
tests/pytests/unit/pillar/test_csvpillar.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
"""test for pillar csvpillar.py"""
|
||||
|
||||
|
||||
import salt.pillar.csvpillar as csvpillar
|
||||
from tests.support.mock import mock_open, patch
|
||||
|
||||
|
||||
def test_001_load_utf8_csv():
    """A CSV row matching the minion id is returned as a flat dict."""
    csv_text = "id,foo,bar\r\nminion1,foo1,bar1"
    expected = {"id": "minion1", "foo": "foo1", "bar": "bar1"}
    with patch("salt.utils.files.fopen", mock_open(csv_text)):
        result = csvpillar.ext_pillar(
            mid="minion1",
            pillar=None,
            path="/fake/path/file.csv",
            idkey="id",
            namespace=None,
        )
    assert result == expected
|
||||
|
||||
|
||||
def test_002_load_utf8_csv_namespc():
    """With a namespace, the row dict is nested under that namespace key."""
    csv_text = "id,foo,bar\r\nminion1,foo1,bar1"
    expected = {"baz": {"id": "minion1", "foo": "foo1", "bar": "bar1"}}
    with patch("salt.utils.files.fopen", mock_open(csv_text)):
        result = csvpillar.ext_pillar(
            mid="minion1",
            pillar=None,
            path="/fake/path/file.csv",
            idkey="id",
            namespace="baz",
        )
    assert result == expected
|
|
@ -0,0 +1,53 @@
|
|||
import pytest
|
||||
|
||||
from salt.pillar import extra_minion_data_in_pillar
|
||||
from tests.support.mock import MagicMock
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """No special opts are needed; load the module with empty globals."""
    return {extra_minion_data_in_pillar: {}}
|
||||
|
||||
|
||||
@pytest.fixture
def extra_minion_data():
    """Sample extra minion data with flat, nested, and doubly nested keys."""
    return {
        "key1": {"subkey1": "value1"},
        "key2": {"subkey2": {"subsubkey2": "value2"}},
        "key3": "value3",
        "key4": {"subkey4": "value4"},
    }
|
||||
|
||||
|
||||
def test_extra_values_none_or_empty():
    """Both None and {} extra_minion_data must yield an empty pillar."""
    for empty_value in (None, {}):
        ret = extra_minion_data_in_pillar.ext_pillar(
            "fake_id", MagicMock(), "fake_include", empty_value
        )
        assert ret == {}
|
||||
|
||||
|
||||
def test_include_all(extra_minion_data):
    """Either wildcard spelling ('*' or '<all>') includes everything."""
    for include_all in ["*", "<all>"]:
        ret = extra_minion_data_in_pillar.ext_pillar(
            "fake_id", MagicMock(), include_all, extra_minion_data
        )
        assert ret == extra_minion_data
|
||||
|
||||
|
||||
def test_include_specific_keys(extra_minion_data):
    """Only requested keys/subkeys survive; nonexistent subkeys are dropped."""
    # key2:subkey3 does not exist in the data, so key2 must not appear.
    ret = extra_minion_data_in_pillar.ext_pillar(
        "fake_id",
        MagicMock(),
        include=["key1:subkey1", "key2:subkey3", "key3", "key4"],
        extra_minion_data=extra_minion_data,
    )
    expected = {
        "key1": {"subkey1": "value1"},
        "key3": "value3",
        "key4": {"subkey4": "value4"},
    }
    assert ret == expected
|
160
tests/pytests/unit/pillar/test_file_tree.py
Normal file
160
tests/pytests/unit/pillar/test_file_tree.py
Normal file
|
@ -0,0 +1,160 @@
|
|||
"""
|
||||
test for pillar file_tree.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.pillar.file_tree as file_tree
|
||||
import salt.utils.files
|
||||
import salt.utils.stringutils
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
def minion_id():
    """Minion id used consistently throughout the file_tree layout."""
    return "test-host"
|
||||
|
||||
|
||||
@pytest.fixture
def base_pillar_content():
    """Expected pillar when only the base environment is consulted."""
    return {"files": {"hostfile": b"base", "groupfile": b"base"}}
|
||||
|
||||
|
||||
@pytest.fixture
def dev_pillar_content():
    """Expected pillar for the dev env: base merged with dev1 and dev2."""
    return {
        "files": {
            "hostfile": b"base",
            # groupfile exists in base and dev2; dev2 wins the merge.
            "groupfile": b"dev2",
            "hostfile1": b"dev1",
            "groupfile1": b"dev1",
            "hostfile2": b"dev2",
        }
    }
|
||||
|
||||
|
||||
@pytest.fixture
def parent_pillar_content():
    """Expected pillar when roots point at subdirs and '..' walks up."""
    return {"files": {"hostfile": b"base", "groupfile": b"base", "hostfile2": b"dev2"}}
|
||||
|
||||
|
||||
@pytest.fixture
def pillar_path(tmp_path):
    """Root directory holding the on-disk file_tree pillar layout."""
    return tmp_path / "file_tree"
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules(tmp_path, minion_id, pillar_path):
    """Populate a file_tree pillar layout on disk and wire up __opts__."""
    cachedir = tmp_path / "cachedir"
    nodegroup_path = pathlib.Path("nodegroups", "test-group", "files")
    host_path = pathlib.Path("hosts", minion_id, "files")
    file_data = {
        (pillar_path / "base" / host_path / "hostfile"): "base",
        (pillar_path / "dev1" / host_path / "hostfile1"): "dev1",
        (pillar_path / "dev2" / host_path / "hostfile2"): "dev2",
        (pillar_path / "base" / nodegroup_path / "groupfile"): "base",
        (pillar_path / "dev1" / nodegroup_path / "groupfile1"): "dev1",
        (pillar_path / "dev2" / nodegroup_path / "groupfile"): "dev2",  # test merging
    }
    for filename, contents in file_data.items():
        filename.parent.mkdir(parents=True, exist_ok=True)
        filename.write_text(salt.utils.stringutils.to_str(contents))

    opts = {
        "cachedir": cachedir,
        "pillar_roots": {
            "base": [str(pillar_path / "base")],
            "dev": [
                str(pillar_path / "base"),
                str(pillar_path / "dev1"),
                str(pillar_path / "dev2"),
            ],
            # "parent" roots point at subdirectories so the ".." relative
            # path test can walk back up to the real env directories.
            "parent": [
                str(pillar_path / "base" / "sub1"),
                str(pillar_path / "dev2" / "sub"),
                str(pillar_path / "base" / "sub2"),
            ],
        },
        "pillarenv": "base",
        "nodegroups": {"test-group": [minion_id]},
        "optimization_order": [0, 1, 2],
        "file_buffer_size": 262144,
        "file_roots": {"base": "", "dev": "", "parent": ""},
        "extension_modules": "",
        "renderer": "yaml_jinja",
        "renderer_blacklist": [],
        "renderer_whitelist": [],
    }
    # Pretend the minion matcher always resolves to our single test minion.
    with patch(
        "salt.utils.minions.CkMinions.check_minions",
        MagicMock(return_value={"minions": [minion_id], "missing": []}),
    ):
        yield {file_tree: {"__opts__": opts}}
|
||||
|
||||
|
||||
def test_absolute_path(base_pillar_content, minion_id, pillar_path):
    """
    check file tree is imported correctly with an absolute path
    """
    absolute_path = pillar_path / "base"
    # Absolute paths ignore pillarenv entirely, so both envs see base content.
    assert file_tree.ext_pillar(minion_id, None, str(absolute_path)) == base_pillar_content

    with patch.dict(file_tree.__opts__, {"pillarenv": "dev"}):
        assert file_tree.ext_pillar(minion_id, None, absolute_path) == base_pillar_content
|
||||
|
||||
|
||||
def test_relative_path(base_pillar_content, dev_pillar_content, minion_id):
    """
    check file tree is imported correctly with a relative path
    """
    # Relative paths resolve against the active pillarenv's pillar_roots.
    assert file_tree.ext_pillar(minion_id, None, ".") == base_pillar_content

    with patch.dict(file_tree.__opts__, {"pillarenv": "dev"}):
        assert file_tree.ext_pillar(minion_id, None, ".") == dev_pillar_content
|
||||
|
||||
|
||||
def test_parent_path(parent_pillar_content, minion_id):
    """
    check if file tree is merged correctly with a .. path
    """
    with patch.dict(file_tree.__opts__, {"pillarenv": "parent"}):
        assert file_tree.ext_pillar(minion_id, None, "..") == parent_pillar_content
|
||||
|
||||
|
||||
def test_no_pillarenv(minion_id, caplog):
    """
    confirm that file_tree yells when pillarenv is missing for a relative path
    """
    with patch.dict(file_tree.__opts__, {"pillarenv": None}):
        assert file_tree.ext_pillar(minion_id, None, ".") == {}

    # An ERROR about the unset pillarenv must have been logged.
    assert any(
        record.levelname == "ERROR" and "pillarenv is not set" in record.message
        for record in caplog.records
    ), "Did not find error message"
|
||||
|
||||
|
||||
def test_file_tree_bytes(pillar_path, minion_id, base_pillar_content):
    """
    test file_tree pillar returns bytes

    File contents must surface as ``bytes`` values. Also verifies that, when
    only the base directory is given, the dev pillarenv does not override
    groupfile.
    """
    # Use pathlib like the rest of this module (test_absolute_path) instead
    # of mixing in os.path.join on a Path; ext_pillar still receives a str.
    absolute_path = str(pillar_path / "base")
    mypillar = file_tree.ext_pillar(minion_id, None, absolute_path)
    assert base_pillar_content == mypillar

    with patch.dict(file_tree.__opts__, {"pillarenv": "dev"}):
        mypillar = file_tree.ext_pillar(minion_id, None, absolute_path)
        assert mypillar["files"]["groupfile"] == b"base"
|
916
tests/pytests/unit/pillar/test_mysql.py
Normal file
916
tests/pytests/unit/pillar/test_mysql.py
Normal file
|
@ -0,0 +1,916 @@
|
|||
import pytest
|
||||
|
||||
import salt.pillar.mysql as mysql
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(
|
||||
mysql.MySQLdb is None, reason="MySQL-python module not installed"
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
def test_001_extract_queries_legacy():
    """A single bare query string is normalized into a full descriptor."""
    pillar = mysql.MySQLExtPillar()
    qbuffer = pillar.extract_queries(["SELECT blah"], {})
    expected = [
        [
            None,
            {
                "query": "SELECT blah",
                "depth": 0,
                "as_list": False,
                "as_json": False,
                "with_lists": None,
                "ignore_null": False,
            },
        ]
    ]
    assert qbuffer == expected
|
||||
|
||||
|
||||
def test_002_extract_queries_list():
    """Every positional query form normalizes to the same descriptor shape."""

    def qdesc(query, **overrides):
        # Default descriptor produced by extract_queries, with overrides.
        desc = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        desc.update(overrides)
        return desc

    pillar = mysql.MySQLExtPillar()
    args = [
        "SELECT blah",
        "SELECT blah2",
        ("SELECT blah3",),
        ("SELECT blah4", 2),
        {"query": "SELECT blah5"},
        {"query": "SELECT blah6", "depth": 2},
        {"query": "SELECT blah7", "as_list": True},
        {"query": "SELECT blah8", "with_lists": "1"},
        {"query": "SELECT blah9", "with_lists": "1,2"},
        {"query": "SELECT json1", "as_json": True},
    ]
    qbuffer = pillar.extract_queries(args, {})
    expected = [
        [None, qdesc("SELECT blah")],
        [None, qdesc("SELECT blah2")],
        [None, qdesc("SELECT blah3")],
        [None, qdesc("SELECT blah4", depth=2)],
        [None, qdesc("SELECT blah5")],
        [None, qdesc("SELECT blah6", depth=2)],
        [None, qdesc("SELECT blah7", as_list=True)],
        # with_lists strings are parsed into lists of column indices.
        [None, qdesc("SELECT blah8", with_lists=[1])],
        [None, qdesc("SELECT blah9", with_lists=[1, 2])],
        [None, qdesc("SELECT json1", as_json=True)],
    ]
    assert qbuffer == expected
|
||||
|
||||
|
||||
def test_003_extract_queries_kwarg():
    """Keyword queries keep their key and normalize to full descriptors."""

    def qdesc(query, **overrides):
        # Default descriptor produced by extract_queries, with overrides.
        desc = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        desc.update(overrides)
        return desc

    pillar = mysql.MySQLExtPillar()
    kwargs = {
        "1": "SELECT blah",
        "2": "SELECT blah2",
        "3": ("SELECT blah3",),
        "4": ("SELECT blah4", 2),
        "5": {"query": "SELECT blah5"},
        "6": {"query": "SELECT blah6", "depth": 2},
        "7": {"query": "SELECT blah7", "as_list": True},
        "8": {"query": "SELECT json1", "as_json": True},
    }
    qbuffer = pillar.extract_queries([], kwargs)
    expected = [
        ["1", qdesc("SELECT blah")],
        ["2", qdesc("SELECT blah2")],
        ["3", qdesc("SELECT blah3")],
        ["4", qdesc("SELECT blah4", depth=2)],
        ["5", qdesc("SELECT blah5")],
        ["6", qdesc("SELECT blah6", depth=2)],
        ["7", qdesc("SELECT blah7", as_list=True)],
        ["8", qdesc("SELECT json1", as_json=True)],
    ]
    assert qbuffer == expected
|
||||
|
||||
|
||||
def test_004_extract_queries_mixed():
    """Positional queries come first (keyed None), then keyword queries."""

    def qdesc(query, **overrides):
        # Default descriptor produced by extract_queries, with overrides.
        desc = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        desc.update(overrides)
        return desc

    pillar = mysql.MySQLExtPillar()
    args = [
        "SELECT blah1",
        ("SELECT blah2", 2),
        {"query": "SELECT blah3", "as_list": True},
    ]
    kwargs = {
        "1": "SELECT blah1",
        "2": ("SELECT blah2", 2),
        "3": {"query": "SELECT blah3", "as_list": True},
    }
    qbuffer = pillar.extract_queries(args, kwargs)
    expected = [
        [None, qdesc("SELECT blah1")],
        [None, qdesc("SELECT blah2", depth=2)],
        [None, qdesc("SELECT blah3", as_list=True)],
        ["1", qdesc("SELECT blah1")],
        ["2", qdesc("SELECT blah2", depth=2)],
        ["3", qdesc("SELECT blah3", as_list=True)],
    ]
    assert qbuffer == expected
|
||||
|
||||
|
||||
def test_005_extract_queries_bogus_list():
    """Empty or malformed positional entries are silently dropped."""

    def q(query, depth=0, as_list=False):
        # Fully-defaulted query dict as emitted by extract_queries.
        return {
            "query": query,
            "depth": depth,
            "as_list": as_list,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }

    pillar = mysql.MySQLExtPillar()
    args = [
        "SELECT blah",
        "",
        "SELECT blah2",
        ("SELECT blah3",),
        ("",),
        ("SELECT blah4", 2),
        tuple(),
        ("SELECT blah5",),
        {"query": "SELECT blah6"},
        {"query": ""},
        {"query": "SELECT blah7", "depth": 2},
        {"not_a_query": "in sight!"},
        {"query": "SELECT blah8", "as_list": True},
    ]
    expected = [
        [None, q("SELECT blah")],
        [None, q("SELECT blah2")],
        [None, q("SELECT blah3")],
        [None, q("SELECT blah4", depth=2)],
        [None, q("SELECT blah5")],
        [None, q("SELECT blah6")],
        [None, q("SELECT blah7", depth=2)],
        [None, q("SELECT blah8", as_list=True)],
    ]
    assert pillar.extract_queries(args, {}) == expected
|
||||
|
||||
|
||||
def test_006_extract_queries_bogus_kwargs():
    """Keyword entries with empty queries are dropped (cut-down bogus-list case)."""

    def q(query):
        # Fully-defaulted query dict as emitted by extract_queries.
        return {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }

    pillar = mysql.MySQLExtPillar()
    kwargs = {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
    expected = [
        ["1", q("SELECT blah")],
        ["3", q("SELECT blah2")],
    ]
    assert pillar.extract_queries([], kwargs) == expected
|
||||
|
||||
|
||||
def test_011_enter_root():
    """enter_root focuses on a named sub-tree; None resets to the whole result."""
    pillar = mysql.MySQLExtPillar()
    pillar.enter_root("test")
    assert pillar.focus == pillar.result["test"]
    pillar.enter_root(None)
    assert pillar.focus == pillar.result
|
||||
|
||||
|
||||
def test_021_process_fields():
    """process_fields records the field count and clamps depth to len-1."""
    pillar = mysql.MySQLExtPillar()
    # (fields, requested depth, expected num_fields, expected effective depth)
    cases = [
        (["a", "b"], 0, 2, 1),
        (["a", "b"], 2, 2, 1),
        (["a", "b", "c", "d"], 0, 4, 3),
        (["a", "b", "c", "d"], 1, 4, 1),
        (["a", "b", "c", "d"], 2, 4, 2),
        (["a", "b", "c", "d"], 3, 4, 3),
        (["a", "b", "c", "d"], 4, 4, 3),
    ]
    for fields, depth, want_fields, want_depth in cases:
        pillar.process_fields(fields, depth)
        assert pillar.num_fields == want_fields
        assert pillar.depth == want_depth
|
||||
|
||||
|
||||
def test_111_process_results_legacy():
    """A single two-field row maps directly to one key/value pair."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b"], 0)
    pillar.with_lists = []
    pillar.process_results([[1, 2]])
    assert pillar.result == {1: 2}


def test_112_process_results_legacy_multiple():
    """Several two-field rows each contribute one key/value pair."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b"], 0)
    pillar.with_lists = []
    pillar.process_results([[1, 2], [3, 4], [5, 6]])
    assert pillar.result == {1: 2, 3: 4, 5: 6}


def test_121_process_results_depth_0():
    """Depth 0 on four fields nests each row to the maximum depth."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}


def test_122_process_results_depth_1():
    """Depth 1 keys on the first column and labels the rest with field names."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 1)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {
        1: {"b": 2, "c": 3, "d": 4},
        5: {"b": 6, "c": 7, "d": 8},
    }


def test_123_process_results_depth_2():
    """Depth 2 nests two columns, then labels the remaining fields."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}}


def test_124_process_results_depth_3():
    """Depth 3 is the maximum for four fields — same shape as depth 0."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 3)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}


def test_125_process_results_depth_4():
    """Depth beyond the field count is clamped to the maximum (3 here)."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 4)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_131_process_results_overwrite_legacy_multiple():
    """A repeated key in legacy mode keeps the last row's value."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b"], 0)
    pillar.with_lists = []
    pillar.process_results([[1, 2], [3, 4], [1, 6]])
    assert pillar.result == {1: 6, 3: 4}


def test_132_process_results_merge_depth_0():
    """Rows sharing only the first key merge under it."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}, 6: {7: 8}}}


def test_133_process_results_overwrite_depth_0():
    """Rows identical up to the leaf overwrite the leaf value."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
    assert pillar.result == {1: {2: {3: 8}}}


def test_134_process_results_deepmerge_depth_0():
    """Rows diverging only at the last key pair merge inside the deepest dict."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
    assert pillar.result == {1: {2: {3: 4, 7: 8}}}


def test_135_process_results_overwrite_depth_1():
    """At depth 1 a repeated first key replaces the whole labeled record."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 1)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert pillar.result == {1: {"b": 6, "c": 7, "d": 8}}


def test_136_process_results_merge_depth_2():
    """At depth 2 rows sharing the first key merge on the second key."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert pillar.result == {1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}}


def test_137_process_results_overwrite_depth_2():
    """At depth 2 rows sharing both keys overwrite the labeled record."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
    assert pillar.result == {1: {2: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_201_process_results_complexity_multiresults():
    """A second process_results call overwrites colliding records."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 7, 8]])
    assert pillar.result == {1: {2: {"c": 7, "d": 8}}}


def test_202_process_results_complexity_as_list():
    """With as_list, colliding labeled values accumulate into lists."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.as_list = True
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 7, 8]])
    assert pillar.result == {1: {2: {"c": [3, 7], "d": [4, 8]}}}


def test_203_process_results_complexity_as_list_deeper():
    """With as_list, colliding leaf values accumulate at full depth too."""
    pillar = mysql.MySQLExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.as_list = True
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 8]])
    assert pillar.result == {1: {2: {3: [4, 8]}}}


def test_204_process_results_complexity_as_list_mismatch_depth():
    """A deeper row appends its extra mapping to an existing leaf list."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    assert pillar.result == {1: {2: {3: [4, 5, {6: 7}]}}}


def test_205_process_results_complexity_as_list_mismatch_depth_reversed():
    """Shallower rows append scalars after an existing merged mapping."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}}


def test_206_process_results_complexity_as_list_mismatch_depth_weird_order():
    """Alternating deep and shallow rows preserve their arrival order."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}}
|
||||
|
||||
|
||||
def test_207_process_results_complexity_collision_mismatch_depth():
    """Without as_list, a deeper row's mapping replaces the colliding leaf."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    assert pillar.result == {1: {2: {3: {6: 7}}}}


def test_208_process_results_complexity_collision_mismatch_depth_reversed():
    """Without as_list, a later shallow row replaces the merged mapping."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: 5}}}


def test_209_process_results_complexity_collision_mismatch_depth_weird_order():
    """Without as_list, the last colliding row always wins regardless of order."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: 5}}}


def test_20A_process_results_complexity_as_list_vary():
    """Turning as_list off mid-stream makes the next collision overwrite."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.as_list = False
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: 5}}}


def test_207_process_results_complexity_roots_collision():
    """enter_root(1) nests later rows under an existing top-level key."""
    pillar = mysql.MySQLExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.enter_root(1)
    pillar.process_results([[5, 6, 7, 8]])
    assert pillar.result == {1: {5: {6: {7: 8}}}}
|
||||
|
||||
|
||||
def test_301_process_results_with_lists():
    """
    Validates the following results:

    {'a': [
        {'c': [
            {'e': 1},
            {'g': 2}
            ]
        },
        {'h': [
            {'j': 3, 'k': 4}
            ]
        }
    ]}
    """
    return_data = mysql.MySQLExtPillar()
    return_data.as_list = False
    # Columns 1 and 3 become list levels rather than dict levels.
    return_data.with_lists = [1, 3]
    return_data.enter_root(None)
    return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
    return_data.process_results(
        [
            ["a", "b", "c", "d", "e", 1],
            ["a", "b", "c", "f", "g", 2],
            ["a", "z", "h", "y", "j", 3],
            ["a", "z", "h", "y", "k", 4],
        ]
    )
    assert "a" in return_data.result
    # List ordering is not guaranteed, so inspect each member by its key.
    for x in return_data.result["a"]:
        if "c" in x:
            assert list(x.keys()) == ["c"], x.keys()
            for y in x["c"]:
                if "e" in y:
                    assert list(y.keys()) == ["e"]
                    assert y["e"] == 1
                elif "g" in y:
                    assert list(y.keys()) == ["g"]
                    assert y["g"] == 2
                else:
                    raise ValueError("Unexpected value {}".format(y))
        elif "h" in x:
            assert len(x["h"]) == 1
            for y in x["h"]:
                if "j" in y:
                    assert len(y.keys()) == 2
                    assert y["j"] == 3
                # Bug fix: this branch previously tested `"h" in y`, which can
                # never be true for the {'j': 3, 'k': 4} entry whose 'k' value
                # it goes on to assert about (compare the sibling test_302).
                elif "k" in y:
                    assert len(y.keys()) == 2
                    assert y["k"] == 4
                else:
                    raise ValueError("Unexpected value {}".format(y))
        else:
            raise ValueError("Unexpected value {}".format(x))
|
||||
|
||||
|
||||
def test_302_process_results_with_lists_consecutive():
    """
    Validates the following results:

    {'a': [
        [[
            {'e': 1},
            {'g': 2}
            ]
        ],
        [[
            {'j': 3, 'k': 4}
            ]
        ]
    ]}
    """
    return_data = mysql.MySQLExtPillar()
    return_data.as_list = False
    # Three consecutive list levels produce nested lists instead of dicts.
    return_data.with_lists = [1, 2, 3]
    return_data.enter_root(None)
    return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
    return_data.process_results(
        [
            ["a", "b", "c", "d", "e", 1],
            ["a", "b", "c", "f", "g", 2],
            ["a", "z", "h", "y", "j", 3],
            ["a", "z", "h", "y", "k", 4],
        ]
    )

    assert "a" in return_data.result
    # List ordering is not guaranteed; distinguish the two groups by the
    # size of their innermost dicts ({'e':1}/{'g':2} vs {'j':3,'k':4}).
    for x in return_data.result["a"]:
        assert len(x) == 1
        if len(x[0][0]) == 1:
            for y in x[0]:
                if "e" in y:
                    assert list(y.keys()) == ["e"]
                    assert y["e"] == 1
                elif "g" in y:
                    assert list(y.keys()) == ["g"]
                    assert y["g"] == 2
                else:
                    raise ValueError("Unexpected value {}".format(y))
        elif len(x[0][0]) == 2:
            for y in x[0]:
                if "j" in y:
                    assert len(y.keys()) == 2
                    assert y["j"] == 3
                elif "k" in y:
                    assert len(y.keys()) == 2
                    assert y["k"] == 4
                else:
                    # Bug fix: the failure message used to print
                    # len(x[0][0]) instead of the offending entry itself,
                    # which made the diagnostic useless (compare test_301).
                    raise ValueError("Unexpected value {}".format(y))
        else:
            raise ValueError("Unexpected value {}".format(x))
|
52
tests/pytests/unit/pillar/test_nodegroups.py
Normal file
52
tests/pytests/unit/pillar/test_nodegroups.py
Normal file
|
@ -0,0 +1,52 @@
|
|||
import pytest
|
||||
|
||||
import salt.pillar.nodegroups as nodegroups
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
def fake_minion_id():
    """Static minion id used throughout the nodegroups pillar tests."""
    return "fake_id"


@pytest.fixture
def fake_nodegroups(fake_minion_id):
    """Two nodegroups: one selecting the fake minion, one selecting another."""
    return {
        "groupA": fake_minion_id,
        "groupB": "another_minion_id",
    }


@pytest.fixture
def fake_pillar_name():
    """Pillar key under which matched nodegroups are returned."""
    return "fake_pillar_name"


@pytest.fixture
def configure_loader_modules(fake_minion_id, fake_nodegroups):
    """Provide the minimal __opts__ the nodegroups ext_pillar reads."""
    return {
        nodegroups: {
            "__opts__": {
                "cache": "localfs",
                "nodegroups": fake_nodegroups,
                "id": fake_minion_id,
            }
        }
    }
|
||||
|
||||
|
||||
def _runner(expected_ret, fake_minion_id, fake_pillar_name, pillar_name=None):
    """Invoke nodegroups.ext_pillar with CkMinions.check_minions stubbed out."""

    def fake_check_minions(group_sel, t):
        # Pretend the selector matches our minion only when its id appears
        # somewhere in the selector string.
        if fake_minion_id in group_sel:
            return {"minions": [fake_minion_id], "missing": []}
        return {"minions": ["another_minion_id"], "missing": []}

    check_minions_patch = patch(
        "salt.utils.minions.CkMinions.check_minions",
        MagicMock(side_effect=fake_check_minions),
    )
    with check_minions_patch:
        name = pillar_name or fake_pillar_name
        actual_ret = nodegroups.ext_pillar(fake_minion_id, {}, pillar_name=name)
    assert actual_ret == expected_ret


def test_succeeds(fake_pillar_name, fake_minion_id):
    """Only the nodegroup that selects the minion is listed in the pillar."""
    _runner({fake_pillar_name: ["groupA"]}, fake_minion_id, fake_pillar_name)
|
25
tests/pytests/unit/pillar/test_pepa.py
Normal file
25
tests/pytests/unit/pillar/test_pepa.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
import salt.pillar.pepa as pepa
|
||||
|
||||
try:
|
||||
from salt.utils.odict import OrderedDict
|
||||
except ImportError:
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
def test_repeated_keys():
    """Dotted keys that share a prefix must merge into a single nested tree."""
    data = OrderedDict(
        [
            ("foo..bar..foo", True),
            ("foo..bar..baz", True),
        ]
    )
    expected = {
        "foo": {
            "bar": {
                "foo": True,
                "baz": True,
            },
        },
    }
    assert pepa.key_value_to_tree(data) == expected
|
20
tests/pytests/unit/pillar/test_pillar_ldap.py
Normal file
20
tests/pytests/unit/pillar/test_pillar_ldap.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
import salt.pillar.pillar_ldap as pillar_ldap
|
||||
import salt.utils.stringutils
|
||||
|
||||
|
||||
def test__config_returns_str():
    """_config returns the configured value coerced to the native str type."""
    conf = {"foo": "bar"}
    expected = salt.utils.stringutils.to_str("bar")
    assert pillar_ldap._config("foo", conf) == expected


def test__conf_defaults_to_none():
    """A missing key with no explicit default yields None."""
    conf = {"foo": "bar"}
    assert pillar_ldap._config("bang", conf) is None


def test__conf_returns_str_from_unicode_default():
    """A unicode default is likewise coerced to the native str type."""
    conf = {"foo": "bar"}
    fallback = salt.utils.stringutils.to_unicode("bam")
    expected = salt.utils.stringutils.to_str("bam")
    assert pillar_ldap._config("bang", conf, fallback) == expected
|
83
tests/pytests/unit/pillar/test_s3.py
Normal file
83
tests/pytests/unit/pillar/test_s3.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import pytest
|
||||
|
||||
import salt.pillar.s3 as s3_pillar
|
||||
from tests.support.mock import MagicMock, mock_open, patch
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """Start the s3 pillar with an empty __utils__ so tests can patch it."""
    return {s3_pillar: {"__utils__": {}}}


def test_refresh_buckets_cache_file():
    """
    Test pagination with refresh_buckets_cache_file
    """

    def entry(index):
        # One fake S3 listing record for "<index>/init.sls".
        return {
            "Key": "{}/init.sls".format(index),
            "LastModified": "2019-12-18T15:54:39.000Z",
            "ETag": '"fba0a053704e8b357c94be90b44bb640"',
            "Size": "5 ",
            "StorageClass": "STANDARD",
        }

    creds = s3_pillar.S3Credentials(
        "XXXXXXXXXXXXXXXXXXXXX",
        "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
        "dummy_bucket",
        "s3.amazonaws.com",
    )

    # First page advertises a continuation token; the second page does not.
    page_one = [
        {"Name": "pillar-bucket"},
        {"Prefix": "test"},
        {"KeyCount": "10"},
        {"MaxKeys": "10"},
        {"NextContinuationToken": "XXXXX"},
        {"IsTruncated": "true"},
    ] + [entry(i) for i in range(0, 999)]

    page_two = [
        {"Name": "pillar-bucket"},
        {"Prefix": "test"},
        {"KeyCount": "10"},
        {"MaxKeys": "10"},
        {"IsTruncated": "true"},
    ] + [entry(i) for i in range(999, 1200)]

    # Both pages' file entries should end up in one flat bucket list.
    _expected = {"base": {"dummy_bucket": [entry(i) for i in range(0, 1200)]}}

    mock_s3_query = MagicMock(side_effect=[page_one, page_two])
    with patch.dict(s3_pillar.__utils__, {"s3.query": mock_s3_query}):
        with patch("salt.utils.files.fopen", mock_open(read_data=b"")):
            ret = s3_pillar._refresh_buckets_cache_file(
                creds, "dummy_file", False, "base", ""
            )
    assert ret == _expected
|
112
tests/pytests/unit/pillar/test_saltclass.py
Normal file
112
tests/pytests/unit/pillar/test_saltclass.py
Normal file
|
@ -0,0 +1,112 @@
|
|||
import pytest
|
||||
|
||||
import salt.pillar.saltclass as saltclass
|
||||
|
||||
|
||||
@pytest.fixture
def configure_loader_modules():
    """The saltclass pillar needs no loader context beyond an empty dict."""
    return {saltclass: {}}


@pytest.fixture
def minion_id():
    """Minion id the temporary saltclass tree is keyed on."""
    return "fake_id"
|
||||
|
||||
|
||||
@pytest.fixture
def temp_saltclass_tree(tmp_path, minion_id):
    """Build a throwaway saltclass tree (classes/ + nodes/) under tmp_path."""
    examples = tmp_path / "saltclass" / "examples"
    classes_dir = examples / "classes"
    nodes_dir = examples / "nodes"
    default_dir = classes_dir / "default"
    roles_dir = classes_dir / "roles"
    nginx_subdir = roles_dir / "nginx"
    for directory in (
        examples,
        classes_dir,
        nodes_dir,
        default_dir,
        roles_dir,
        nginx_subdir,
    ):
        directory.mkdir(parents=True, exist_ok=True)

    # default/init.yml: pulls in three classes and defines pillar data,
    # including a list whose items use ${...} pillar interpolation.
    # NOTE(review): YAML indentation reconstructed from a mangled source —
    # verify `test_list` nests directly under `pillars` so it surfaces as a
    # top-level pillar key (the tests read full_ret["test_list"]).
    test_list = """
classes:
  - default.users
  - default.motd
  - default.empty

pillars:
  default:
    network:
      dns:
{% if __grains__['os'] == 'should_never_match' %}
        srv1: 192.168.0.1
        srv2: 192.168.0.2
        domain: example.com
{% endif %}
      ntp:
        srv1: 192.168.10.10
        srv2: 192.168.10.20
  test_list:
    - a: ${default:network:ntp:srv1}
    - ${default:network:ntp:srv2}
"""
    (default_dir / "init.yml").write_text(test_list)

    # nodes/<minion_id>.yml: the node definition, listing classes via Jinja.
    nodes_text = """
environment: base

classes:
{% for class in ['default', 'roles.*', 'empty.*'] %}
  - {{ class }}
{% endfor %}
"""
    (nodes_dir / "{}.yml".format(minion_id)).write_text(nodes_text)

    # Leaf class files only need to exist with some parseable content.
    for leaf in ("users.yml", "empty.yml", "motd.yml"):
        (default_dir / leaf).write_text("test: this is a test")
    (roles_dir / "app.yml").write_text("test: this is a test")
    (nginx_subdir / "init.yml").write_text("test: this is a test")

    return examples
|
||||
|
||||
|
||||
def test_succeeds(temp_saltclass_tree):
    """ext_pillar records every resolved class under __saltclass__:classes."""
    expected_ret = [
        "default.users",
        "default.motd",
        "default.empty",
        "default",
        "roles.app",
        "roles.nginx",
    ]
    fake_args = {"path": str(temp_saltclass_tree)}
    try:
        full_ret = saltclass.ext_pillar("fake_id", {}, fake_args)
        parsed_ret = full_ret["__saltclass__"]["classes"]
    except TypeError as err:
        # Fail the test if we hit our NoneType error
        pytest.fail(err)
    # Else give the parsed content result
    assert parsed_ret == expected_ret


def test_list_expansion_succeeds(temp_saltclass_tree):
    """${...} references inside list items are expanded from pillar data."""
    expected_ret = [{"a": "192.168.10.10"}, "192.168.10.20"]
    parsed_ret = []
    fake_args = {"path": str(temp_saltclass_tree)}
    try:
        full_ret = saltclass.ext_pillar("fake_id", {}, fake_args)
        parsed_ret = full_ret["test_list"]
    except TypeError as err:
        # Fail the test if we hit our NoneType error
        pytest.fail(err)
    # Else give the parsed content result
    assert parsed_ret == expected_ret
|
789
tests/pytests/unit/pillar/test_sqlcipher.py
Normal file
789
tests/pytests/unit/pillar/test_sqlcipher.py
Normal file
|
@ -0,0 +1,789 @@
|
|||
import salt.pillar.sqlcipher as sqlcipher
|
||||
|
||||
|
||||
def test_001_extract_queries_list():
    """Every positional query spelling normalizes to the same dict shape."""

    def q(query, depth=0, as_list=False, with_lists=None):
        # Fully-defaulted query dict as emitted by extract_queries.
        return {
            "query": query,
            "depth": depth,
            "as_list": as_list,
            "as_json": False,
            "with_lists": with_lists,
            "ignore_null": False,
        }

    pillar = sqlcipher.SQLCipherExtPillar()
    args = [
        "SELECT blah",
        "SELECT blah2",
        ("SELECT blah3",),
        ("SELECT blah4", 2),
        {"query": "SELECT blah5"},
        {"query": "SELECT blah6", "depth": 2},
        {"query": "SELECT blah7", "as_list": True},
        {"query": "SELECT blah8", "with_lists": "1"},
        {"query": "SELECT blah9", "with_lists": "1,2"},
    ]
    expected = [
        [None, q("SELECT blah")],
        [None, q("SELECT blah2")],
        [None, q("SELECT blah3")],
        [None, q("SELECT blah4", depth=2)],
        [None, q("SELECT blah5")],
        [None, q("SELECT blah6", depth=2)],
        [None, q("SELECT blah7", as_list=True)],
        [None, q("SELECT blah8", with_lists=[1])],
        [None, q("SELECT blah9", with_lists=[1, 2])],
    ]
    assert pillar.extract_queries(args, {}) == expected
|
||||
|
||||
|
||||
def test_002_extract_queries_kwarg():
    """Keyword-only queries keep their key and are normalized in order."""
    pillar = sqlcipher.SQLCipherExtPillar()
    named = {
        "1": "SELECT blah",
        "2": "SELECT blah2",
        "3": ("SELECT blah3",),
        "4": ("SELECT blah4", 2),
        "5": {"query": "SELECT blah5"},
        "6": {"query": "SELECT blah6", "depth": 2},
        "7": {"query": "SELECT blah7", "as_list": True},
    }
    qbuffer = pillar.extract_queries([], named)

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        ["1", spec("SELECT blah")],
        ["2", spec("SELECT blah2")],
        ["3", spec("SELECT blah3")],
        ["4", spec("SELECT blah4", depth=2)],
        ["5", spec("SELECT blah5")],
        ["6", spec("SELECT blah6", depth=2)],
        ["7", spec("SELECT blah7", as_list=True)],
    ]
|
||||
|
||||
|
||||
def test_003_extract_queries_mixed():
    """Positional and keyword queries combine, positionals first."""
    pillar = sqlcipher.SQLCipherExtPillar()
    positional = [
        "SELECT blah1",
        ("SELECT blah2", 2),
        {"query": "SELECT blah3", "as_list": True},
    ]
    named = {
        "1": "SELECT blah1",
        "2": ("SELECT blah2", 2),
        "3": {"query": "SELECT blah3", "as_list": True},
    }
    qbuffer = pillar.extract_queries(positional, named)

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        [None, spec("SELECT blah1")],
        [None, spec("SELECT blah2", depth=2)],
        [None, spec("SELECT blah3", as_list=True)],
        ["1", spec("SELECT blah1")],
        ["2", spec("SELECT blah2", depth=2)],
        ["3", spec("SELECT blah3", as_list=True)],
    ]
|
||||
|
||||
|
||||
def test_004_extract_queries_bogus_list():
    """Empty or malformed positional entries are silently dropped."""
    pillar = sqlcipher.SQLCipherExtPillar()
    positional = [
        "SELECT blah",
        "",
        "SELECT blah2",
        ("SELECT blah3",),
        ("",),
        ("SELECT blah4", 2),
        tuple(),
        ("SELECT blah5",),
        {"query": "SELECT blah6"},
        {"query": ""},
        {"query": "SELECT blah7", "depth": 2},
        {"not_a_query": "in sight!"},
        {"query": "SELECT blah8", "as_list": True},
    ]
    qbuffer = pillar.extract_queries(positional, {})

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        [None, spec("SELECT blah")],
        [None, spec("SELECT blah2")],
        [None, spec("SELECT blah3")],
        [None, spec("SELECT blah4", depth=2)],
        [None, spec("SELECT blah5")],
        [None, spec("SELECT blah6")],
        [None, spec("SELECT blah7", depth=2)],
        [None, spec("SELECT blah8", as_list=True)],
    ]
|
||||
|
||||
|
||||
def test_005_extract_queries_bogus_kwargs():
    """Empty keyword queries are dropped (reduced variant of the list test)."""
    pillar = sqlcipher.SQLCipherExtPillar()
    qbuffer = pillar.extract_queries(
        [], {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
    )

    def spec(query):
        # Default normalized settings for a plain string query.
        return {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }

    assert qbuffer == [["1", spec("SELECT blah")], ["3", spec("SELECT blah2")]]
|
||||
|
||||
|
||||
def test_011_enter_root():
    """enter_root() focuses a named sub-tree; ``None`` resets to the top."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.enter_root("test")
    assert pillar.focus == pillar.result["test"]
    pillar.enter_root(None)
    assert pillar.focus == pillar.result
|
||||
|
||||
|
||||
def test_021_process_fields():
    """process_fields() records the field count and clamps the depth."""
    pillar = sqlcipher.SQLCipherExtPillar()
    # (fields, requested depth, expected num_fields, expected depth)
    cases = [
        (["a", "b"], 0, 2, 1),
        (["a", "b"], 2, 2, 1),
        (["a", "b", "c", "d"], 0, 4, 3),
        (["a", "b", "c", "d"], 1, 4, 1),
        (["a", "b", "c", "d"], 2, 4, 2),
        (["a", "b", "c", "d"], 3, 4, 3),
        (["a", "b", "c", "d"], 4, 4, 3),
    ]
    for fields, depth, want_fields, want_depth in cases:
        pillar.process_fields(fields, depth)
        assert pillar.num_fields == want_fields
        assert pillar.depth == want_depth
|
||||
|
||||
|
||||
def test_111_process_results_legacy():
    """A single two-field row collapses to a flat key/value mapping."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b"], 0)
    pillar.with_lists = []
    pillar.process_results([[1, 2]])
    assert pillar.result == {1: 2}
|
||||
|
||||
|
||||
def test_112_process_results_legacy_multiple():
    """Several two-field rows all land in the same flat mapping."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b"], 0)
    pillar.with_lists = []
    pillar.process_results([[1, 2], [3, 4], [5, 6]])
    assert pillar.result == {1: 2, 3: 4, 5: 6}
|
||||
|
||||
|
||||
def test_121_process_results_depth_0():
    """Depth 0 nests every field, leaving the last as the value."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_122_process_results_depth_1():
    """Depth 1 keys on the first field and maps the rest by column name."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 1)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {
        1: {"b": 2, "c": 3, "d": 4},
        5: {"b": 6, "c": 7, "d": 8},
    }
|
||||
|
||||
|
||||
def test_123_process_results_depth_2():
    """Depth 2 nests two key levels, then maps remaining columns by name."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_124_process_results_depth_3():
    """Depth 3 on four fields behaves like full nesting (depth 0)."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 3)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_125_process_results_depth_4():
    """A depth beyond the field count is clamped; result matches depth 3."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 4)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_131_process_results_overwrite_legacy_multiple():
    """A repeated key in flat mode keeps the last value seen."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b"], 0)
    pillar.with_lists = []
    pillar.process_results([[1, 2], [3, 4], [1, 6]])
    assert pillar.result == {1: 6, 3: 4}
|
||||
|
||||
|
||||
def test_132_process_results_merge_depth_0():
    """Rows sharing a leading key merge under that key."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert pillar.result == {1: {2: {3: 4}, 6: {7: 8}}}
|
||||
|
||||
|
||||
def test_133_process_results_overwrite_depth_0():
    """Rows identical in every key position overwrite the leaf value."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
    assert pillar.result == {1: {2: {3: 8}}}
|
||||
|
||||
|
||||
def test_134_process_results_deepmerge_depth_0():
    """Rows diverging at the deepest key merge into one inner dict."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
    assert pillar.result == {1: {2: {3: 4, 7: 8}}}
|
||||
|
||||
|
||||
def test_135_process_results_overwrite_depth_1():
    """At depth 1 a repeated leading key replaces the whole column dict."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 1)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert pillar.result == {1: {"b": 6, "c": 7, "d": 8}}
|
||||
|
||||
|
||||
def test_136_process_results_merge_depth_2():
    """At depth 2 rows diverging on the second key merge under the first."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert pillar.result == {1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_137_process_results_overwrite_depth_2():
    """At depth 2 rows sharing both keys overwrite the column dict."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
    assert pillar.result == {1: {2: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_201_process_results_complexity_multiresults():
    """Two separate process_results() calls behave like one merged batch."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 7, 8]])
    assert pillar.result == {1: {2: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_202_process_results_complexity_as_list():
    """With as_list, colliding column values accumulate into lists."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 2)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.as_list = True
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 7, 8]])
    assert pillar.result == {1: {2: {"c": [3, 7], "d": [4, 8]}}}
|
||||
|
||||
|
||||
def test_203_process_results_complexity_as_list_deeper():
    """With as_list, colliding leaf values accumulate at full depth too."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.as_list = True
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 8]])
    assert pillar.result == {1: {2: {3: [4, 8]}}}
|
||||
|
||||
|
||||
def test_204_process_results_complexity_as_list_mismatch_depth():
    """A deeper row appended after shallow ones joins the existing list."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    assert pillar.result == {1: {2: {3: [4, 5, {6: 7}]}}}
|
||||
|
||||
|
||||
def test_205_process_results_complexity_as_list_mismatch_depth_reversed():
    """Shallow rows appended after deeper ones join the existing list."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}}
|
||||
|
||||
|
||||
def test_206_process_results_complexity_as_list_mismatch_depth_weird_order():
    """Alternating deep and shallow rows preserve their arrival order."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}}
|
||||
|
||||
|
||||
def test_207_process_results_complexity_collision_mismatch_depth():
    """Without as_list, a later deeper row replaces the shallow leaf."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    assert pillar.result == {1: {2: {3: {6: 7}}}}
|
||||
|
||||
|
||||
def test_208_process_results_complexity_collision_mismatch_depth_reversed():
    """Without as_list, a later shallow row replaces the deeper sub-tree."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: 5}}}
|
||||
|
||||
|
||||
def test_209_process_results_complexity_collision_mismatch_depth_weird_order():
    """Without as_list, interleaved depths still end on the last row's value."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: 5}}}
|
||||
|
||||
|
||||
def test_20A_process_results_complexity_as_list_vary():
    """Turning as_list off mid-stream makes the final row overwrite."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = True
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e"], 0)
    pillar.process_results([[1, 2, 3, 6, 7]])
    pillar.process_results([[1, 2, 3, 8, 9]])
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.as_list = False
    pillar.process_results([[1, 2, 3, 5]])
    assert pillar.result == {1: {2: {3: 5}}}
|
||||
|
||||
|
||||
def test_207_process_results_complexity_roots_collision():
    """Re-entering root at a key nests later results under that key."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = False
    pillar.with_lists = []
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d"], 0)
    pillar.process_results([[1, 2, 3, 4]])
    pillar.enter_root(1)
    pillar.process_results([[5, 6, 7, 8]])
    assert pillar.result == {1: {5: {6: {7: 8}}}}
|
||||
|
||||
|
||||
def test_301_process_results_with_lists():
    """with_lists folds the flagged depths into lists of dicts."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = False
    pillar.with_lists = [1, 3]
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e", "v"], 0)
    pillar.process_results(
        [
            ["a", "b", "c", "d", "e", 1],
            ["a", "b", "c", "f", "g", 2],
            ["a", "z", "h", "y", "j", 3],
            ["a", "z", "h", "y", "k", 4],
        ]
    )
    expected = {"a": [{"c": [{"e": 1}, {"g": 2}]}, {"h": [{"j": 3, "k": 4}]}]}
    # NOTE(review): sorted() over a dict compares only its top-level keys,
    # so this assertion is weaker than full structural equality — confirm
    # whether a direct == comparison was intended.
    assert sorted(pillar.result) == sorted(expected)
|
||||
|
||||
|
||||
def test_302_process_results_with_lists_consecutive():
    """Consecutive with_lists depths produce nested list levels."""
    pillar = sqlcipher.SQLCipherExtPillar()
    pillar.as_list = False
    pillar.with_lists = [1, 2, 3]
    pillar.enter_root(None)
    pillar.process_fields(["a", "b", "c", "d", "e", "v"], 0)
    pillar.process_results(
        [
            ["a", "b", "c", "d", "e", 1],
            ["a", "b", "c", "f", "g", 2],
            ["a", "z", "h", "y", "j", 3],
            ["a", "z", "h", "y", "k", 4],
        ]
    )
    expected = {"a": [[[{"e": 1}, {"g": 2}]], [[{"j": 3, "k": 4}]]]}
    # NOTE(review): sorted() over a dict compares only its top-level keys,
    # so this assertion is weaker than full structural equality — confirm
    # whether a direct == comparison was intended.
    assert sorted(pillar.result) == sorted(expected)
|
789
tests/pytests/unit/pillar/test_sqlite3.py
Normal file
789
tests/pytests/unit/pillar/test_sqlite3.py
Normal file
|
@ -0,0 +1,789 @@
|
|||
import salt.pillar.sqlite3 as sqlite3
|
||||
|
||||
|
||||
def test_001_extract_queries_list():
    """Positional queries of every supported shape are normalized."""
    pillar = sqlite3.SQLite3ExtPillar()
    positional = [
        "SELECT blah",
        "SELECT blah2",
        ("SELECT blah3",),
        ("SELECT blah4", 2),
        {"query": "SELECT blah5"},
        {"query": "SELECT blah6", "depth": 2},
        {"query": "SELECT blah7", "as_list": True},
        {"query": "SELECT blah8", "with_lists": "1"},
        {"query": "SELECT blah9", "with_lists": "1,2"},
    ]
    qbuffer = pillar.extract_queries(positional, {})

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        [None, spec("SELECT blah")],
        [None, spec("SELECT blah2")],
        [None, spec("SELECT blah3")],
        [None, spec("SELECT blah4", depth=2)],
        [None, spec("SELECT blah5")],
        [None, spec("SELECT blah6", depth=2)],
        [None, spec("SELECT blah7", as_list=True)],
        [None, spec("SELECT blah8", with_lists=[1])],
        [None, spec("SELECT blah9", with_lists=[1, 2])],
    ]
|
||||
|
||||
|
||||
def test_002_extract_queries_kwarg():
    """Keyword-only queries keep their key and are normalized in order."""
    pillar = sqlite3.SQLite3ExtPillar()
    named = {
        "1": "SELECT blah",
        "2": "SELECT blah2",
        "3": ("SELECT blah3",),
        "4": ("SELECT blah4", 2),
        "5": {"query": "SELECT blah5"},
        "6": {"query": "SELECT blah6", "depth": 2},
        "7": {"query": "SELECT blah7", "as_list": True},
    }
    qbuffer = pillar.extract_queries([], named)

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        ["1", spec("SELECT blah")],
        ["2", spec("SELECT blah2")],
        ["3", spec("SELECT blah3")],
        ["4", spec("SELECT blah4", depth=2)],
        ["5", spec("SELECT blah5")],
        ["6", spec("SELECT blah6", depth=2)],
        ["7", spec("SELECT blah7", as_list=True)],
    ]
|
||||
|
||||
|
||||
def test_003_extract_queries_mixed():
    """Positional and keyword queries combine, positionals first."""
    pillar = sqlite3.SQLite3ExtPillar()
    positional = [
        "SELECT blah1",
        ("SELECT blah2", 2),
        {"query": "SELECT blah3", "as_list": True},
    ]
    named = {
        "1": "SELECT blah1",
        "2": ("SELECT blah2", 2),
        "3": {"query": "SELECT blah3", "as_list": True},
    }
    qbuffer = pillar.extract_queries(positional, named)

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        [None, spec("SELECT blah1")],
        [None, spec("SELECT blah2", depth=2)],
        [None, spec("SELECT blah3", as_list=True)],
        ["1", spec("SELECT blah1")],
        ["2", spec("SELECT blah2", depth=2)],
        ["3", spec("SELECT blah3", as_list=True)],
    ]
|
||||
|
||||
|
||||
def test_004_extract_queries_bogus_list():
    """Empty or malformed positional entries are silently dropped."""
    pillar = sqlite3.SQLite3ExtPillar()
    positional = [
        "SELECT blah",
        "",
        "SELECT blah2",
        ("SELECT blah3",),
        ("",),
        ("SELECT blah4", 2),
        tuple(),
        ("SELECT blah5",),
        {"query": "SELECT blah6"},
        {"query": ""},
        {"query": "SELECT blah7", "depth": 2},
        {"not_a_query": "in sight!"},
        {"query": "SELECT blah8", "as_list": True},
    ]
    qbuffer = pillar.extract_queries(positional, {})

    def spec(query, **overrides):
        # Default normalized settings for a query, with per-case overrides.
        settings = {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }
        settings.update(overrides)
        return settings

    assert qbuffer == [
        [None, spec("SELECT blah")],
        [None, spec("SELECT blah2")],
        [None, spec("SELECT blah3")],
        [None, spec("SELECT blah4", depth=2)],
        [None, spec("SELECT blah5")],
        [None, spec("SELECT blah6")],
        [None, spec("SELECT blah7", depth=2)],
        [None, spec("SELECT blah8", as_list=True)],
    ]
|
||||
|
||||
|
||||
def test_005_extract_queries_bogus_kwargs():
    """Empty keyword queries are dropped (reduced variant of the list test)."""
    pillar = sqlite3.SQLite3ExtPillar()
    qbuffer = pillar.extract_queries(
        [], {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
    )

    def spec(query):
        # Default normalized settings for a plain string query.
        return {
            "query": query,
            "depth": 0,
            "as_list": False,
            "as_json": False,
            "with_lists": None,
            "ignore_null": False,
        }

    assert qbuffer == [["1", spec("SELECT blah")], ["3", spec("SELECT blah2")]]
|
||||
|
||||
|
||||
def test_011_enter_root():
    """enter_root() focuses a named sub-tree; ``None`` resets to the top."""
    pillar = sqlite3.SQLite3ExtPillar()
    pillar.enter_root("test")
    assert pillar.focus == pillar.result["test"]
    pillar.enter_root(None)
    assert pillar.focus == pillar.result
|
||||
|
||||
|
||||
def test_021_process_fields():
    """process_fields() records the field count and clamps the depth."""
    pillar = sqlite3.SQLite3ExtPillar()
    # (fields, requested depth, expected num_fields, expected depth)
    cases = [
        (["a", "b"], 0, 2, 1),
        (["a", "b"], 2, 2, 1),
        (["a", "b", "c", "d"], 0, 4, 3),
        (["a", "b", "c", "d"], 1, 4, 1),
        (["a", "b", "c", "d"], 2, 4, 2),
        (["a", "b", "c", "d"], 3, 4, 3),
        (["a", "b", "c", "d"], 4, 4, 3),
    ]
    for fields, depth, want_fields, want_depth in cases:
        pillar.process_fields(fields, depth)
        assert pillar.num_fields == want_fields
        assert pillar.depth == want_depth
|
||||
|
||||
|
||||
def test_111_process_results_legacy():
    """A single two-column row becomes one key/value pair."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b"], 0)
    epillar.with_lists = []
    epillar.process_results([[1, 2]])
    assert epillar.result == {1: 2}
|
||||
|
||||
|
||||
def test_112_process_results_legacy_multiple():
    """Several two-column rows merge into one flat mapping."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b"], 0)
    epillar.with_lists = []
    epillar.process_results([[1, 2], [3, 4], [5, 6]])
    assert epillar.result == {1: 2, 3: 4, 5: 6}
|
||||
|
||||
|
||||
def test_121_process_results_depth_0():
    """Depth 0 chains every column value into nested dict keys."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert epillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_122_process_results_depth_1():
    """Depth 1 keys on the first column; remaining columns keep field names."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 1)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    expected = {
        1: {"b": 2, "c": 3, "d": 4},
        5: {"b": 6, "c": 7, "d": 8},
    }
    assert epillar.result == expected
|
||||
|
||||
|
||||
def test_123_process_results_depth_2():
    """Depth 2 keys on the first two columns; the rest keep field names."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 2)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert epillar.result == {1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_124_process_results_depth_3():
    """Depth 3 on four fields fully nests, same as depth 0."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 3)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert epillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_125_process_results_depth_4():
    """An out-of-range depth is clamped, giving the fully nested form."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 4)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert epillar.result == {1: {2: {3: 4}}, 5: {6: {7: 8}}}
|
||||
|
||||
|
||||
def test_131_process_results_overwrite_legacy_multiple():
    """A repeated key in flat mode overwrites the earlier value."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b"], 0)
    epillar.with_lists = []
    epillar.process_results([[1, 2], [3, 4], [1, 6]])
    assert epillar.result == {1: 6, 3: 4}
|
||||
|
||||
|
||||
def test_132_process_results_merge_depth_0():
    """Rows sharing a top-level key merge their sub-trees."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert epillar.result == {1: {2: {3: 4}, 6: {7: 8}}}
|
||||
|
||||
|
||||
def test_133_process_results_overwrite_depth_0():
    """Rows identical up to the leaf overwrite the leaf value."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
    assert epillar.result == {1: {2: {3: 8}}}
|
||||
|
||||
|
||||
def test_134_process_results_deepmerge_depth_0():
    """Rows diverging only at the leaf key deep-merge into one dict."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
    assert epillar.result == {1: {2: {3: 4, 7: 8}}}
|
||||
|
||||
|
||||
def test_135_process_results_overwrite_depth_1():
    """At depth 1 a repeated first-column key replaces the whole record."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 1)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert epillar.result == {1: {"b": 6, "c": 7, "d": 8}}
|
||||
|
||||
|
||||
def test_136_process_results_merge_depth_2():
    """At depth 2, rows sharing the first key merge their second-level keys."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 2)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
    assert epillar.result == {1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_137_process_results_overwrite_depth_2():
    """At depth 2 a repeated two-key prefix replaces the trailing record."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 2)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
    assert epillar.result == {1: {2: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_201_process_results_complexity_multiresults():
    """Separate process_results calls behave like one combined result set."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 2)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 7, 8]])
    assert epillar.result == {1: {2: {"c": 7, "d": 8}}}
|
||||
|
||||
|
||||
def test_202_process_results_complexity_as_list():
    """With as_list, colliding leaf values accumulate into lists."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 2)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.as_list = True
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 7, 8]])
    assert epillar.result == {1: {2: {"c": [3, 7], "d": [4, 8]}}}
|
||||
|
||||
|
||||
def test_203_process_results_complexity_as_list_deeper():
    """as_list also accumulates at the deepest nesting level."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.as_list = True
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 3, 8]])
    assert epillar.result == {1: {2: {3: [4, 8]}}}
|
||||
|
||||
|
||||
def test_204_process_results_complexity_as_list_mismatch_depth():
    """A deeper row appended after shallow rows joins the same as_list list."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = True
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 3, 5]])
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    assert epillar.result == {1: {2: {3: [4, 5, {6: 7}]}}}
|
||||
|
||||
|
||||
def test_205_process_results_complexity_as_list_mismatch_depth_reversed():
    """Deeper rows first: their merged dict leads the list, shallow values follow."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = True
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    epillar.process_results([[1, 2, 3, 8, 9]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 3, 5]])
    assert epillar.result == {1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}}
|
||||
|
||||
|
||||
def test_206_process_results_complexity_as_list_mismatch_depth_weird_order():
    """Alternating field shapes append in arrival order under as_list."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = True
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 8, 9]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 5]])
    assert epillar.result == {1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}}
|
||||
|
||||
|
||||
def test_207_process_results_complexity_collision_mismatch_depth():
    """Without as_list, a later deeper row clobbers the shallow leaf."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = False
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 3, 5]])
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    assert epillar.result == {1: {2: {3: {6: 7}}}}
|
||||
|
||||
|
||||
def test_208_process_results_complexity_collision_mismatch_depth_reversed():
    """Without as_list, a later shallow row clobbers the deeper dict."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = False
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    epillar.process_results([[1, 2, 3, 8, 9]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_results([[1, 2, 3, 5]])
    assert epillar.result == {1: {2: {3: 5}}}
|
||||
|
||||
|
||||
def test_209_process_results_complexity_collision_mismatch_depth_weird_order():
    """Alternating field shapes without as_list: last row wins outright."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = False
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 8, 9]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 5]])
    assert epillar.result == {1: {2: {3: 5}}}
|
||||
|
||||
|
||||
def test_20A_process_results_complexity_as_list_vary():
    """Turning as_list off mid-stream makes the final row replace the list."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = True
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e"], 0)
    epillar.process_results([[1, 2, 3, 6, 7]])
    epillar.process_results([[1, 2, 3, 8, 9]])
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.as_list = False
    epillar.process_results([[1, 2, 3, 5]])
    assert epillar.result == {1: {2: {3: 5}}}
|
||||
|
||||
|
||||
def test_207_process_results_complexity_roots_collision():
    """Re-entering a root equal to an existing key nests under that key."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = False
    epillar.with_lists = []
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d"], 0)
    epillar.process_results([[1, 2, 3, 4]])
    epillar.enter_root(1)
    epillar.process_results([[5, 6, 7, 8]])
    assert epillar.result == {1: {5: {6: {7: 8}}}}
|
||||
|
||||
|
||||
def test_301_process_results_with_lists():
    """with_lists=[1, 3] converts levels 1 and 3 of the tree into lists."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = False
    epillar.with_lists = [1, 3]
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e", "v"], 0)
    rows = [
        ["a", "b", "c", "d", "e", 1],
        ["a", "b", "c", "f", "g", 2],
        ["a", "z", "h", "y", "j", 3],
        ["a", "z", "h", "y", "k", 4],
    ]
    epillar.process_results(rows)
    expected = {"a": [{"c": [{"e": 1}, {"g": 2}]}, {"h": [{"j": 3, "k": 4}]}]}
    assert sorted(expected) == sorted(epillar.result)
|
||||
|
||||
|
||||
def test_302_process_results_with_lists_consecutive():
    """Consecutive with_lists levels produce nested lists of lists."""
    epillar = sqlite3.SQLite3ExtPillar()
    epillar.as_list = False
    epillar.with_lists = [1, 2, 3]
    epillar.enter_root(None)
    epillar.process_fields(["a", "b", "c", "d", "e", "v"], 0)
    rows = [
        ["a", "b", "c", "d", "e", 1],
        ["a", "b", "c", "f", "g", 2],
        ["a", "z", "h", "y", "j", 3],
        ["a", "z", "h", "y", "k", 4],
    ]
    epillar.process_results(rows)
    expected = {"a": [[[{"e": 1}, {"g": 2}]], [[{"j": 3, "k": 4}]]]}
    assert sorted(expected) == sorted(epillar.result)
|
|
@ -1,323 +0,0 @@
|
|||
"""
|
||||
Tests for the Azure Blob External Pillar.
|
||||
"""
|
||||
|
||||
import os
|
||||
import pickle
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.pillar.azureblob as azureblob
|
||||
import salt.utils.files
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
|
||||
# Optional-dependency probe: the azure SDK may be absent, in which case the
# whole test case below is skipped via the @skipIf decorator.
HAS_LIBS = False
try:
    # pylint: disable=no-name-in-module
    from azure.storage.blob import BlobServiceClient

    # pylint: enable=no-name-in-module

    HAS_LIBS = True
except ImportError:
    pass
|
||||
|
||||
|
||||
class MockBlob(dict):
    """
    Dict-backed stand-in for a single Azure blob listing entry.
    """

    # Class-level attribute mirroring the real SDK blob object's name field.
    name = ""

    def __init__(self):
        defaults = {
            "container": None,
            "name": "test.sls",
            "prefix": None,
            "delimiter": "/",
            "results_per_page": None,
            "location_mode": None,
        }
        super().__init__(defaults)
|
||||
|
||||
|
||||
class MockContainerClient:
    """
    Creates a Mock ContainerClient.
    """

    def __init__(self):
        pass

    def walk_blobs(self, *args, **kwargs):
        # Generator yielding a single canned blob entry, mimicking the SDK's
        # lazy blob iteration.
        yield MockBlob()

    def get_blob_client(self, *args, **kwargs):
        # Returns None; callers in these tests never use the blob client.
        pass
|
||||
|
||||
|
||||
class MockBlobServiceClient:
    """
    Creates a Mock BlobServiceClient.
    """

    def __init__(self):
        pass

    def get_container_client(self, *args, **kwargs):
        # Always hands back a fresh mock container client.
        container_client = MockContainerClient()
        return container_client
|
||||
|
||||
|
||||
@skipIf(HAS_LIBS is False, "The azure.storage.blob module must be installed.")
class AzureBlobTestCase(TestCase, LoaderModuleMockMixin):
    """
    TestCase for salt.pillar.azureblob ext_pillar.
    """

    def setup_loader_modules(self):
        # Provide the ext_pillar module with default master opts and utils.
        self.opts = salt.config.DEFAULT_MASTER_OPTS.copy()
        utils = salt.loader.utils(self.opts)
        return {
            azureblob: {"__opts__": self.opts, "__utils__": utils},
        }

    def test__init_expired(self):
        """
        Tests the result of _init when the cache is expired.
        """
        container = "test"
        multiple_env = False
        environment = "base"
        blob_cache_expire = 0  # The cache will be expired
        blob_client = MockBlobServiceClient()
        cache_file = tempfile.NamedTemporaryFile()
        # Patch _get_containers_cache_filename so the pillar writes its cache
        # into the tempfile instead of the real master cache directory.
        with patch.object(
            azureblob,
            "_get_containers_cache_filename",
            MagicMock(return_value=str(cache_file.name)),
        ):
            # Patch BlobServiceClient.from_connection_string so no real
            # connection string is needed; the mock serves example blob data.
            with patch.object(
                BlobServiceClient,
                "from_connection_string",
                MagicMock(return_value=blob_client),
            ):
                ret = azureblob._init(
                    "", container, multiple_env, environment, blob_cache_expire
                )
        cache_file.close()
        self.assertEqual(
            ret,
            {
                "base": {
                    "test": [
                        {
                            "container": None,
                            "name": "test.sls",
                            "prefix": None,
                            "delimiter": "/",
                            "results_per_page": None,
                            "location_mode": None,
                        }
                    ]
                }
            },
        )

    def test__init_not_expired(self):
        """
        Tests the result of _init when the cache is not expired.
        """
        container = "test"
        multiple_env = False
        environment = "base"
        blob_cache_expire = (time.time()) * (
            time.time()
        )  # The cache will not be expired
        metadata = {
            "base": {
                "test": [
                    {"name": "base/secret.sls", "relevant": "include.sls"},
                    {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
                ]
            }
        }
        cache_file = tempfile.NamedTemporaryFile()
        # Pickle the metadata into the tempfile's actual path.
        # NOTE: previously this used str(cache_file) — the object's repr —
        # creating a stray, never-cleaned file; cache_file.name is the path.
        with salt.utils.files.fopen(cache_file.name, "wb") as fp_:
            pickle.dump(metadata, fp_)
        # Patch _get_containers_cache_filename so the pillar reads its cache
        # from the tempfile created above.
        with patch.object(
            azureblob,
            "_get_containers_cache_filename",
            MagicMock(return_value=str(cache_file.name)),
        ):
            # Patch _read_containers_cache_file to return what it would if
            # handed the tempfile representing the cache file.
            plugged = azureblob._read_containers_cache_file(cache_file.name)
            with patch.object(
                azureblob,
                "_read_containers_cache_file",
                MagicMock(return_value=plugged),
            ):
                ret = azureblob._init(
                    "", container, multiple_env, environment, blob_cache_expire
                )
        # Closing the NamedTemporaryFile also deletes it.
        cache_file.close()
        self.assertEqual(ret, metadata)

    def test__get_cache_dir(self):
        """
        Tests the result of _get_cache_dir.
        """
        ret = azureblob._get_cache_dir()
        self.assertEqual(ret, "/var/cache/salt/master/pillar_azureblob")

    def test__get_cached_file_name(self):
        """
        Tests the result of _get_cached_file_name.
        """
        container = "test"
        saltenv = "base"
        path = "base/secret.sls"
        ret = azureblob._get_cached_file_name(container, saltenv, path)
        self.assertEqual(
            ret, "/var/cache/salt/master/pillar_azureblob/base/test/base/secret.sls"
        )

    def test__get_containers_cache_filename(self):
        """
        Tests the result of _get_containers_cache_filename.
        """
        container = "test"
        ret = azureblob._get_containers_cache_filename(container)
        self.assertEqual(
            ret, "/var/cache/salt/master/pillar_azureblob/test-files.cache"
        )

    def test__refresh_containers_cache_file(self):
        """
        Tests the result of _refresh_containers_cache_file to ensure that it
        successfully copies blob data into a cache file.
        """
        blob_client = MockBlobServiceClient()
        container = "test"
        cache_file = tempfile.NamedTemporaryFile()
        with patch.object(
            BlobServiceClient,
            "from_connection_string",
            MagicMock(return_value=blob_client),
        ):
            ret = azureblob._refresh_containers_cache_file(
                "", container, cache_file.name
            )
        cache_file.close()
        self.assertEqual(
            ret,
            {
                "base": {
                    "test": [
                        {
                            "container": None,
                            "name": "test.sls",
                            "prefix": None,
                            "delimiter": "/",
                            "results_per_page": None,
                            "location_mode": None,
                        }
                    ]
                }
            },
        )

    def test__read_containers_cache_file(self):
        """
        Tests the result of _read_containers_cache_file to make sure that it
        successfully loads in pickled metadata.
        """
        metadata = {
            "base": {
                "test": [
                    {"name": "base/secret.sls", "relevant": "include.sls"},
                    {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
                ]
            }
        }
        cache_file = tempfile.NamedTemporaryFile()
        # Pickle the metadata into the tempfile's actual path (fixed from the
        # earlier str(cache_file) repr path; see test__init_not_expired).
        with salt.utils.files.fopen(cache_file.name, "wb") as fp_:
            pickle.dump(metadata, fp_)
        # _read_containers_cache_file should round-trip the pickled metadata.
        ret = azureblob._read_containers_cache_file(cache_file.name)
        cache_file.close()
        self.assertEqual(ret, metadata)

    def test__find_files(self):
        """
        Tests the result of _find_files. Ensures it only finds files and not
        directories. Ensures it also ignores irrelevant files.
        """
        metadata = {
            "test": [
                {"name": "base/secret.sls"},
                {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
                {"name": "base/"},
            ]
        }
        ret = azureblob._find_files(metadata)
        self.assertEqual(ret, {"test": ["base/secret.sls", "blobtest.sls"]})

    def test__find_file_meta1(self):
        """
        Tests the result of _find_file_meta when the metadata contains a blob
        with the specified path and a blob without the specified path.
        """
        metadata = {
            "base": {
                "test": [
                    {"name": "base/secret.sls", "relevant": "include.sls"},
                    {"name": "blobtest.sls", "irrelevant": "ignore.sls"},
                ]
            }
        }
        container = "test"
        saltenv = "base"
        path = "base/secret.sls"
        ret = azureblob._find_file_meta(metadata, container, saltenv, path)
        self.assertEqual(ret, {"name": "base/secret.sls", "relevant": "include.sls"})

    def test__find_file_meta2(self):
        """
        Tests the result of _find_file_meta when the saltenv in metadata does
        not match the specified saltenv.
        """
        metadata = {"wrong": {"test": [{"name": "base/secret.sls"}]}}
        container = "test"
        saltenv = "base"
        path = "base/secret.sls"
        ret = azureblob._find_file_meta(metadata, container, saltenv, path)
        self.assertEqual(ret, None)

    def test__find_file_meta3(self):
        """
        Tests the result of _find_file_meta when the container in metadata does
        not match the specified container.
        """
        metadata = {"base": {"wrong": [{"name": "base/secret.sls"}]}}
        container = "test"
        saltenv = "base"
        path = "base/secret.sls"
        ret = azureblob._find_file_meta(metadata, container, saltenv, path)
        self.assertEqual(ret, None)
|
|
@ -1,159 +0,0 @@
|
|||
import salt.pillar.consul_pillar as consul_pillar
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
|
||||
# Minimal master opts carrying only the Consul connection profile.
OPTS = {"consul_config": {"consul.port": 8500, "consul.host": "172.17.0.15"}}

# Canned Consul KV listing used by every test below. Keys ending in "/" are
# directory markers (Value None); the rest exercise value parsing, newline
# splitting, quoting, and YAML expansion behavior of the ext_pillar.
PILLAR_DATA = [
    {
        "Value": "/path/to/certs/testsite1.crt",
        "Key": "test-shared/sites/testsite1/ssl/certs/SSLCertificateFile",
    },
    {
        "Value": "/path/to/certs/testsite1.key",
        "Key": "test-shared/sites/testsite1/ssl/certs/SSLCertificateKeyFile",
    },
    {"Value": None, "Key": "test-shared/sites/testsite1/ssl/certs/"},
    {"Value": "True", "Key": "test-shared/sites/testsite1/ssl/force"},
    {"Value": None, "Key": "test-shared/sites/testsite1/ssl/"},
    {
        "Value": "salt://sites/testsite1.tmpl",
        "Key": "test-shared/sites/testsite1/template",
    },
    {"Value": "test.example.com", "Key": "test-shared/sites/testsite1/uri"},
    {"Value": None, "Key": "test-shared/sites/testsite1/"},
    {"Value": None, "Key": "test-shared/sites/"},
    {"Value": "Test User", "Key": "test-shared/user/full_name"},
    {"Value": "adm\nwww-data\nmlocate", "Key": "test-shared/user/groups"},
    {"Value": '"adm\nwww-data\nmlocate"', "Key": "test-shared/user/dontsplit"},
    {"Value": "yaml:\n  key: value\n", "Key": "test-shared/user/dontexpand"},
    {"Value": None, "Key": "test-shared/user/blankvalue"},
    {"Value": "test", "Key": "test-shared/user/login"},
    {"Value": None, "Key": "test-shared/user/"},
]

# Fixture for dict_merge tests.
SIMPLE_DICT = {"key1": {"key2": "val1"}}
|
||||
|
||||
|
||||
@skipIf(not consul_pillar.consul, "python-consul module not installed")
class ConsulPillarTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.pillar.consul_pillar
    """

    def setup_loader_modules(self):
        # Inject master opts and stub get_conn so no live Consul agent is
        # contacted by any test.
        return {
            consul_pillar: {
                "__opts__": OPTS,
                "get_conn": MagicMock(return_value="consul_connection"),
            }
        }

    def test_connection(self):
        # ext_pillar must open a connection using the named profile from OPTS.
        with patch.dict(
            consul_pillar.__salt__, {"grains.get": MagicMock(return_value=({}))}
        ):
            with patch.object(
                consul_pillar,
                "consul_fetch",
                MagicMock(return_value=("2232", PILLAR_DATA)),
            ):
                consul_pillar.ext_pillar(
                    "testminion", {}, "consul_config root=test-shared/"
                )
                consul_pillar.get_conn.assert_called_once_with(OPTS, "consul_config")

    def test_pillar_data(self):
        # root=test-shared/ strips the prefix; blank ("blankvalue") keys are
        # dropped from the resulting pillar tree.
        with patch.dict(
            consul_pillar.__salt__, {"grains.get": MagicMock(return_value=({}))}
        ):
            with patch.object(
                consul_pillar,
                "consul_fetch",
                MagicMock(return_value=("2232", PILLAR_DATA)),
            ):
                pillar_data = consul_pillar.ext_pillar(
                    "testminion", {}, "consul_config root=test-shared/"
                )
                consul_pillar.consul_fetch.assert_called_once_with(
                    "consul_connection", "test-shared"
                )
                assert sorted(pillar_data) == ["sites", "user"]
                self.assertNotIn("blankvalue", pillar_data["user"])

    def test_blank_root(self):
        # With no root argument the top-level KV prefix becomes the pillar key.
        with patch.dict(
            consul_pillar.__salt__, {"grains.get": MagicMock(return_value=({}))}
        ):
            with patch.object(
                consul_pillar,
                "consul_fetch",
                MagicMock(return_value=("2232", PILLAR_DATA)),
            ):
                pillar_data = consul_pillar.ext_pillar(
                    "testminion", {}, "consul_config"
                )
                consul_pillar.consul_fetch.assert_called_once_with(
                    "consul_connection", ""
                )
                assert sorted(pillar_data) == ["test-shared"]

    def test_pillar_nest(self):
        # pillar_root= nests the fetched data under an extra key.
        with patch.dict(
            consul_pillar.__salt__, {"grains.get": MagicMock(return_value=({}))}
        ):
            with patch.object(
                consul_pillar,
                "consul_fetch",
                MagicMock(return_value=("2232", PILLAR_DATA)),
            ):
                pillar_data = consul_pillar.ext_pillar(
                    "testminion",
                    {},
                    "consul_config pillar_root=nested-key/ root=test-shared/ ",
                )
                assert sorted(pillar_data["nested-key"]) == ["sites", "user"]
                self.assertNotIn("blankvalue", pillar_data["nested-key"]["user"])

    def test_value_parsing(self):
        # Quoted multiline values must stay a single string, not a list.
        with patch.dict(
            consul_pillar.__salt__, {"grains.get": MagicMock(return_value=({}))}
        ):
            with patch.object(
                consul_pillar,
                "consul_fetch",
                MagicMock(return_value=("2232", PILLAR_DATA)),
            ):
                pillar_data = consul_pillar.ext_pillar(
                    "testminion", {}, "consul_config root=test-shared/"
                )
                assert isinstance(pillar_data["user"]["dontsplit"], str)

    def test_non_expansion(self):
        # expand_keys=false must leave YAML-looking values as raw strings.
        with patch.dict(
            consul_pillar.__salt__, {"grains.get": MagicMock(return_value=({}))}
        ):
            with patch.object(
                consul_pillar,
                "consul_fetch",
                MagicMock(return_value=("2232", PILLAR_DATA)),
            ):
                pillar_data = consul_pillar.ext_pillar(
                    "testminion",
                    {},
                    "consul_config root=test-shared/ expand_keys=false",
                )
                assert isinstance(pillar_data["user"]["dontexpand"], str)

    def test_dict_merge(self):
        # dict_merge must be idempotent on identical dicts and deep-merge
        # disjoint nested keys.
        test_dict = {}
        with patch.dict(test_dict, SIMPLE_DICT):
            self.assertDictEqual(
                consul_pillar.dict_merge(test_dict, SIMPLE_DICT), SIMPLE_DICT
            )
        with patch.dict(test_dict, {"key1": {"key3": {"key4": "value"}}}):
            self.assertDictEqual(
                consul_pillar.dict_merge(test_dict, SIMPLE_DICT),
                {"key1": {"key2": "val1", "key3": {"key4": "value"}}},
            )
|
|
@ -1,36 +0,0 @@
|
|||
"""test for pillar csvpillar.py"""
|
||||
|
||||
|
||||
import salt.pillar.csvpillar as csvpillar
|
||||
from tests.support.mock import mock_open, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class CSVPillarTestCase(TestCase):
    """Tests for the CSV ext_pillar loader."""

    @staticmethod
    def _ext_pillar(csv_text, namespace):
        """Run csvpillar.ext_pillar for minion1 against *csv_text* served
        through a mocked salt.utils.files.fopen."""
        with patch("salt.utils.files.fopen", mock_open(csv_text)):
            return csvpillar.ext_pillar(
                mid="minion1",
                pillar=None,
                path="/fake/path/file.csv",
                idkey="id",
                namespace=namespace,
            )

    def test_001_load_utf8_csv(self):
        # The minion's row is keyed by the "id" column and returned flat.
        result = self._ext_pillar("id,foo,bar\r\nminion1,foo1,bar1", None)
        self.assertDictEqual(
            {"id": "minion1", "foo": "foo1", "bar": "bar1"}, result
        )

    def test_002_load_utf8_csv_namespc(self):
        # With a namespace, the row is nested under that key.
        result = self._ext_pillar("id,foo,bar\r\nminion1,foo1,bar1", "baz")
        self.assertDictEqual(
            {"baz": {"id": "minion1", "foo": "foo1", "bar": "bar1"}}, result
        )
|
|
@ -1,56 +0,0 @@
|
|||
from salt.pillar import extra_minion_data_in_pillar
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class ExtraMinionDataInPillarTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.pillar.extra_minion_data_in_pillar
    """

    def setup_loader_modules(self):
        return {extra_minion_data_in_pillar: {}}

    def setUp(self):
        # The existing pillar is irrelevant to this ext_pillar, so a bare
        # MagicMock suffices; extra_minion_data covers nested and flat keys.
        self.pillar = MagicMock()
        self.extra_minion_data = {
            "key1": {"subkey1": "value1"},
            "key2": {"subkey2": {"subsubkey2": "value2"}},
            "key3": "value3",
            "key4": {"subkey4": "value4"},
        }

    def test_extra_values_none_or_empty(self):
        # None or {} extra data yields an empty pillar contribution.
        ret = extra_minion_data_in_pillar.ext_pillar(
            "fake_id", self.pillar, "fake_include", None
        )
        self.assertEqual(ret, {})
        ret = extra_minion_data_in_pillar.ext_pillar(
            "fake_id", self.pillar, "fake_include", {}
        )
        self.assertEqual(ret, {})

    def test_include_all(self):
        # Both wildcard spellings include the extra data verbatim.
        for include_all in ["*", "<all>"]:
            ret = extra_minion_data_in_pillar.ext_pillar(
                "fake_id", self.pillar, include_all, self.extra_minion_data
            )
            self.assertEqual(ret, self.extra_minion_data)

    def test_include_specific_keys(self):
        # Tests partially existing key, key with and without subkey,
        ret = extra_minion_data_in_pillar.ext_pillar(
            "fake_id",
            self.pillar,
            include=["key1:subkey1", "key2:subkey3", "key3", "key4"],
            extra_minion_data=self.extra_minion_data,
        )
        self.assertEqual(
            ret,
            {
                "key1": {"subkey1": "value1"},
                "key3": "value3",
                "key4": {"subkey4": "value4"},
            },
        )
|
|
@ -1,170 +0,0 @@
|
|||
"""
|
||||
test for pillar file_tree.py
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
import salt.pillar.file_tree as file_tree
|
||||
import salt.utils.files
|
||||
import salt.utils.stringutils
|
||||
from tests.support.helpers import TstSuiteLoggingHandler
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
MINION_ID = "test-host"
|
||||
NODEGROUP_PATH = os.path.join("nodegroups", "test-group", "files")
|
||||
HOST_PATH = os.path.join("hosts", MINION_ID, "files")
|
||||
|
||||
BASE_PILLAR_CONTENT = {"files": {"hostfile": b"base", "groupfile": b"base"}}
|
||||
DEV_PILLAR_CONTENT = {
|
||||
"files": {
|
||||
"hostfile": b"base",
|
||||
"groupfile": b"dev2",
|
||||
"hostfile1": b"dev1",
|
||||
"groupfile1": b"dev1",
|
||||
"hostfile2": b"dev2",
|
||||
}
|
||||
}
|
||||
PARENT_PILLAR_CONTENT = {
|
||||
"files": {"hostfile": b"base", "groupfile": b"base", "hostfile2": b"dev2"}
|
||||
}
|
||||
|
||||
FILE_DATA = {
|
||||
os.path.join("base", HOST_PATH, "hostfile"): "base",
|
||||
os.path.join("dev1", HOST_PATH, "hostfile1"): "dev1",
|
||||
os.path.join("dev2", HOST_PATH, "hostfile2"): "dev2",
|
||||
os.path.join("base", NODEGROUP_PATH, "groupfile"): "base",
|
||||
os.path.join("dev1", NODEGROUP_PATH, "groupfile1"): "dev1",
|
||||
os.path.join("dev2", NODEGROUP_PATH, "groupfile"): "dev2", # test merging
|
||||
}
|
||||
|
||||
_CHECK_MINIONS_RETURN = {"minions": [MINION_ID], "missing": []}
|
||||
|
||||
|
||||
class FileTreePillarTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"test file_tree pillar"
|
||||
maxDiff = None
|
||||
|
||||
def setup_loader_modules(self):
|
||||
self.tmpdir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
|
||||
self.addCleanup(shutil.rmtree, self.tmpdir)
|
||||
cachedir = os.path.join(self.tmpdir, "cachedir")
|
||||
os.makedirs(os.path.join(cachedir, "file_tree"))
|
||||
self.pillar_path = self._create_pillar_files()
|
||||
return {
|
||||
file_tree: {
|
||||
"__opts__": {
|
||||
"cachedir": cachedir,
|
||||
"pillar_roots": {
|
||||
"base": [os.path.join(self.pillar_path, "base")],
|
||||
"dev": [
|
||||
os.path.join(self.pillar_path, "base"),
|
||||
os.path.join(self.pillar_path, "dev1"),
|
||||
os.path.join(self.pillar_path, "dev2"),
|
||||
],
|
||||
"parent": [
|
||||
os.path.join(self.pillar_path, "base", "sub1"),
|
||||
os.path.join(self.pillar_path, "dev2", "sub"),
|
||||
os.path.join(self.pillar_path, "base", "sub2"),
|
||||
],
|
||||
},
|
||||
"pillarenv": "base",
|
||||
"nodegroups": {"test-group": [MINION_ID]},
|
||||
"optimization_order": [0, 1, 2],
|
||||
"file_buffer_size": 262144,
|
||||
"file_roots": {"base": "", "dev": "", "parent": ""},
|
||||
"extension_modules": "",
|
||||
"renderer": "yaml_jinja",
|
||||
"renderer_blacklist": [],
|
||||
"renderer_whitelist": [],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def _create_pillar_files(self):
|
||||
"create files in tempdir"
|
||||
pillar_path = os.path.join(self.tmpdir, "file_tree")
|
||||
for filename in FILE_DATA:
|
||||
filepath = os.path.join(pillar_path, filename)
|
||||
os.makedirs(os.path.dirname(filepath))
|
||||
with salt.utils.files.fopen(filepath, "w") as data_file:
|
||||
data_file.write(salt.utils.stringutils.to_str(FILE_DATA[filename]))
|
||||
return pillar_path
|
||||
|
||||
def test_absolute_path(self):
|
||||
"check file tree is imported correctly with an absolute path"
|
||||
absolute_path = os.path.join(self.pillar_path, "base")
|
||||
with patch(
|
||||
"salt.utils.minions.CkMinions.check_minions",
|
||||
MagicMock(return_value=_CHECK_MINIONS_RETURN),
|
||||
):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, absolute_path)
|
||||
self.assertEqual(BASE_PILLAR_CONTENT, mypillar)
|
||||
|
||||
with patch.dict(file_tree.__opts__, {"pillarenv": "dev"}):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, absolute_path)
|
||||
self.assertEqual(BASE_PILLAR_CONTENT, mypillar)
|
||||
|
||||
def test_relative_path(self):
|
||||
"check file tree is imported correctly with a relative path"
|
||||
with patch(
|
||||
"salt.utils.minions.CkMinions.check_minions",
|
||||
MagicMock(return_value=_CHECK_MINIONS_RETURN),
|
||||
):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, ".")
|
||||
self.assertEqual(BASE_PILLAR_CONTENT, mypillar)
|
||||
|
||||
with patch.dict(file_tree.__opts__, {"pillarenv": "dev"}):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, ".")
|
||||
self.assertEqual(DEV_PILLAR_CONTENT, mypillar)
|
||||
|
||||
def test_parent_path(self):
|
||||
"check if file tree is merged correctly with a .. path"
|
||||
with patch(
|
||||
"salt.utils.minions.CkMinions.check_minions",
|
||||
MagicMock(return_value=_CHECK_MINIONS_RETURN),
|
||||
):
|
||||
with patch.dict(file_tree.__opts__, {"pillarenv": "parent"}):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, "..")
|
||||
self.assertEqual(PARENT_PILLAR_CONTENT, mypillar)
|
||||
|
||||
def test_no_pillarenv(self):
|
||||
"confirm that file_tree yells when pillarenv is missing for a relative path"
|
||||
with patch(
|
||||
"salt.utils.minions.CkMinions.check_minions",
|
||||
MagicMock(return_value=_CHECK_MINIONS_RETURN),
|
||||
):
|
||||
with patch.dict(file_tree.__opts__, {"pillarenv": None}):
|
||||
with TstSuiteLoggingHandler() as handler:
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, ".")
|
||||
self.assertEqual({}, mypillar)
|
||||
|
||||
for message in handler.messages:
|
||||
if (
|
||||
message.startswith("ERROR:")
|
||||
and "pillarenv is not set" in message
|
||||
):
|
||||
break
|
||||
else:
|
||||
raise AssertionError("Did not find error message")
|
||||
|
||||
def test_file_tree_bytes(self):
|
||||
"""
|
||||
test file_tree pillar returns bytes
|
||||
"""
|
||||
absolute_path = os.path.join(self.pillar_path, "base")
|
||||
with patch(
|
||||
"salt.utils.minions.CkMinions.check_minions",
|
||||
MagicMock(return_value=_CHECK_MINIONS_RETURN),
|
||||
):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, absolute_path)
|
||||
self.assertEqual(BASE_PILLAR_CONTENT, mypillar)
|
||||
|
||||
with patch.dict(file_tree.__opts__, {"pillarenv": "dev"}):
|
||||
mypillar = file_tree.ext_pillar(MINION_ID, None, absolute_path)
|
||||
self.assertEqual(mypillar["files"]["groupfile"], b"base")
|
|
@ -1,901 +0,0 @@
|
|||
import salt.pillar.mysql as mysql
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
|
||||
|
||||
@skipIf(mysql.MySQLdb is None, "MySQL-python module not installed")
|
||||
class MysqlPillarTestCase(TestCase):
|
||||
maxDiff = None
|
||||
|
||||
def test_001_extract_queries_legacy(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
args, kwargs = ["SELECT blah"], {}
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
]
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_002_extract_queries_list(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah",
|
||||
"SELECT blah2",
|
||||
("SELECT blah3",),
|
||||
("SELECT blah4", 2),
|
||||
{"query": "SELECT blah5"},
|
||||
{"query": "SELECT blah6", "depth": 2},
|
||||
{"query": "SELECT blah7", "as_list": True},
|
||||
{"query": "SELECT blah8", "with_lists": "1"},
|
||||
{"query": "SELECT blah9", "with_lists": "1,2"},
|
||||
{"query": "SELECT json1", "as_json": True},
|
||||
],
|
||||
{},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah8",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": [1],
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah9",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": [1, 2],
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT json1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": True,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_003_extract_queries_kwarg(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
args, kwargs = (
|
||||
[],
|
||||
{
|
||||
"1": "SELECT blah",
|
||||
"2": "SELECT blah2",
|
||||
"3": ("SELECT blah3",),
|
||||
"4": ("SELECT blah4", 2),
|
||||
"5": {"query": "SELECT blah5"},
|
||||
"6": {"query": "SELECT blah6", "depth": 2},
|
||||
"7": {"query": "SELECT blah7", "as_list": True},
|
||||
"8": {"query": "SELECT json1", "as_json": True},
|
||||
},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"2",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"4",
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"5",
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"6",
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"7",
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"8",
|
||||
{
|
||||
"query": "SELECT json1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": True,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_004_extract_queries_mixed(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah1",
|
||||
("SELECT blah2", 2),
|
||||
{"query": "SELECT blah3", "as_list": True},
|
||||
],
|
||||
{
|
||||
"1": "SELECT blah1",
|
||||
"2": ("SELECT blah2", 2),
|
||||
"3": {"query": "SELECT blah3", "as_list": True},
|
||||
},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"2",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_005_extract_queries_bogus_list(self):
|
||||
# This test is specifically checking that empty queries are dropped
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah",
|
||||
"",
|
||||
"SELECT blah2",
|
||||
("SELECT blah3",),
|
||||
("",),
|
||||
("SELECT blah4", 2),
|
||||
tuple(),
|
||||
("SELECT blah5",),
|
||||
{"query": "SELECT blah6"},
|
||||
{"query": ""},
|
||||
{"query": "SELECT blah7", "depth": 2},
|
||||
{"not_a_query": "in sight!"},
|
||||
{"query": "SELECT blah8", "as_list": True},
|
||||
],
|
||||
{},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah8",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_006_extract_queries_bogus_kwargs(self):
|
||||
# this test is cut down as most of the path matches test_*_bogus_list
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
args, kwargs = [], {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_011_enter_root(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.enter_root("test")
|
||||
self.assertEqual(return_data.result["test"], return_data.focus)
|
||||
return_data.enter_root(None)
|
||||
self.assertEqual(return_data.result, return_data.focus)
|
||||
|
||||
def test_021_process_fields(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
self.assertEqual(return_data.num_fields, 2)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b"], 2)
|
||||
self.assertEqual(return_data.num_fields, 2)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 2)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 3)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 4)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
|
||||
def test_111_process_results_legacy(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2]])
|
||||
self.assertEqual({1: 2}, return_data.result)
|
||||
|
||||
def test_112_process_results_legacy_multiple(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2], [3, 4], [5, 6]])
|
||||
self.assertEqual({1: 2, 3: 4, 5: 6}, return_data.result)
|
||||
|
||||
def test_121_process_results_depth_0(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_122_process_results_depth_1(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {"b": 2, "c": 3, "d": 4}, 5: {"b": 6, "c": 7, "d": 8}},
|
||||
return_data.result,
|
||||
)
|
||||
|
||||
def test_123_process_results_depth_2(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}}, return_data.result
|
||||
)
|
||||
|
||||
def test_124_process_results_depth_3(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 3)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_125_process_results_depth_4(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 4)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_131_process_results_overwrite_legacy_multiple(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2], [3, 4], [1, 6]])
|
||||
self.assertEqual({1: 6, 3: 4}, return_data.result)
|
||||
|
||||
def test_132_process_results_merge_depth_0(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}, 6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_133_process_results_overwrite_depth_0(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
|
||||
self.assertEqual({1: {2: {3: 8}}}, return_data.result)
|
||||
|
||||
def test_134_process_results_deepmerge_depth_0(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4, 7: 8}}}, return_data.result)
|
||||
|
||||
def test_135_process_results_overwrite_depth_1(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual({1: {"b": 6, "c": 7, "d": 8}}, return_data.result)
|
||||
|
||||
def test_136_process_results_merge_depth_2(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}}, return_data.result
|
||||
)
|
||||
|
||||
def test_137_process_results_overwrite_depth_2(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": 7, "d": 8}}}, return_data.result)
|
||||
|
||||
def test_201_process_results_complexity_multiresults(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": 7, "d": 8}}}, return_data.result)
|
||||
|
||||
def test_202_process_results_complexity_as_list(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.as_list = True
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": [3, 7], "d": [4, 8]}}}, return_data.result)
|
||||
|
||||
def test_203_process_results_complexity_as_list_deeper(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.as_list = True
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 8]])
|
||||
self.assertEqual({1: {2: {3: [4, 8]}}}, return_data.result)
|
||||
|
||||
def test_204_process_results_complexity_as_list_mismatch_depth(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
self.assertEqual({1: {2: {3: [4, 5, {6: 7}]}}}, return_data.result)
|
||||
|
||||
def test_205_process_results_complexity_as_list_mismatch_depth_reversed(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}}, return_data.result)
|
||||
|
||||
def test_206_process_results_complexity_as_list_mismatch_depth_weird_order(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}}, return_data.result)
|
||||
|
||||
def test_207_process_results_complexity_collision_mismatch_depth(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
self.assertEqual({1: {2: {3: {6: 7}}}}, return_data.result)
|
||||
|
||||
def test_208_process_results_complexity_collision_mismatch_depth_reversed(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_209_process_results_complexity_collision_mismatch_depth_weird_order(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_20A_process_results_complexity_as_list_vary(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.as_list = False
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_207_process_results_complexity_roots_collision(self):
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.enter_root(1)
|
||||
return_data.process_results([[5, 6, 7, 8]])
|
||||
self.assertEqual({1: {5: {6: {7: 8}}}}, return_data.result)
|
||||
|
||||
def test_301_process_results_with_lists(self):
|
||||
"""
|
||||
Validates the following results:
|
||||
|
||||
{'a': [
|
||||
{'c': [
|
||||
{'e': 1},
|
||||
{'g': 2}
|
||||
]
|
||||
},
|
||||
{'h': [
|
||||
{'j': 3, 'k': 4}
|
||||
]
|
||||
}
|
||||
]}
|
||||
"""
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = [1, 3]
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
|
||||
return_data.process_results(
|
||||
[
|
||||
["a", "b", "c", "d", "e", 1],
|
||||
["a", "b", "c", "f", "g", 2],
|
||||
["a", "z", "h", "y", "j", 3],
|
||||
["a", "z", "h", "y", "k", 4],
|
||||
]
|
||||
)
|
||||
assert "a" in return_data.result
|
||||
for x in return_data.result["a"]:
|
||||
if "c" in x:
|
||||
assert list(x.keys()) == ["c"], x.keys()
|
||||
for y in x["c"]:
|
||||
if "e" in y:
|
||||
assert list(y.keys()) == ["e"]
|
||||
assert y["e"] == 1
|
||||
elif "g" in y:
|
||||
assert list(y.keys()) == ["g"]
|
||||
assert y["g"] == 2
|
||||
else:
|
||||
raise ValueError("Unexpected value {}".format(y))
|
||||
elif "h" in x:
|
||||
assert len(x["h"]) == 1
|
||||
for y in x["h"]:
|
||||
if "j" in y:
|
||||
assert len(y.keys()) == 2
|
||||
assert y["j"] == 3
|
||||
elif "h" in y:
|
||||
assert len(y.keys()) == 2
|
||||
assert y["k"] == 4
|
||||
else:
|
||||
raise ValueError("Unexpected value {}".format(y))
|
||||
else:
|
||||
raise ValueError("Unexpected value {}".format(x))
|
||||
|
||||
def test_302_process_results_with_lists_consecutive(self):
|
||||
"""
|
||||
Validates the following results:
|
||||
|
||||
{'a': [
|
||||
[[
|
||||
{'e': 1},
|
||||
{'g': 2}
|
||||
]
|
||||
],
|
||||
[[
|
||||
{'j': 3, 'k': 4}
|
||||
]
|
||||
]
|
||||
]}
|
||||
"""
|
||||
return_data = mysql.MySQLExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = [1, 2, 3]
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
|
||||
return_data.process_results(
|
||||
[
|
||||
["a", "b", "c", "d", "e", 1],
|
||||
["a", "b", "c", "f", "g", 2],
|
||||
["a", "z", "h", "y", "j", 3],
|
||||
["a", "z", "h", "y", "k", 4],
|
||||
]
|
||||
)
|
||||
|
||||
assert "a" in return_data.result
|
||||
for x in return_data.result["a"]:
|
||||
assert len(x) == 1
|
||||
if len(x[0][0]) == 1:
|
||||
for y in x[0]:
|
||||
if "e" in y:
|
||||
assert list(y.keys()) == ["e"]
|
||||
assert y["e"] == 1
|
||||
elif "g" in y:
|
||||
assert list(y.keys()) == ["g"]
|
||||
assert y["g"] == 2
|
||||
else:
|
||||
raise ValueError("Unexpected value {}".format(y))
|
||||
elif len(x[0][0]) == 2:
|
||||
for y in x[0]:
|
||||
if "j" in y:
|
||||
assert len(y.keys()) == 2
|
||||
assert y["j"] == 3
|
||||
elif "k" in y:
|
||||
assert len(y.keys()) == 2
|
||||
assert y["k"] == 4
|
||||
else:
|
||||
raise ValueError("Unexpected value {}".format(len(x[0][0])))
|
||||
else:
|
||||
raise ValueError("Unexpected value {}".format(x))
|
|
@ -1,43 +0,0 @@
|
|||
import salt.pillar.nodegroups as nodegroups
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
fake_minion_id = "fake_id"
|
||||
fake_pillar = {}
|
||||
fake_nodegroups = {
|
||||
"groupA": fake_minion_id,
|
||||
"groupB": "another_minion_id",
|
||||
}
|
||||
fake_opts = {"cache": "localfs", "nodegroups": fake_nodegroups, "id": fake_minion_id}
|
||||
fake_pillar_name = "fake_pillar_name"
|
||||
|
||||
|
||||
def side_effect(group_sel, t):
|
||||
if group_sel.find(fake_minion_id) != -1:
|
||||
return {"minions": [fake_minion_id], "missing": []}
|
||||
return {"minions": ["another_minion_id"], "missing": []}
|
||||
|
||||
|
||||
class NodegroupsPillarTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Tests for salt.pillar.nodegroups
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {nodegroups: {"__opts__": fake_opts}}
|
||||
|
||||
def _runner(self, expected_ret, pillar_name=None):
|
||||
with patch(
|
||||
"salt.utils.minions.CkMinions.check_minions",
|
||||
MagicMock(side_effect=side_effect),
|
||||
):
|
||||
pillar_name = pillar_name or fake_pillar_name
|
||||
actual_ret = nodegroups.ext_pillar(
|
||||
fake_minion_id, fake_pillar, pillar_name=pillar_name
|
||||
)
|
||||
self.assertDictEqual(actual_ret, expected_ret)
|
||||
|
||||
def test_succeeds(self):
|
||||
ret = {fake_pillar_name: ["groupA"]}
|
||||
self._runner(ret)
|
|
@ -1,27 +0,0 @@
|
|||
import salt.pillar.pepa as pepa
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
try:
|
||||
from salt.utils.odict import OrderedDict
|
||||
except ImportError:
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class PepaPillarTestCase(TestCase):
|
||||
def test_repeated_keys(self):
|
||||
expected_result = {
|
||||
"foo": {
|
||||
"bar": {
|
||||
"foo": True,
|
||||
"baz": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
data = OrderedDict(
|
||||
[
|
||||
("foo..bar..foo", True),
|
||||
("foo..bar..baz", True),
|
||||
]
|
||||
)
|
||||
result = pepa.key_value_to_tree(data)
|
||||
self.assertDictEqual(result, expected_result)
|
|
@ -1,18 +0,0 @@
|
|||
import salt.utils.stringutils
|
||||
from salt.pillar.pillar_ldap import _config
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class LdapPillarTestCase(TestCase):
|
||||
def test__config_returns_str(self):
|
||||
conf = {"foo": "bar"}
|
||||
assert _config("foo", conf) == salt.utils.stringutils.to_str("bar")
|
||||
|
||||
def test__conf_defaults_to_none(self):
|
||||
conf = {"foo": "bar"}
|
||||
assert _config("bang", conf) is None
|
||||
|
||||
def test__conf_returns_str_from_unicode_default(self):
|
||||
conf = {"foo": "bar"}
|
||||
default = salt.utils.stringutils.to_unicode("bam")
|
||||
assert _config("bang", conf, default) == salt.utils.stringutils.to_str("bam")
|
|
@ -1,91 +0,0 @@
|
|||
import logging
|
||||
|
||||
import salt.pillar.s3 as s3_pillar
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, mock_open, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class S3PillarTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
TestCase for salt.pillar.s3
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
s3_pillar_globals = {"__utils__": {}}
|
||||
return {s3_pillar: s3_pillar_globals}
|
||||
|
||||
def test_refresh_buckets_cache_file(self):
|
||||
"""
|
||||
Test pagination with refresh_buckets_cache_file
|
||||
"""
|
||||
key = "XXXXXXXXXXXXXXXXXXXXX"
|
||||
keyid = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
|
||||
bucket = "dummy_bucket"
|
||||
service_url = "s3.amazonaws.com"
|
||||
cache_file = "dummy_file"
|
||||
|
||||
s3_creds = s3_pillar.S3Credentials(key, keyid, bucket, service_url)
|
||||
|
||||
mock_return_first = [
|
||||
{"Name": "pillar-bucket"},
|
||||
{"Prefix": "test"},
|
||||
{"KeyCount": "10"},
|
||||
{"MaxKeys": "10"},
|
||||
{"NextContinuationToken": "XXXXX"},
|
||||
{"IsTruncated": "true"},
|
||||
]
|
||||
|
||||
mock_return_second = [
|
||||
{"Name": "pillar-bucket"},
|
||||
{"Prefix": "test"},
|
||||
{"KeyCount": "10"},
|
||||
{"MaxKeys": "10"},
|
||||
{"IsTruncated": "true"},
|
||||
]
|
||||
|
||||
first_range_end = 999
|
||||
second_range_end = 1200
|
||||
for i in range(0, first_range_end):
|
||||
key_name = "{}/init.sls".format(i)
|
||||
tmp = {
|
||||
"Key": key_name,
|
||||
"LastModified": "2019-12-18T15:54:39.000Z",
|
||||
"ETag": '"fba0a053704e8b357c94be90b44bb640"',
|
||||
"Size": "5 ",
|
||||
"StorageClass": "STANDARD",
|
||||
}
|
||||
mock_return_first.append(tmp)
|
||||
|
||||
for i in range(first_range_end, second_range_end):
|
||||
key_name = "{}/init.sls".format(i)
|
||||
tmp = {
|
||||
"Key": key_name,
|
||||
"LastModified": "2019-12-18T15:54:39.000Z",
|
||||
"ETag": '"fba0a053704e8b357c94be90b44bb640"',
|
||||
"Size": "5 ",
|
||||
"StorageClass": "STANDARD",
|
||||
}
|
||||
mock_return_second.append(tmp)
|
||||
|
||||
_expected = {"base": {"dummy_bucket": []}}
|
||||
for i in range(0, second_range_end):
|
||||
key_name = "{}/init.sls".format(i)
|
||||
tmp = {
|
||||
"Key": key_name,
|
||||
"LastModified": "2019-12-18T15:54:39.000Z",
|
||||
"ETag": '"fba0a053704e8b357c94be90b44bb640"',
|
||||
"Size": "5 ",
|
||||
"StorageClass": "STANDARD",
|
||||
}
|
||||
_expected["base"]["dummy_bucket"].append(tmp)
|
||||
|
||||
mock_s3_query = MagicMock(side_effect=[mock_return_first, mock_return_second])
|
||||
with patch.dict(s3_pillar.__utils__, {"s3.query": mock_s3_query}):
|
||||
with patch("salt.utils.files.fopen", mock_open(read_data=b"")):
|
||||
ret = s3_pillar._refresh_buckets_cache_file(
|
||||
s3_creds, cache_file, False, "base", ""
|
||||
)
|
||||
self.assertEqual(ret, _expected)
|
|
@ -1,75 +0,0 @@
|
|||
import os
|
||||
|
||||
import salt.pillar.saltclass as saltclass
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class SaltclassPillarTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Tests for salt.pillar.saltclass
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {saltclass: {}}
|
||||
|
||||
def _runner(self, expected_ret):
|
||||
fake_args = {
|
||||
"path": os.path.abspath(
|
||||
os.path.join(RUNTIME_VARS.FILES, "saltclass", "examples")
|
||||
)
|
||||
}
|
||||
fake_pillar = {}
|
||||
fake_minion_id = "fake_id"
|
||||
try:
|
||||
full_ret = saltclass.ext_pillar(fake_minion_id, fake_pillar, fake_args)
|
||||
parsed_ret = full_ret["__saltclass__"]["classes"]
|
||||
# Fail the test if we hit our NoneType error
|
||||
except TypeError as err:
|
||||
self.fail(err)
|
||||
# Else give the parsed content result
|
||||
self.assertListEqual(expected_ret, parsed_ret)
|
||||
|
||||
def test_succeeds(self):
|
||||
ret = [
|
||||
"default.users",
|
||||
"default.motd",
|
||||
"default.empty",
|
||||
"default",
|
||||
"roles.app",
|
||||
"roles.nginx",
|
||||
]
|
||||
self._runner(ret)
|
||||
|
||||
|
||||
class SaltclassPillarTestCaseListExpansion(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Tests for salt.pillar.saltclass variable expansion in list
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {saltclass: {}}
|
||||
|
||||
def _runner(self, expected_ret):
|
||||
full_ret = {}
|
||||
parsed_ret = []
|
||||
fake_args = {
|
||||
"path": os.path.abspath(
|
||||
os.path.join(RUNTIME_VARS.FILES, "saltclass", "examples")
|
||||
)
|
||||
}
|
||||
fake_pillar = {}
|
||||
fake_minion_id = "fake_id"
|
||||
try:
|
||||
full_ret = saltclass.ext_pillar(fake_minion_id, fake_pillar, fake_args)
|
||||
parsed_ret = full_ret["test_list"]
|
||||
# Fail the test if we hit our NoneType error
|
||||
except TypeError as err:
|
||||
self.fail(err)
|
||||
# Else give the parsed content result
|
||||
self.assertListEqual(expected_ret, parsed_ret)
|
||||
|
||||
def test_succeeds(self):
|
||||
ret = [{"a": "192.168.10.10"}, "192.168.10.20"]
|
||||
self._runner(ret)
|
|
@ -1,781 +0,0 @@
|
|||
import salt.pillar.sqlcipher as sqlcipher
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class SQLCipherPillarTestCase(TestCase):
|
||||
maxDiff = None
|
||||
|
||||
def test_001_extract_queries_list(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah",
|
||||
"SELECT blah2",
|
||||
("SELECT blah3",),
|
||||
("SELECT blah4", 2),
|
||||
{"query": "SELECT blah5"},
|
||||
{"query": "SELECT blah6", "depth": 2},
|
||||
{"query": "SELECT blah7", "as_list": True},
|
||||
{"query": "SELECT blah8", "with_lists": "1"},
|
||||
{"query": "SELECT blah9", "with_lists": "1,2"},
|
||||
],
|
||||
{},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah8",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": [1],
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah9",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": [1, 2],
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_002_extract_queries_kwarg(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
args, kwargs = (
|
||||
[],
|
||||
{
|
||||
"1": "SELECT blah",
|
||||
"2": "SELECT blah2",
|
||||
"3": ("SELECT blah3",),
|
||||
"4": ("SELECT blah4", 2),
|
||||
"5": {"query": "SELECT blah5"},
|
||||
"6": {"query": "SELECT blah6", "depth": 2},
|
||||
"7": {"query": "SELECT blah7", "as_list": True},
|
||||
},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"2",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"4",
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"5",
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"6",
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"7",
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_003_extract_queries_mixed(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah1",
|
||||
("SELECT blah2", 2),
|
||||
{"query": "SELECT blah3", "as_list": True},
|
||||
],
|
||||
{
|
||||
"1": "SELECT blah1",
|
||||
"2": ("SELECT blah2", 2),
|
||||
"3": {"query": "SELECT blah3", "as_list": True},
|
||||
},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"2",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_004_extract_queries_bogus_list(self):
|
||||
# This test is specifically checking that empty queries are dropped
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah",
|
||||
"",
|
||||
"SELECT blah2",
|
||||
("SELECT blah3",),
|
||||
("",),
|
||||
("SELECT blah4", 2),
|
||||
tuple(),
|
||||
("SELECT blah5",),
|
||||
{"query": "SELECT blah6"},
|
||||
{"query": ""},
|
||||
{"query": "SELECT blah7", "depth": 2},
|
||||
{"not_a_query": "in sight!"},
|
||||
{"query": "SELECT blah8", "as_list": True},
|
||||
],
|
||||
{},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah8",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_005_extract_queries_bogus_kwargs(self):
|
||||
# this test is cut down as most of the path matches test_*_bogus_list
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
args, kwargs = [], {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_011_enter_root(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.enter_root("test")
|
||||
self.assertEqual(return_data.result["test"], return_data.focus)
|
||||
return_data.enter_root(None)
|
||||
self.assertEqual(return_data.result, return_data.focus)
|
||||
|
||||
def test_021_process_fields(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
self.assertEqual(return_data.num_fields, 2)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b"], 2)
|
||||
self.assertEqual(return_data.num_fields, 2)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 2)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 3)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 4)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
|
||||
def test_111_process_results_legacy(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2]])
|
||||
self.assertEqual({1: 2}, return_data.result)
|
||||
|
||||
def test_112_process_results_legacy_multiple(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2], [3, 4], [5, 6]])
|
||||
self.assertEqual({1: 2, 3: 4, 5: 6}, return_data.result)
|
||||
|
||||
def test_121_process_results_depth_0(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_122_process_results_depth_1(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {"b": 2, "c": 3, "d": 4}, 5: {"b": 6, "c": 7, "d": 8}},
|
||||
return_data.result,
|
||||
)
|
||||
|
||||
def test_123_process_results_depth_2(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}}, return_data.result
|
||||
)
|
||||
|
||||
def test_124_process_results_depth_3(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 3)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_125_process_results_depth_4(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 4)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_131_process_results_overwrite_legacy_multiple(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2], [3, 4], [1, 6]])
|
||||
self.assertEqual({1: 6, 3: 4}, return_data.result)
|
||||
|
||||
def test_132_process_results_merge_depth_0(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}, 6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_133_process_results_overwrite_depth_0(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
|
||||
self.assertEqual({1: {2: {3: 8}}}, return_data.result)
|
||||
|
||||
def test_134_process_results_deepmerge_depth_0(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4, 7: 8}}}, return_data.result)
|
||||
|
||||
def test_135_process_results_overwrite_depth_1(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual({1: {"b": 6, "c": 7, "d": 8}}, return_data.result)
|
||||
|
||||
def test_136_process_results_merge_depth_2(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}}, return_data.result
|
||||
)
|
||||
|
||||
def test_137_process_results_overwrite_depth_2(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": 7, "d": 8}}}, return_data.result)
|
||||
|
||||
def test_201_process_results_complexity_multiresults(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": 7, "d": 8}}}, return_data.result)
|
||||
|
||||
def test_202_process_results_complexity_as_list(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.as_list = True
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": [3, 7], "d": [4, 8]}}}, return_data.result)
|
||||
|
||||
def test_203_process_results_complexity_as_list_deeper(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.as_list = True
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 8]])
|
||||
self.assertEqual({1: {2: {3: [4, 8]}}}, return_data.result)
|
||||
|
||||
def test_204_process_results_complexity_as_list_mismatch_depth(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
self.assertEqual({1: {2: {3: [4, 5, {6: 7}]}}}, return_data.result)
|
||||
|
||||
def test_205_process_results_complexity_as_list_mismatch_depth_reversed(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}}, return_data.result)
|
||||
|
||||
def test_206_process_results_complexity_as_list_mismatch_depth_weird_order(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}}, return_data.result)
|
||||
|
||||
def test_207_process_results_complexity_collision_mismatch_depth(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
self.assertEqual({1: {2: {3: {6: 7}}}}, return_data.result)
|
||||
|
||||
def test_208_process_results_complexity_collision_mismatch_depth_reversed(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_209_process_results_complexity_collision_mismatch_depth_weird_order(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_20A_process_results_complexity_as_list_vary(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.as_list = False
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_207_process_results_complexity_roots_collision(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.enter_root(1)
|
||||
return_data.process_results([[5, 6, 7, 8]])
|
||||
self.assertEqual({1: {5: {6: {7: 8}}}}, return_data.result)
|
||||
|
||||
def test_301_process_results_with_lists(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = [1, 3]
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
|
||||
return_data.process_results(
|
||||
[
|
||||
["a", "b", "c", "d", "e", 1],
|
||||
["a", "b", "c", "f", "g", 2],
|
||||
["a", "z", "h", "y", "j", 3],
|
||||
["a", "z", "h", "y", "k", 4],
|
||||
]
|
||||
)
|
||||
self.assertEqual(
|
||||
sorted({"a": [{"c": [{"e": 1}, {"g": 2}]}, {"h": [{"j": 3, "k": 4}]}]}),
|
||||
sorted(return_data.result),
|
||||
)
|
||||
|
||||
def test_302_process_results_with_lists_consecutive(self):
|
||||
return_data = sqlcipher.SQLCipherExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = [1, 2, 3]
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
|
||||
return_data.process_results(
|
||||
[
|
||||
["a", "b", "c", "d", "e", 1],
|
||||
["a", "b", "c", "f", "g", 2],
|
||||
["a", "z", "h", "y", "j", 3],
|
||||
["a", "z", "h", "y", "k", 4],
|
||||
]
|
||||
)
|
||||
self.assertEqual(
|
||||
sorted({"a": [[[{"e": 1}, {"g": 2}]], [[{"j": 3, "k": 4}]]]}),
|
||||
sorted(return_data.result),
|
||||
)
|
|
@ -1,781 +0,0 @@
|
|||
import salt.pillar.sqlite3 as sqlite3
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class SQLite3PillarTestCase(TestCase):
|
||||
maxDiff = None
|
||||
|
||||
def test_001_extract_queries_list(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah",
|
||||
"SELECT blah2",
|
||||
("SELECT blah3",),
|
||||
("SELECT blah4", 2),
|
||||
{"query": "SELECT blah5"},
|
||||
{"query": "SELECT blah6", "depth": 2},
|
||||
{"query": "SELECT blah7", "as_list": True},
|
||||
{"query": "SELECT blah8", "with_lists": "1"},
|
||||
{"query": "SELECT blah9", "with_lists": "1,2"},
|
||||
],
|
||||
{},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah8",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": [1],
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah9",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": [1, 2],
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_002_extract_queries_kwarg(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
args, kwargs = (
|
||||
[],
|
||||
{
|
||||
"1": "SELECT blah",
|
||||
"2": "SELECT blah2",
|
||||
"3": ("SELECT blah3",),
|
||||
"4": ("SELECT blah4", 2),
|
||||
"5": {"query": "SELECT blah5"},
|
||||
"6": {"query": "SELECT blah6", "depth": 2},
|
||||
"7": {"query": "SELECT blah7", "as_list": True},
|
||||
},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"2",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"4",
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"5",
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"6",
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"7",
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_003_extract_queries_mixed(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah1",
|
||||
("SELECT blah2", 2),
|
||||
{"query": "SELECT blah3", "as_list": True},
|
||||
],
|
||||
{
|
||||
"1": "SELECT blah1",
|
||||
"2": ("SELECT blah2", 2),
|
||||
"3": {"query": "SELECT blah3", "as_list": True},
|
||||
},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah1",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"2",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_004_extract_queries_bogus_list(self):
|
||||
# This test is specifically checking that empty queries are dropped
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
args, kwargs = (
|
||||
[
|
||||
"SELECT blah",
|
||||
"",
|
||||
"SELECT blah2",
|
||||
("SELECT blah3",),
|
||||
("",),
|
||||
("SELECT blah4", 2),
|
||||
tuple(),
|
||||
("SELECT blah5",),
|
||||
{"query": "SELECT blah6"},
|
||||
{"query": ""},
|
||||
{"query": "SELECT blah7", "depth": 2},
|
||||
{"not_a_query": "in sight!"},
|
||||
{"query": "SELECT blah8", "as_list": True},
|
||||
],
|
||||
{},
|
||||
)
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah3",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah4",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah5",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah6",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah7",
|
||||
"depth": 2,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
None,
|
||||
{
|
||||
"query": "SELECT blah8",
|
||||
"depth": 0,
|
||||
"as_list": True,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_005_extract_queries_bogus_kwargs(self):
|
||||
# this test is cut down as most of the path matches test_*_bogus_list
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
args, kwargs = [], {"1": "SELECT blah", "2": "", "3": "SELECT blah2"}
|
||||
qbuffer = return_data.extract_queries(args, kwargs)
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
"1",
|
||||
{
|
||||
"query": "SELECT blah",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
[
|
||||
"3",
|
||||
{
|
||||
"query": "SELECT blah2",
|
||||
"depth": 0,
|
||||
"as_list": False,
|
||||
"as_json": False,
|
||||
"with_lists": None,
|
||||
"ignore_null": False,
|
||||
},
|
||||
],
|
||||
],
|
||||
qbuffer,
|
||||
)
|
||||
|
||||
def test_011_enter_root(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.enter_root("test")
|
||||
self.assertEqual(return_data.result["test"], return_data.focus)
|
||||
return_data.enter_root(None)
|
||||
self.assertEqual(return_data.result, return_data.focus)
|
||||
|
||||
def test_021_process_fields(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
self.assertEqual(return_data.num_fields, 2)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b"], 2)
|
||||
self.assertEqual(return_data.num_fields, 2)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 1)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 2)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 3)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 4)
|
||||
self.assertEqual(return_data.num_fields, 4)
|
||||
self.assertEqual(return_data.depth, 3)
|
||||
|
||||
def test_111_process_results_legacy(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2]])
|
||||
self.assertEqual({1: 2}, return_data.result)
|
||||
|
||||
def test_112_process_results_legacy_multiple(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2], [3, 4], [5, 6]])
|
||||
self.assertEqual({1: 2, 3: 4, 5: 6}, return_data.result)
|
||||
|
||||
def test_121_process_results_depth_0(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_122_process_results_depth_1(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {"b": 2, "c": 3, "d": 4}, 5: {"b": 6, "c": 7, "d": 8}},
|
||||
return_data.result,
|
||||
)
|
||||
|
||||
def test_123_process_results_depth_2(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {2: {"c": 3, "d": 4}}, 5: {6: {"c": 7, "d": 8}}}, return_data.result
|
||||
)
|
||||
|
||||
def test_124_process_results_depth_3(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 3)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_125_process_results_depth_4(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 4)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [5, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}}, 5: {6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_131_process_results_overwrite_legacy_multiple(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.process_results([[1, 2], [3, 4], [1, 6]])
|
||||
self.assertEqual({1: 6, 3: 4}, return_data.result)
|
||||
|
||||
def test_132_process_results_merge_depth_0(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4}, 6: {7: 8}}}, return_data.result)
|
||||
|
||||
def test_133_process_results_overwrite_depth_0(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 3, 8]])
|
||||
self.assertEqual({1: {2: {3: 8}}}, return_data.result)
|
||||
|
||||
def test_134_process_results_deepmerge_depth_0(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {3: 4, 7: 8}}}, return_data.result)
|
||||
|
||||
def test_135_process_results_overwrite_depth_1(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 1)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual({1: {"b": 6, "c": 7, "d": 8}}, return_data.result)
|
||||
|
||||
def test_136_process_results_merge_depth_2(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 6, 7, 8]])
|
||||
self.assertEqual(
|
||||
{1: {2: {"c": 3, "d": 4}, 6: {"c": 7, "d": 8}}}, return_data.result
|
||||
)
|
||||
|
||||
def test_137_process_results_overwrite_depth_2(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4], [1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": 7, "d": 8}}}, return_data.result)
|
||||
|
||||
def test_201_process_results_complexity_multiresults(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": 7, "d": 8}}}, return_data.result)
|
||||
|
||||
def test_202_process_results_complexity_as_list(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 2)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.as_list = True
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 7, 8]])
|
||||
self.assertEqual({1: {2: {"c": [3, 7], "d": [4, 8]}}}, return_data.result)
|
||||
|
||||
def test_203_process_results_complexity_as_list_deeper(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.as_list = True
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 8]])
|
||||
self.assertEqual({1: {2: {3: [4, 8]}}}, return_data.result)
|
||||
|
||||
def test_204_process_results_complexity_as_list_mismatch_depth(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
self.assertEqual({1: {2: {3: [4, 5, {6: 7}]}}}, return_data.result)
|
||||
|
||||
def test_205_process_results_complexity_as_list_mismatch_depth_reversed(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: [{6: 7, 8: 9}, 4, 5]}}}, return_data.result)
|
||||
|
||||
def test_206_process_results_complexity_as_list_mismatch_depth_weird_order(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: [{6: 7}, 4, {8: 9}, 5]}}}, return_data.result)
|
||||
|
||||
def test_207_process_results_complexity_collision_mismatch_depth(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
self.assertEqual({1: {2: {3: {6: 7}}}}, return_data.result)
|
||||
|
||||
def test_208_process_results_complexity_collision_mismatch_depth_reversed(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_209_process_results_complexity_collision_mismatch_depth_weird_order(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_20A_process_results_complexity_as_list_vary(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = True
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e"], 0)
|
||||
return_data.process_results([[1, 2, 3, 6, 7]])
|
||||
return_data.process_results([[1, 2, 3, 8, 9]])
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.as_list = False
|
||||
return_data.process_results([[1, 2, 3, 5]])
|
||||
self.assertEqual({1: {2: {3: 5}}}, return_data.result)
|
||||
|
||||
def test_207_process_results_complexity_roots_collision(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = []
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d"], 0)
|
||||
return_data.process_results([[1, 2, 3, 4]])
|
||||
return_data.enter_root(1)
|
||||
return_data.process_results([[5, 6, 7, 8]])
|
||||
self.assertEqual({1: {5: {6: {7: 8}}}}, return_data.result)
|
||||
|
||||
def test_301_process_results_with_lists(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = [1, 3]
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
|
||||
return_data.process_results(
|
||||
[
|
||||
["a", "b", "c", "d", "e", 1],
|
||||
["a", "b", "c", "f", "g", 2],
|
||||
["a", "z", "h", "y", "j", 3],
|
||||
["a", "z", "h", "y", "k", 4],
|
||||
]
|
||||
)
|
||||
self.assertEqual(
|
||||
sorted({"a": [{"c": [{"e": 1}, {"g": 2}]}, {"h": [{"j": 3, "k": 4}]}]}),
|
||||
sorted(return_data.result),
|
||||
)
|
||||
|
||||
def test_302_process_results_with_lists_consecutive(self):
|
||||
return_data = sqlite3.SQLite3ExtPillar()
|
||||
return_data.as_list = False
|
||||
return_data.with_lists = [1, 2, 3]
|
||||
return_data.enter_root(None)
|
||||
return_data.process_fields(["a", "b", "c", "d", "e", "v"], 0)
|
||||
return_data.process_results(
|
||||
[
|
||||
["a", "b", "c", "d", "e", 1],
|
||||
["a", "b", "c", "f", "g", 2],
|
||||
["a", "z", "h", "y", "j", 3],
|
||||
["a", "z", "h", "y", "k", 4],
|
||||
]
|
||||
)
|
||||
self.assertEqual(
|
||||
sorted({"a": [[[{"e": 1}, {"g": 2}]], [[{"j": 3, "k": 4}]]]}),
|
||||
sorted(return_data.result),
|
||||
)
|
Loading…
Add table
Reference in a new issue