mirror of
https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00
These are not integration tests
This commit is contained in:
parent
058dcbf57e
commit
d05028e88c
15 changed files with 772 additions and 872 deletions
|
@ -1,134 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
:codeauthor: :email:`Mike Place <mp@saltstack.com>`
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import logging
|
||||
import pwd
|
||||
import shutil
|
||||
|
||||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON
|
||||
|
||||
# Import salt libs
|
||||
from salt.fileserver import gitfs
|
||||
|
||||
gitfs.__opts__ = {'cachedir': '/tmp/gitfs_test_cache',
|
||||
'gitfs_remotes': [''],
|
||||
'gitfs_root': '',
|
||||
'fileserver_backend': ['git'],
|
||||
'gitfs_base': 'master',
|
||||
'fileserver_events': True,
|
||||
'transport': 'zeromq',
|
||||
'gitfs_mountpoint': '',
|
||||
'gitfs_env_whitelist': [],
|
||||
'gitfs_env_blacklist': [],
|
||||
'gitfs_user': '',
|
||||
'gitfs_password': '',
|
||||
'gitfs_insecure_auth': False,
|
||||
'gitfs_privkey': '',
|
||||
'gitfs_pubkey': '',
|
||||
'gitfs_passphrase': '',
|
||||
'gitfs_refspecs': ['+refs/heads/*:refs/remotes/origin/*',
|
||||
'+refs/tags/*:refs/tags/*'],
|
||||
'gitfs_ssl_verify': True
|
||||
}
|
||||
|
||||
LOAD = {'saltenv': 'base'}
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import git
|
||||
GITFS_AVAILABLE = True
|
||||
except ImportError:
|
||||
GITFS_AVAILABLE = False
|
||||
|
||||
if not gitfs.__virtual__():
|
||||
GITFS_AVAILABLE = False
|
||||
|
||||
|
||||
@skipIf(not GITFS_AVAILABLE, "GitFS could not be loaded. Skipping GitFS tests!")
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class GitFSTest(integration.ModuleCase):
|
||||
maxDiff = None
|
||||
|
||||
def setUp(self):
|
||||
'''
|
||||
We don't want to check in another .git dir into GH because that just gets messy.
|
||||
Instead, we'll create a temporary repo on the fly for the tests to examine.
|
||||
'''
|
||||
self.integration_base_files = os.path.join(integration.FILES, 'file', 'base')
|
||||
self.tmp_repo_dir = os.path.join(integration.TMP, 'gitfs_root')
|
||||
|
||||
# Create the dir if it doesn't already exist
|
||||
|
||||
try:
|
||||
shutil.copytree(self.integration_base_files, self.tmp_repo_dir + '/')
|
||||
except OSError:
|
||||
# We probably caught an error because files already exist. Ignore
|
||||
pass
|
||||
|
||||
try:
|
||||
repo = git.Repo(self.tmp_repo_dir)
|
||||
except git.exc.InvalidGitRepositoryError:
|
||||
repo = git.Repo.init(self.tmp_repo_dir)
|
||||
|
||||
if 'USERNAME' not in os.environ:
|
||||
try:
|
||||
os.environ['USERNAME'] = pwd.getpwuid(os.geteuid()).pw_name
|
||||
except AttributeError:
|
||||
log.error('Unable to get effective username, falling back to '
|
||||
'\'root\'.')
|
||||
os.environ['USERNAME'] = 'root'
|
||||
|
||||
repo.index.add([x for x in os.listdir(self.tmp_repo_dir)
|
||||
if x != '.git'])
|
||||
repo.index.commit('Test')
|
||||
|
||||
with patch.dict(gitfs.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'gitfs_remotes': ['file://' + self.tmp_repo_dir],
|
||||
'sock_dir': self.master_opts['sock_dir'],
|
||||
'__role': self.master_opts['__role']}):
|
||||
gitfs.update()
|
||||
|
||||
def tearDown(self):
|
||||
'''
|
||||
Remove the temporary git repository and gitfs cache directory to ensure
|
||||
a clean environment for each test.
|
||||
'''
|
||||
shutil.rmtree(self.tmp_repo_dir)
|
||||
shutil.rmtree(os.path.join(self.master_opts['cachedir'], 'gitfs'))
|
||||
del self.tmp_repo_dir
|
||||
del self.integration_base_files
|
||||
|
||||
#@skipIf(True, 'Skipping tests temporarily')
|
||||
def test_file_list(self):
|
||||
with patch.dict(gitfs.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'gitfs_remotes': ['file://' + self.tmp_repo_dir],
|
||||
'sock_dir': self.master_opts['sock_dir'],
|
||||
'__role': self.master_opts['__role']}):
|
||||
ret = gitfs.file_list(LOAD)
|
||||
self.assertIn('testfile', ret)
|
||||
|
||||
#@skipIf(True, 'Skipping tests temporarily')
|
||||
def test_dir_list(self):
|
||||
with patch.dict(gitfs.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'gitfs_remotes': ['file://' + self.tmp_repo_dir],
|
||||
'sock_dir': self.master_opts['sock_dir'],
|
||||
'__role': self.master_opts['__role']}):
|
||||
ret = gitfs.dir_list(LOAD)
|
||||
self.assertIn('grail', ret)
|
||||
|
||||
#@skipIf(True, 'Skipping tests temporarily')
|
||||
def test_envs(self):
|
||||
with patch.dict(gitfs.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'gitfs_remotes': ['file://' + self.tmp_repo_dir],
|
||||
'sock_dir': self.master_opts['sock_dir'],
|
||||
'__role': self.master_opts['__role']}):
|
||||
ret = gitfs.envs()
|
||||
self.assertIn('base', ret)
|
|
@ -1,246 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
:codeauthor: :email:`Mike Place <mp@saltstack.com>`
|
||||
'''
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON
|
||||
|
||||
# Import salt libs
|
||||
from salt.fileserver import roots
|
||||
from salt import fileclient
|
||||
import salt.utils
|
||||
|
||||
try:
|
||||
import win32file
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
roots.__opts__ = {}
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class RootsTest(integration.ModuleCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
'''
|
||||
Create special file_roots for symlink test on Windows
|
||||
'''
|
||||
if salt.utils.is_windows():
|
||||
root_dir = tempfile.mkdtemp(dir=integration.TMP)
|
||||
source_sym = os.path.join(root_dir, 'source_sym')
|
||||
with salt.utils.fopen(source_sym, 'w') as fp_:
|
||||
fp_.write('hello world!\n')
|
||||
cwd = os.getcwd()
|
||||
try:
|
||||
os.chdir(root_dir)
|
||||
win32file.CreateSymbolicLink('dest_sym', 'source_sym', 0)
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
cls.test_symlink_list_file_roots = {'base': [root_dir]}
|
||||
else:
|
||||
cls.test_symlink_list_file_roots = None
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
'''
|
||||
Remove special file_roots for symlink test
|
||||
'''
|
||||
if salt.utils.is_windows():
|
||||
try:
|
||||
salt.utils.rm_rf(cls.test_symlink_list_file_roots['base'][0])
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def setUp(self):
|
||||
if integration.TMP_STATE_TREE not in self.master_opts['file_roots']['base']:
|
||||
# We need to setup the file roots
|
||||
self.master_opts['file_roots']['base'] = [os.path.join(integration.FILES, 'file', 'base')]
|
||||
|
||||
def test_file_list(self):
|
||||
with patch.dict(roots.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'file_roots': self.master_opts['file_roots'],
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False}):
|
||||
ret = roots.file_list({'saltenv': 'base'})
|
||||
self.assertIn('testfile', ret)
|
||||
|
||||
def test_find_file(self):
|
||||
with patch.dict(roots.__opts__, {'file_roots': self.master_opts['file_roots'],
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False}):
|
||||
|
||||
ret = roots.find_file('testfile')
|
||||
self.assertEqual('testfile', ret['rel'])
|
||||
|
||||
full_path_to_file = os.path.join(integration.FILES, 'file', 'base', 'testfile')
|
||||
self.assertEqual(full_path_to_file, ret['path'])
|
||||
|
||||
def test_serve_file(self):
|
||||
with patch.dict(roots.__opts__, {'file_roots': self.master_opts['file_roots'],
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False,
|
||||
'file_buffer_size': 262144}):
|
||||
load = {'saltenv': 'base',
|
||||
'path': os.path.join(integration.FILES, 'file', 'base', 'testfile'),
|
||||
'loc': 0
|
||||
}
|
||||
fnd = {'path': os.path.join(integration.FILES, 'file', 'base', 'testfile'),
|
||||
'rel': 'testfile'}
|
||||
ret = roots.serve_file(load, fnd)
|
||||
|
||||
data = 'Scene 24\n\n \n OLD MAN: Ah, hee he he ha!\n ' \
|
||||
'ARTHUR: And this enchanter of whom you speak, he ' \
|
||||
'has seen the grail?\n OLD MAN: Ha ha he he he ' \
|
||||
'he!\n ARTHUR: Where does he live? Old man, where ' \
|
||||
'does he live?\n OLD MAN: He knows of a cave, a ' \
|
||||
'cave which no man has entered.\n ARTHUR: And the ' \
|
||||
'Grail... The Grail is there?\n OLD MAN: Very much ' \
|
||||
'danger, for beyond the cave lies the Gorge\n ' \
|
||||
'of Eternal Peril, which no man has ever crossed.\n ' \
|
||||
'ARTHUR: But the Grail! Where is the Grail!?\n ' \
|
||||
'OLD MAN: Seek you the Bridge of Death.\n ARTHUR: ' \
|
||||
'The Bridge of Death, which leads to the Grail?\n ' \
|
||||
'OLD MAN: Hee hee ha ha!\n\n'
|
||||
if salt.utils.is_windows():
|
||||
data = 'Scene 24\r\n\r\n \r\n OLD MAN: Ah, hee he he ' \
|
||||
'ha!\r\n ARTHUR: And this enchanter of whom you ' \
|
||||
'speak, he has seen the grail?\r\n OLD MAN: Ha ha ' \
|
||||
'he he he he!\r\n ARTHUR: Where does he live? Old ' \
|
||||
'man, where does he live?\r\n OLD MAN: He knows of ' \
|
||||
'a cave, a cave which no man has entered.\r\n ' \
|
||||
'ARTHUR: And the Grail... The Grail is there?\r\n ' \
|
||||
'OLD MAN: Very much danger, for beyond the cave lies ' \
|
||||
'the Gorge\r\n of Eternal Peril, which no man ' \
|
||||
'has ever crossed.\r\n ARTHUR: But the Grail! ' \
|
||||
'Where is the Grail!?\r\n OLD MAN: Seek you the ' \
|
||||
'Bridge of Death.\r\n ARTHUR: The Bridge of Death, ' \
|
||||
'which leads to the Grail?\r\n OLD MAN: Hee hee ha ' \
|
||||
'ha!\r\n\r\n'
|
||||
|
||||
self.assertDictEqual(
|
||||
ret,
|
||||
{'data': data,
|
||||
'dest': 'testfile'})
|
||||
|
||||
@skipIf(True, "Update test not yet implemented")
|
||||
def test_update(self):
|
||||
pass
|
||||
|
||||
def test_file_hash(self):
|
||||
with patch.dict(roots.__opts__, {'file_roots': self.master_opts['file_roots'],
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False,
|
||||
'hash_type': self.master_opts['hash_type'],
|
||||
'cachedir': self.master_opts['cachedir']}):
|
||||
load = {
|
||||
'saltenv': 'base',
|
||||
'path': os.path.join(integration.FILES, 'file', 'base', 'testfile'),
|
||||
}
|
||||
fnd = {
|
||||
'path': os.path.join(integration.FILES, 'file', 'base', 'testfile'),
|
||||
'rel': 'testfile'
|
||||
}
|
||||
ret = roots.file_hash(load, fnd)
|
||||
|
||||
# Hashes are different in Windows. May be how git translates line
|
||||
# endings
|
||||
hsum = 'baba5791276eb99a7cc498fb1acfbc3b4bd96d24cfe984b4ed6b5be2418731df'
|
||||
if salt.utils.is_windows():
|
||||
hsum = '754aa260e1f3e70f43aaf92149c7d1bad37f708c53304c37660e628d7553f687'
|
||||
|
||||
self.assertDictEqual(
|
||||
ret,
|
||||
{
|
||||
'hsum': hsum,
|
||||
'hash_type': 'sha256'
|
||||
}
|
||||
)
|
||||
|
||||
def test_file_list_emptydirs(self):
|
||||
if integration.TMP_STATE_TREE not in self.master_opts['file_roots']['base']:
|
||||
self.skipTest('This test fails when using tests/runtests.py. salt-runtests will be available soon.')
|
||||
|
||||
empty_dir = os.path.join(integration.TMP_STATE_TREE, 'empty_dir')
|
||||
if not os.path.isdir(empty_dir):
|
||||
# There's no use creating the empty-directory ourselves at this
|
||||
# point, the minions have already synced, it wouldn't get pushed to
|
||||
# them
|
||||
self.skipTest('This test fails when using tests/runtests.py. salt-runtests will be available soon.')
|
||||
|
||||
with patch.dict(roots.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'file_roots': self.master_opts['file_roots'],
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False}):
|
||||
ret = roots.file_list_emptydirs({'saltenv': 'base'})
|
||||
self.assertIn('empty_dir', ret)
|
||||
|
||||
def test_dir_list(self):
|
||||
empty_dir = os.path.join(integration.TMP_STATE_TREE, 'empty_dir')
|
||||
if integration.TMP_STATE_TREE not in self.master_opts['file_roots']['base']:
|
||||
self.skipTest('This test fails when using tests/runtests.py. salt-runtests will be available soon.')
|
||||
|
||||
empty_dir = os.path.join(integration.TMP_STATE_TREE, 'empty_dir')
|
||||
if not os.path.isdir(empty_dir):
|
||||
# There's no use creating the empty-directory ourselves at this
|
||||
# point, the minions have already synced, it wouldn't get pushed to
|
||||
# them
|
||||
self.skipTest('This test fails when using tests/runtests.py. salt-runtests will be available soon.')
|
||||
|
||||
with patch.dict(roots.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'file_roots': self.master_opts['file_roots'],
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False}):
|
||||
ret = roots.dir_list({'saltenv': 'base'})
|
||||
self.assertIn('empty_dir', ret)
|
||||
|
||||
def test_symlink_list(self):
|
||||
file_roots = self.test_symlink_list_file_roots \
|
||||
or self.master_opts['file_roots']
|
||||
with patch.dict(roots.__opts__, {'cachedir': self.master_opts['cachedir'],
|
||||
'file_roots': file_roots,
|
||||
'fileserver_ignoresymlinks': False,
|
||||
'fileserver_followsymlinks': False,
|
||||
'file_ignore_regex': False,
|
||||
'file_ignore_glob': False}):
|
||||
ret = roots.symlink_list({'saltenv': 'base'})
|
||||
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
|
||||
|
||||
|
||||
class RootsLimitTraversalTest(integration.ModuleCase):
|
||||
|
||||
# @destructiveTest
|
||||
def test_limit_traversal(self):
|
||||
'''
|
||||
1) Set up a deep directory structure
|
||||
2) Enable the configuration option for 'limit_directory_traversal'
|
||||
3) Ensure that we can find SLS files in a directory so long as there is an SLS file in a directory above.
|
||||
4) Ensure that we cannot find an SLS file in a directory that does not have an SLS file in a directory above.
|
||||
'''
|
||||
file_client_opts = self.get_config('master', from_scratch=True)
|
||||
file_client_opts['fileserver_limit_traversal'] = True
|
||||
|
||||
ret = fileclient.Client(file_client_opts).list_states('base')
|
||||
self.assertIn('test_deep.test', ret)
|
||||
self.assertIn('test_deep.a.test', ret)
|
||||
self.assertNotIn('test_deep.b.2.test', ret)
|
|
@ -18,7 +18,6 @@ from subprocess import Popen, PIPE, STDOUT
|
|||
import tests.integration as integration
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.helpers import requires_system_grains
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six as six
|
||||
|
@ -189,7 +188,6 @@ GPG_PILLAR_DECRYPTED = {
|
|||
}
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
@skipIf(not salt.utils.which('gpg'), 'GPG is not installed')
|
||||
class DecryptGPGPillarTest(integration.ModuleCase):
|
||||
'''
|
||||
|
|
|
@ -5,7 +5,6 @@ from __future__ import absolute_import
|
|||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
import tempfile
|
||||
|
||||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
|
@ -14,7 +13,6 @@ from tests.support.helpers import (
|
|||
destructiveTest,
|
||||
skip_if_binaries_missing
|
||||
)
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, Mock, patch
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
|
@ -32,7 +30,6 @@ AVAILABLE_PYTHON_EXECUTABLE = salt.utils.which_bin([
|
|||
])
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class CMDModuleTest(integration.ModuleCase):
|
||||
'''
|
||||
Validate the cmd module
|
||||
|
@ -66,45 +63,6 @@ class CMDModuleTest(integration.ModuleCase):
|
|||
['echo "a=b" | sed -e s/=/:/g'],
|
||||
python_shell=True), 'a:b')
|
||||
|
||||
@patch('pwd.getpwnam')
|
||||
@patch('subprocess.Popen')
|
||||
def test_os_environment_remains_intact(self,
|
||||
popen_mock,
|
||||
getpwnam_mock):
|
||||
'''
|
||||
Make sure the OS environment is not tainted after running a command
|
||||
that specifies runas.
|
||||
'''
|
||||
environment = os.environ.copy()
|
||||
|
||||
popen_mock.return_value = Mock(
|
||||
communicate=lambda *args, **kwags: ['{}', None],
|
||||
pid=lambda: 1,
|
||||
retcode=0
|
||||
)
|
||||
|
||||
from salt.modules import cmdmod
|
||||
|
||||
cmdmod.__grains__ = {'os': 'Darwin', 'os_family': 'Solaris'}
|
||||
if sys.platform.startswith(('freebsd', 'openbsd')):
|
||||
shell = '/bin/sh'
|
||||
else:
|
||||
shell = '/bin/bash'
|
||||
|
||||
try:
|
||||
cmdmod._run('ls',
|
||||
cwd=tempfile.gettempdir(),
|
||||
runas='foobar',
|
||||
shell=shell)
|
||||
|
||||
environment2 = os.environ.copy()
|
||||
|
||||
self.assertEqual(environment, environment2)
|
||||
|
||||
getpwnam_mock.assert_called_with('foobar')
|
||||
finally:
|
||||
delattr(cmdmod, '__grains__')
|
||||
|
||||
def test_stdout(self):
|
||||
'''
|
||||
cmd.run_stdout
|
||||
|
@ -276,20 +234,3 @@ class CMDModuleTest(integration.ModuleCase):
|
|||
f_timeout=2,
|
||||
python_shell=True)
|
||||
self.assertEqual(out, 'hello')
|
||||
|
||||
def test_run_cwd_doesnt_exist_issue_7154(self):
|
||||
'''
|
||||
cmd.run should fail and raise
|
||||
salt.exceptions.CommandExecutionError if the cwd dir does not
|
||||
exist
|
||||
'''
|
||||
from salt.exceptions import CommandExecutionError
|
||||
import salt.modules.cmdmod as cmdmod
|
||||
cmd = 'echo OHAI'
|
||||
cwd = '/path/to/nowhere'
|
||||
try:
|
||||
cmdmod.run_all(cmd, cwd=cwd)
|
||||
except CommandExecutionError:
|
||||
pass
|
||||
else:
|
||||
raise RuntimeError
|
||||
|
|
|
@ -1,147 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Test the django module
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
|
||||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
|
||||
|
||||
# Import salt libs
|
||||
from salt.modules import djangomod as django
|
||||
|
||||
django.__salt__ = {}
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
@patch('salt.utils.which', lambda exe: exe)
|
||||
class DjangoModuleTest(integration.ModuleCase):
|
||||
'''
|
||||
Test the django module
|
||||
'''
|
||||
|
||||
def test_command(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.command('settings.py', 'runserver')
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_command_with_args(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.command(
|
||||
'settings.py',
|
||||
'runserver',
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
'noinput',
|
||||
'somethingelse'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py '
|
||||
'--noinput --somethingelse',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_command_with_kwargs(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.command(
|
||||
'settings.py',
|
||||
'runserver',
|
||||
None,
|
||||
None,
|
||||
database='something'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py '
|
||||
'--database=something',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_command_with_kwargs_ignore_dunder(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.command(
|
||||
'settings.py', 'runserver', None, None, __ignore='something'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_syncdb(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.syncdb('settings.py')
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py syncdb --settings=settings.py --noinput',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_syncdb_migrate(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.syncdb('settings.py', migrate=True)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py syncdb --settings=settings.py --migrate '
|
||||
'--noinput',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_createsuperuser(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.createsuperuser(
|
||||
'settings.py', 'testuser', 'user@example.com'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py createsuperuser --settings=settings.py '
|
||||
'--noinput --username=testuser --email=user@example.com',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def no_test_loaddata(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.loaddata('settings.py', 'app1,app2')
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py loaddata --settings=settings.py app1 app2',
|
||||
)
|
||||
|
||||
def test_collectstatic(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(django.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
django.collectstatic(
|
||||
'settings.py', None, True, 'something', True, True, True, True
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py collectstatic --settings=settings.py '
|
||||
'--noinput --no-post-process --dry-run --clear --link '
|
||||
'--no-default-ignore --ignore=something',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
|
@ -12,11 +12,9 @@ import sys
|
|||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.mock import patch, MagicMock
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
from salt.modules import file as filemod
|
||||
|
||||
|
||||
class FileModuleTest(integration.ModuleCase):
|
||||
|
@ -165,59 +163,6 @@ class FileModuleTest(integration.ModuleCase):
|
|||
'filehash', 'base'])
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_existing_file(self):
|
||||
filemod.__salt__ = {
|
||||
'cp.list_master': MagicMock(
|
||||
return_value=['http/httpd.conf.fallback']),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[]),
|
||||
}
|
||||
filemod.__context__ = {}
|
||||
|
||||
ret = filemod.source_list(['salt://http/httpd.conf',
|
||||
'salt://http/httpd.conf.fallback'],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf.fallback', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_file_from_other_env(self):
|
||||
def list_master(env):
|
||||
dct = {'base': [], 'dev': ['http/httpd.conf']}
|
||||
return dct[env]
|
||||
filemod.__salt__ = {
|
||||
'cp.list_master': MagicMock(side_effect=list_master),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[]),
|
||||
}
|
||||
filemod.__context__ = {}
|
||||
|
||||
ret = filemod.source_list(['salt://http/httpd.conf?saltenv=dev',
|
||||
'salt://http/httpd.conf.fallback'],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf?saltenv=dev', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_file_from_dict(self):
|
||||
filemod.__salt__ = {
|
||||
'cp.list_master': MagicMock(return_value=['http/httpd.conf']),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[]),
|
||||
}
|
||||
filemod.__context__ = {}
|
||||
|
||||
ret = filemod.source_list(
|
||||
[{'salt://http/httpd.conf': ''}], 'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf', 'filehash'])
|
||||
|
||||
@patch('salt.modules.file.os.remove')
|
||||
def test_source_list_for_list_returns_file_from_dict_via_http(self, remove):
|
||||
remove.return_value = None
|
||||
filemod.__salt__ = {
|
||||
'cp.list_master': MagicMock(return_value=[]),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[]),
|
||||
'cp.cache_file': MagicMock(return_value='/tmp/http.conf'),
|
||||
}
|
||||
filemod.__context__ = {}
|
||||
|
||||
ret = filemod.source_list(
|
||||
[{'http://t.est.com/http/httpd.conf': 'filehash'}], '', 'base')
|
||||
self.assertEqual(list(ret), ['http://t.est.com/http/httpd.conf', 'filehash'])
|
||||
|
||||
def test_source_list_for_single_local_file_slash_returns_unchanged(self):
|
||||
ret = self.run_function('file.source_list', [self.myfile,
|
||||
'filehash', 'base'])
|
||||
|
@ -227,25 +172,3 @@ class FileModuleTest(integration.ModuleCase):
|
|||
ret = self.run_function('file.source_list', ['file://' + self.myfile,
|
||||
'filehash', 'base'])
|
||||
self.assertEqual(list(ret), ['file://' + self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_existing_local_file_slash(self):
|
||||
ret = filemod.source_list([self.myfile + '-foo',
|
||||
self.myfile],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), [self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_existing_local_file_proto(self):
|
||||
ret = filemod.source_list(['file://' + self.myfile + '-foo',
|
||||
'file://' + self.myfile],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['file://' + self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_local_file_slash_from_dict(self):
|
||||
ret = filemod.source_list(
|
||||
[{self.myfile: ''}], 'filehash', 'base')
|
||||
self.assertEqual(list(ret), [self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_local_file_proto_from_dict(self):
|
||||
ret = filemod.source_list(
|
||||
[{'file://' + self.myfile: ''}], 'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['file://' + self.myfile, 'filehash'])
|
||||
|
|
|
@ -1,143 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Tests for the local_cache returner
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.job
|
||||
from salt.returners import local_cache
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# JOBS DIR and FILES
|
||||
TMP_CACHE_DIR = os.path.join(integration.TMP, 'rootdir', 'cache')
|
||||
JOBS_DIR = os.path.join(TMP_CACHE_DIR, 'jobs')
|
||||
JID_DIR = os.path.join(JOBS_DIR, '31', 'c56eed380a4e899ae12bc42563cfdfc53066fb4a6b53e2378a08ac49064539')
|
||||
JID_FILE = os.path.join(JID_DIR, 'jid')
|
||||
JID_MINION_DIR = os.path.join(JID_DIR, 'minion', 'return.p')
|
||||
JOB_CACHE_DIR_FILES = [JID_FILE, JID_MINION_DIR]
|
||||
KEEP_JOBS = 0.0000000010
|
||||
EMPTY_JID_DIR = []
|
||||
|
||||
local_cache.__opts__ = {'cachedir': TMP_CACHE_DIR,
|
||||
'keep_jobs': KEEP_JOBS}
|
||||
|
||||
|
||||
class Local_CacheTest(integration.ShellCase):
|
||||
'''
|
||||
Test the local cache returner
|
||||
'''
|
||||
def _check_dir_files(self, msg, contents, status='None'):
|
||||
'''
|
||||
helper method to ensure files or dirs
|
||||
are either present or removed
|
||||
'''
|
||||
for content in contents:
|
||||
log.debug('CONTENT {0}'.format(content))
|
||||
if status == 'present':
|
||||
check_job_dir = os.path.exists(content)
|
||||
elif status == 'removed':
|
||||
if os.path.exists(content):
|
||||
check_job_dir = False
|
||||
else:
|
||||
check_job_dir = True
|
||||
self.assertTrue(check_job_dir,
|
||||
msg=msg + content)
|
||||
|
||||
def _add_job(self):
|
||||
'''
|
||||
helper method to add job.
|
||||
'''
|
||||
# add the job.
|
||||
opts = {}
|
||||
opts.update(self.get_config('master'))
|
||||
load = {'fun_args': [], 'jid': '20160603132323715452',
|
||||
'return': True, 'retcode': 0, 'success': True,
|
||||
'cmd': '_return', 'fun': 'test.ping', 'id': 'minion'}
|
||||
|
||||
add_job = salt.utils.job.store_job(opts, load)
|
||||
self.assertEqual(add_job, None)
|
||||
self._check_dir_files('Dir/file does not exist: ',
|
||||
JOB_CACHE_DIR_FILES,
|
||||
status='present')
|
||||
|
||||
def test_clean_old_jobs(self):
|
||||
'''
|
||||
test to ensure jobs are removed from job cache
|
||||
'''
|
||||
self._add_job()
|
||||
|
||||
# remove job
|
||||
self.assertEqual(local_cache.clean_old_jobs(), None)
|
||||
|
||||
self._check_dir_files('job cache was not removed: ',
|
||||
JOB_CACHE_DIR_FILES,
|
||||
status='removed')
|
||||
|
||||
def test_not_clean_new_jobs(self):
|
||||
'''
|
||||
test to ensure jobs are not removed when
|
||||
jobs dir is new
|
||||
'''
|
||||
self._add_job()
|
||||
|
||||
local_cache.__opts__['keep_jobs'] = 24
|
||||
self.assertEqual(local_cache.clean_old_jobs(), None)
|
||||
|
||||
self._check_dir_files('job cache was removed: ',
|
||||
JOB_CACHE_DIR_FILES,
|
||||
status='present')
|
||||
|
||||
# need to set back to initial KEEP_JOBS
|
||||
local_cache.__opts__['keep_jobs'] = KEEP_JOBS
|
||||
|
||||
def test_empty_jid_dir(self):
|
||||
'''
|
||||
test to ensure removal of empty jid dir
|
||||
'''
|
||||
# add empty jid dir
|
||||
new_jid_dir = os.path.join(JOBS_DIR, 'z0')
|
||||
EMPTY_JID_DIR.append(new_jid_dir)
|
||||
os.makedirs(new_jid_dir)
|
||||
|
||||
# This needed due to a race condition in Windows
|
||||
# `os.makedirs` hasn't released the handle before
|
||||
# `local_cache.clean_old_jobs` tries to delete the new_jid_dir
|
||||
if salt.utils.is_windows():
|
||||
import time
|
||||
lock_dir = new_jid_dir + '.lckchk'
|
||||
tries = 0
|
||||
while True:
|
||||
tries += 1
|
||||
if tries > 10:
|
||||
break
|
||||
# Rename the directory and name it back
|
||||
# If it fails, the directory handle is not released, try again
|
||||
# If it succeeds, break and continue test
|
||||
try:
|
||||
os.rename(new_jid_dir, lock_dir)
|
||||
time.sleep(1)
|
||||
os.rename(lock_dir, new_jid_dir)
|
||||
break
|
||||
except WindowsError: # pylint: disable=E0602
|
||||
continue
|
||||
|
||||
# check dir exists
|
||||
self._check_dir_files('new_jid_dir was not created',
|
||||
EMPTY_JID_DIR,
|
||||
status='present')
|
||||
|
||||
# remove job
|
||||
self.assertEqual(local_cache.clean_old_jobs(), None)
|
||||
|
||||
# check jid dir is removed
|
||||
self._check_dir_files('new_jid_dir was not removed',
|
||||
EMPTY_JID_DIR,
|
||||
status='removed')
|
|
@ -10,10 +10,11 @@ import os
|
|||
import shutil
|
||||
|
||||
# Import Salt Testing libs
|
||||
import tests.integration as integration
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
|
||||
from tests.integration import AdaptedConfigurationTestCaseMixIn
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
|
@ -35,7 +36,6 @@ def _get_file_roots():
|
|||
)
|
||||
|
||||
|
||||
fileclient.__opts__ = {}
|
||||
MOCKED_OPTS = {
|
||||
'file_roots': _get_file_roots(),
|
||||
'fileserver_backend': ['roots'],
|
||||
|
@ -45,7 +45,10 @@ MOCKED_OPTS = {
|
|||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class FileClientTest(integration.ModuleCase):
|
||||
class FileClientTest(TestCase, AdaptedConfigurationTestCaseMixIn, LoaderModuleMockMixin):
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {fileclient: {'__opts__': MOCKED_OPTS}}
|
||||
|
||||
def setUp(self):
|
||||
self.file_client = fileclient.Client(self.master_opts)
|
||||
|
@ -75,13 +78,16 @@ class FileClientTest(integration.ModuleCase):
|
|||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class FileclientCacheTest(integration.ModuleCase):
|
||||
class FileclientCacheTest(TestCase, AdaptedConfigurationTestCaseMixIn, LoaderModuleMockMixin):
|
||||
'''
|
||||
Tests for the fileclient caching. The LocalClient is the only thing we can
|
||||
test as it is the only way we can mock the fileclient (the tests run from
|
||||
the minion process, so the master cannot be mocked from test code).
|
||||
'''
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {fileclient: {'__opts__': MOCKED_OPTS}}
|
||||
|
||||
def setUp(self):
|
||||
'''
|
||||
No need to add a dummy foo.txt to muddy up the github repo, just make
|
||||
|
@ -182,7 +188,7 @@ class FileclientCacheTest(integration.ModuleCase):
|
|||
'''
|
||||
patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
|
||||
patched_opts.update(MOCKED_OPTS)
|
||||
alt_cachedir = os.path.join(integration.TMP, 'abs_cachedir')
|
||||
alt_cachedir = os.path.join(TMP, 'abs_cachedir')
|
||||
|
||||
with patch.dict(fileclient.__opts__, patched_opts):
|
||||
client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
|
||||
|
@ -291,7 +297,7 @@ class FileclientCacheTest(integration.ModuleCase):
|
|||
'''
|
||||
patched_opts = dict((x, y) for x, y in six.iteritems(self.minion_opts))
|
||||
patched_opts.update(MOCKED_OPTS)
|
||||
alt_cachedir = os.path.join(integration.TMP, 'abs_cachedir')
|
||||
alt_cachedir = os.path.join(TMP, 'abs_cachedir')
|
||||
|
||||
with patch.dict(fileclient.__opts__, patched_opts):
|
||||
client = fileclient.get_file_client(fileclient.__opts__, pillar=False)
|
|
@ -5,24 +5,34 @@
|
|||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import textwrap
|
||||
import yaml
|
||||
import pwd
|
||||
import logging
|
||||
|
||||
# Import 3rd-party libs
|
||||
import yaml
|
||||
try:
|
||||
import git # pylint: disable=unused-import
|
||||
HAS_GITPYTHON = True
|
||||
GITFS_AVAILABLE = True
|
||||
except ImportError:
|
||||
HAS_GITPYTHON = False
|
||||
GITFS_AVAILABLE = False
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
|
||||
from tests.support.paths import TMP, FILES
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.gitfs
|
||||
from salt.fileserver.gitfs import PER_REMOTE_OVERRIDES, PER_REMOTE_ONLY
|
||||
import salt.fileserver.gitfs as gitfs
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@skipIf(not HAS_GITPYTHON, 'GitPython is not installed')
|
||||
|
@ -81,51 +91,150 @@ class GitfsConfigTestCase(TestCase):
|
|||
- mountpoint: abc
|
||||
''')
|
||||
self.opts.update(yaml.safe_load(opts))
|
||||
gitfs = salt.utils.gitfs.GitFS(self.opts)
|
||||
gitfs.init_remotes(self.opts['gitfs_remotes'],
|
||||
PER_REMOTE_OVERRIDES, PER_REMOTE_ONLY)
|
||||
git_fs = salt.utils.gitfs.GitFS(self.opts)
|
||||
git_fs.init_remotes(self.opts['gitfs_remotes'],
|
||||
gitfs.PER_REMOTE_OVERRIDES, gitfs.PER_REMOTE_ONLY)
|
||||
|
||||
# repo1 (branch: foo)
|
||||
# The mountpoint should take the default (from gitfs_mountpoint), while
|
||||
# ref and root should take the per-saltenv params.
|
||||
self.assertEqual(gitfs.remotes[0].mountpoint('foo'), '')
|
||||
self.assertEqual(gitfs.remotes[0].ref('foo'), 'foo_branch')
|
||||
self.assertEqual(gitfs.remotes[0].root('foo'), 'foo_root')
|
||||
self.assertEqual(git_fs.remotes[0].mountpoint('foo'), '')
|
||||
self.assertEqual(git_fs.remotes[0].ref('foo'), 'foo_branch')
|
||||
self.assertEqual(git_fs.remotes[0].root('foo'), 'foo_root')
|
||||
|
||||
# repo1 (branch: bar)
|
||||
# The 'bar' branch does not have a per-saltenv configuration set, so
|
||||
# each of the below values should fall back to global values.
|
||||
self.assertEqual(gitfs.remotes[0].mountpoint('bar'), '')
|
||||
self.assertEqual(gitfs.remotes[0].ref('bar'), 'bar')
|
||||
self.assertEqual(gitfs.remotes[0].root('bar'), 'salt')
|
||||
self.assertEqual(git_fs.remotes[0].mountpoint('bar'), '')
|
||||
self.assertEqual(git_fs.remotes[0].ref('bar'), 'bar')
|
||||
self.assertEqual(git_fs.remotes[0].root('bar'), 'salt')
|
||||
|
||||
# repo1 (branch: baz)
|
||||
# The 'baz' branch does not have a per-saltenv configuration set, but
|
||||
# it is defined in the gitfs_saltenv parameter, so the values
|
||||
# from that parameter should be returned.
|
||||
self.assertEqual(gitfs.remotes[0].mountpoint('baz'), 'baz_mountpoint')
|
||||
self.assertEqual(gitfs.remotes[0].ref('baz'), 'baz_branch')
|
||||
self.assertEqual(gitfs.remotes[0].root('baz'), 'baz_root')
|
||||
self.assertEqual(git_fs.remotes[0].mountpoint('baz'), 'baz_mountpoint')
|
||||
self.assertEqual(git_fs.remotes[0].ref('baz'), 'baz_branch')
|
||||
self.assertEqual(git_fs.remotes[0].root('baz'), 'baz_root')
|
||||
|
||||
# repo2 (branch: foo)
|
||||
# The mountpoint should take the per-remote mountpoint value of
|
||||
# 'repo2', while ref and root should fall back to global values.
|
||||
self.assertEqual(gitfs.remotes[1].mountpoint('foo'), 'repo2')
|
||||
self.assertEqual(gitfs.remotes[1].ref('foo'), 'foo')
|
||||
self.assertEqual(gitfs.remotes[1].root('foo'), 'salt')
|
||||
self.assertEqual(git_fs.remotes[1].mountpoint('foo'), 'repo2')
|
||||
self.assertEqual(git_fs.remotes[1].ref('foo'), 'foo')
|
||||
self.assertEqual(git_fs.remotes[1].root('foo'), 'salt')
|
||||
|
||||
# repo2 (branch: bar)
|
||||
# The 'bar' branch does not have a per-saltenv configuration set, so
|
||||
# the mountpoint should take the per-remote mountpoint value of
|
||||
# 'repo2', while ref and root should fall back to global values.
|
||||
self.assertEqual(gitfs.remotes[1].mountpoint('bar'), 'repo2')
|
||||
self.assertEqual(gitfs.remotes[1].ref('bar'), 'bar')
|
||||
self.assertEqual(gitfs.remotes[1].root('bar'), 'salt')
|
||||
self.assertEqual(git_fs.remotes[1].mountpoint('bar'), 'repo2')
|
||||
self.assertEqual(git_fs.remotes[1].ref('bar'), 'bar')
|
||||
self.assertEqual(git_fs.remotes[1].root('bar'), 'salt')
|
||||
|
||||
# repo2 (branch: baz)
|
||||
# The 'baz' branch has the mountpoint configured as a per-saltenv
|
||||
# parameter. The other two should take the values defined in
|
||||
# gitfs_saltenv.
|
||||
self.assertEqual(gitfs.remotes[1].mountpoint('baz'), 'abc')
|
||||
self.assertEqual(gitfs.remotes[1].ref('baz'), 'baz_branch')
|
||||
self.assertEqual(gitfs.remotes[1].root('baz'), 'baz_root')
|
||||
self.assertEqual(git_fs.remotes[1].mountpoint('baz'), 'abc')
|
||||
self.assertEqual(git_fs.remotes[1].ref('baz'), 'baz_branch')
|
||||
self.assertEqual(git_fs.remotes[1].root('baz'), 'baz_root')
|
||||
|
||||
|
||||
LOAD = {'saltenv': 'base'}
|
||||
|
||||
|
||||
@skipIf(not GITFS_AVAILABLE, "GitFS could not be loaded. Skipping GitFS tests!")
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class GitFSTest(TestCase, LoaderModuleMockMixin):
|
||||
|
||||
def setup_loader_modules(self):
|
||||
self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
|
||||
self.tmp_sock_dir = tempfile.mkdtemp(dir=TMP)
|
||||
self.tmp_repo_dir = os.path.join(TMP, 'gitfs_root')
|
||||
return {
|
||||
gitfs: {
|
||||
'__opts__': {'cachedir': self.tmp_cachedir,
|
||||
'sock_dir': self.tmp_sock_dir,
|
||||
'gitfs_remotes': ['file://' + self.tmp_repo_dir],
|
||||
'gitfs_root': '',
|
||||
'fileserver_backend': ['git'],
|
||||
'gitfs_base': 'master',
|
||||
'fileserver_events': True,
|
||||
'transport': 'zeromq',
|
||||
'gitfs_mountpoint': '',
|
||||
'gitfs_env_whitelist': [],
|
||||
'gitfs_env_blacklist': [],
|
||||
'gitfs_user': '',
|
||||
'gitfs_password': '',
|
||||
'gitfs_insecure_auth': False,
|
||||
'gitfs_privkey': '',
|
||||
'gitfs_pubkey': '',
|
||||
'gitfs_passphrase': '',
|
||||
'gitfs_refspecs': ['+refs/heads/*:refs/remotes/origin/*',
|
||||
'+refs/tags/*:refs/tags/*'],
|
||||
'gitfs_ssl_verify': True,
|
||||
'__role': 'master'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
'''
|
||||
We don't want to check in another .git dir into GH because that just gets messy.
|
||||
Instead, we'll create a temporary repo on the fly for the tests to examine.
|
||||
'''
|
||||
if not gitfs.__virtual__():
|
||||
self.skip("GitFS could not be loaded. Skipping GitFS tests!")
|
||||
self.integration_base_files = os.path.join(FILES, 'file', 'base')
|
||||
|
||||
# Create the dir if it doesn't already exist
|
||||
|
||||
try:
|
||||
shutil.copytree(self.integration_base_files, self.tmp_repo_dir + '/')
|
||||
except OSError:
|
||||
# We probably caught an error because files already exist. Ignore
|
||||
pass
|
||||
|
||||
try:
|
||||
repo = git.Repo(self.tmp_repo_dir)
|
||||
except git.exc.InvalidGitRepositoryError:
|
||||
repo = git.Repo.init(self.tmp_repo_dir)
|
||||
|
||||
if 'USERNAME' not in os.environ:
|
||||
try:
|
||||
os.environ['USERNAME'] = pwd.getpwuid(os.geteuid()).pw_name
|
||||
except AttributeError:
|
||||
log.error('Unable to get effective username, falling back to '
|
||||
'\'root\'.')
|
||||
os.environ['USERNAME'] = 'root'
|
||||
|
||||
repo.index.add([x for x in os.listdir(self.tmp_repo_dir)
|
||||
if x != '.git'])
|
||||
repo.index.commit('Test')
|
||||
gitfs.update()
|
||||
|
||||
def tearDown(self):
|
||||
'''
|
||||
Remove the temporary git repository and gitfs cache directory to ensure
|
||||
a clean environment for each test.
|
||||
'''
|
||||
shutil.rmtree(self.tmp_repo_dir)
|
||||
shutil.rmtree(self.tmp_cachedir)
|
||||
shutil.rmtree(self.tmp_sock_dir)
|
||||
del self.tmp_repo_dir
|
||||
del self.tmp_cachedir
|
||||
del self.tmp_sock_dir
|
||||
del self.integration_base_files
|
||||
|
||||
def test_file_list(self):
|
||||
ret = gitfs.file_list(LOAD)
|
||||
self.assertIn('testfile', ret)
|
||||
|
||||
def test_dir_list(self):
|
||||
ret = gitfs.dir_list(LOAD)
|
||||
self.assertIn('grail', ret)
|
||||
|
||||
def test_envs(self):
|
||||
ret = gitfs.envs()
|
||||
self.assertIn('base', ret)
|
||||
|
|
189
tests/unit/fileserver/test_roots.py
Normal file
189
tests/unit/fileserver/test_roots.py
Normal file
|
@ -0,0 +1,189 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
:codeauthor: :email:`Mike Place <mp@saltstack.com>`
|
||||
'''
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.integration import AdaptedConfigurationTestCaseMixIn
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.paths import FILES, TMP, TMP_STATE_TREE
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON
|
||||
|
||||
# Import salt libs
|
||||
from salt.fileserver import roots
|
||||
from salt import fileclient
|
||||
import salt.utils
|
||||
|
||||
try:
|
||||
import win32file
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
class RootsTest(TestCase, AdaptedConfigurationTestCaseMixIn, LoaderModuleMockMixin):
|
||||
|
||||
def setup_loader_modules(self):
|
||||
self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
|
||||
self.opts = self.get_config('master', from_scratch=True)
|
||||
self.opts['cachedir'] = self.tmp_cachedir
|
||||
empty_dir = os.path.join(TMP_STATE_TREE, 'empty_dir')
|
||||
if not os.path.isdir(empty_dir):
|
||||
os.makedirs(empty_dir)
|
||||
return {roots: {'__opts__': self.opts}}
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
'''
|
||||
Create special file_roots for symlink test on Windows
|
||||
'''
|
||||
if salt.utils.is_windows():
|
||||
root_dir = tempfile.mkdtemp(dir=TMP)
|
||||
source_sym = os.path.join(root_dir, 'source_sym')
|
||||
with salt.utils.fopen(source_sym, 'w') as fp_:
|
||||
fp_.write('hello world!\n')
|
||||
cwd = os.getcwd()
|
||||
try:
|
||||
os.chdir(root_dir)
|
||||
win32file.CreateSymbolicLink('dest_sym', 'source_sym', 0)
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
cls.test_symlink_list_file_roots = {'base': [root_dir]}
|
||||
else:
|
||||
cls.test_symlink_list_file_roots = None
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
'''
|
||||
Remove special file_roots for symlink test
|
||||
'''
|
||||
if salt.utils.is_windows():
|
||||
try:
|
||||
salt.utils.rm_rf(cls.test_symlink_list_file_roots['base'][0])
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def tearDown(self):
|
||||
del self.opts
|
||||
|
||||
def test_file_list(self):
|
||||
ret = roots.file_list({'saltenv': 'base'})
|
||||
self.assertIn('testfile', ret)
|
||||
|
||||
def test_find_file(self):
|
||||
ret = roots.find_file('testfile')
|
||||
self.assertEqual('testfile', ret['rel'])
|
||||
|
||||
full_path_to_file = os.path.join(FILES, 'file', 'base', 'testfile')
|
||||
self.assertEqual(full_path_to_file, ret['path'])
|
||||
|
||||
def test_serve_file(self):
|
||||
with patch.dict(roots.__opts__, {'file_buffer_size': 262144}):
|
||||
load = {'saltenv': 'base',
|
||||
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
'loc': 0
|
||||
}
|
||||
fnd = {'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
'rel': 'testfile'}
|
||||
ret = roots.serve_file(load, fnd)
|
||||
|
||||
data = 'Scene 24\n\n \n OLD MAN: Ah, hee he he ha!\n ' \
|
||||
'ARTHUR: And this enchanter of whom you speak, he ' \
|
||||
'has seen the grail?\n OLD MAN: Ha ha he he he ' \
|
||||
'he!\n ARTHUR: Where does he live? Old man, where ' \
|
||||
'does he live?\n OLD MAN: He knows of a cave, a ' \
|
||||
'cave which no man has entered.\n ARTHUR: And the ' \
|
||||
'Grail... The Grail is there?\n OLD MAN: Very much ' \
|
||||
'danger, for beyond the cave lies the Gorge\n ' \
|
||||
'of Eternal Peril, which no man has ever crossed.\n ' \
|
||||
'ARTHUR: But the Grail! Where is the Grail!?\n ' \
|
||||
'OLD MAN: Seek you the Bridge of Death.\n ARTHUR: ' \
|
||||
'The Bridge of Death, which leads to the Grail?\n ' \
|
||||
'OLD MAN: Hee hee ha ha!\n\n'
|
||||
if salt.utils.is_windows():
|
||||
data = 'Scene 24\r\n\r\n \r\n OLD MAN: Ah, hee he he ' \
|
||||
'ha!\r\n ARTHUR: And this enchanter of whom you ' \
|
||||
'speak, he has seen the grail?\r\n OLD MAN: Ha ha ' \
|
||||
'he he he he!\r\n ARTHUR: Where does he live? Old ' \
|
||||
'man, where does he live?\r\n OLD MAN: He knows of ' \
|
||||
'a cave, a cave which no man has entered.\r\n ' \
|
||||
'ARTHUR: And the Grail... The Grail is there?\r\n ' \
|
||||
'OLD MAN: Very much danger, for beyond the cave lies ' \
|
||||
'the Gorge\r\n of Eternal Peril, which no man ' \
|
||||
'has ever crossed.\r\n ARTHUR: But the Grail! ' \
|
||||
'Where is the Grail!?\r\n OLD MAN: Seek you the ' \
|
||||
'Bridge of Death.\r\n ARTHUR: The Bridge of Death, ' \
|
||||
'which leads to the Grail?\r\n OLD MAN: Hee hee ha ' \
|
||||
'ha!\r\n\r\n'
|
||||
|
||||
self.assertDictEqual(
|
||||
ret,
|
||||
{'data': data,
|
||||
'dest': 'testfile'})
|
||||
|
||||
@skipIf(True, "Update test not yet implemented")
|
||||
def test_update(self):
|
||||
pass
|
||||
|
||||
def test_file_hash(self):
|
||||
load = {
|
||||
'saltenv': 'base',
|
||||
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
}
|
||||
fnd = {
|
||||
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
'rel': 'testfile'
|
||||
}
|
||||
ret = roots.file_hash(load, fnd)
|
||||
|
||||
# Hashes are different in Windows. May be how git translates line
|
||||
# endings
|
||||
hsum = 'baba5791276eb99a7cc498fb1acfbc3b4bd96d24cfe984b4ed6b5be2418731df'
|
||||
if salt.utils.is_windows():
|
||||
hsum = '754aa260e1f3e70f43aaf92149c7d1bad37f708c53304c37660e628d7553f687'
|
||||
|
||||
self.assertDictEqual(
|
||||
ret,
|
||||
{
|
||||
'hsum': hsum,
|
||||
'hash_type': 'sha256'
|
||||
}
|
||||
)
|
||||
|
||||
def test_file_list_emptydirs(self):
|
||||
ret = roots.file_list_emptydirs({'saltenv': 'base'})
|
||||
self.assertIn('empty_dir', ret)
|
||||
|
||||
def test_dir_list(self):
|
||||
ret = roots.dir_list({'saltenv': 'base'})
|
||||
self.assertIn('empty_dir', ret)
|
||||
|
||||
def test_symlink_list(self):
|
||||
file_roots = self.test_symlink_list_file_roots \
|
||||
or self.opts['file_roots']
|
||||
ret = roots.symlink_list({'saltenv': 'base'})
|
||||
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
|
||||
|
||||
|
||||
class RootsLimitTraversalTest(TestCase, AdaptedConfigurationTestCaseMixIn):
|
||||
|
||||
def test_limit_traversal(self):
|
||||
'''
|
||||
1) Set up a deep directory structure
|
||||
2) Enable the configuration option for 'limit_directory_traversal'
|
||||
3) Ensure that we can find SLS files in a directory so long as there is an SLS file in a directory above.
|
||||
4) Ensure that we cannot find an SLS file in a directory that does not have an SLS file in a directory above.
|
||||
'''
|
||||
file_client_opts = self.get_config('master', from_scratch=True)
|
||||
file_client_opts['fileserver_limit_traversal'] = True
|
||||
|
||||
ret = fileclient.Client(file_client_opts).list_states('base')
|
||||
self.assertIn('test_deep.test', ret)
|
||||
self.assertIn('test_deep.a.test', ret)
|
||||
self.assertNotIn('test_deep.b.2.test', ret)
|
|
@ -5,18 +5,22 @@
|
|||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.utils
|
||||
import salt.modules.cmdmod as cmdmod
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.log import LOG_LEVELS
|
||||
import salt.utils
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import (
|
||||
mock_open,
|
||||
Mock,
|
||||
MagicMock,
|
||||
NO_MOCK,
|
||||
NO_MOCK_REASON,
|
||||
|
@ -258,3 +262,52 @@ class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
with patch('salt.utils.fopen', mock_open(read_data=MOCK_SHELL_FILE)):
|
||||
self.assertFalse(cmdmod._is_valid_shell('foo'))
|
||||
|
||||
@patch('pwd.getpwnam')
|
||||
@patch('subprocess.Popen')
|
||||
def test_os_environment_remains_intact(self,
|
||||
popen_mock,
|
||||
getpwnam_mock):
|
||||
'''
|
||||
Make sure the OS environment is not tainted after running a command
|
||||
that specifies runas.
|
||||
'''
|
||||
environment = os.environ.copy()
|
||||
|
||||
popen_mock.return_value = Mock(
|
||||
communicate=lambda *args, **kwags: ['{}', None],
|
||||
pid=lambda: 1,
|
||||
retcode=0
|
||||
)
|
||||
|
||||
with patch.dict(cmdmod.__grains__, {'os': 'Darwin', 'os_family': 'Solaris'}):
|
||||
if sys.platform.startswith(('freebsd', 'openbsd')):
|
||||
shell = '/bin/sh'
|
||||
else:
|
||||
shell = '/bin/bash'
|
||||
|
||||
cmdmod._run('ls',
|
||||
cwd=tempfile.gettempdir(),
|
||||
runas='foobar',
|
||||
shell=shell)
|
||||
|
||||
environment2 = os.environ.copy()
|
||||
|
||||
self.assertEqual(environment, environment2)
|
||||
|
||||
getpwnam_mock.assert_called_with('foobar')
|
||||
|
||||
def test_run_cwd_doesnt_exist_issue_7154(self):
|
||||
'''
|
||||
cmd.run should fail and raise
|
||||
salt.exceptions.CommandExecutionError if the cwd dir does not
|
||||
exist
|
||||
'''
|
||||
cmd = 'echo OHAI'
|
||||
cwd = '/path/to/nowhere'
|
||||
try:
|
||||
cmdmod.run_all(cmd, cwd=cwd)
|
||||
except CommandExecutionError:
|
||||
pass
|
||||
else:
|
||||
raise RuntimeError
|
||||
|
|
|
@ -20,6 +20,7 @@ import salt.modules.djangomod as djangomod
|
|||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
@patch('salt.utils.which', lambda exe: exe)
|
||||
class DjangomodTestCase(TestCase, LoaderModuleMockMixin):
|
||||
'''
|
||||
Test cases for salt.modules.djangomod
|
||||
|
@ -91,3 +92,126 @@ class DjangomodTestCase(TestCase, LoaderModuleMockMixin):
|
|||
mock = MagicMock(return_value=True)
|
||||
with patch.dict(djangomod.__salt__, {'cmd.run': mock}):
|
||||
self.assertTrue(djangomod.collectstatic('DJANGO_SETTINGS_MODULE'))
|
||||
|
||||
def test_django_admin_cli_command(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.command('settings.py', 'runserver')
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_django_admin_cli_command_with_args(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.command(
|
||||
'settings.py',
|
||||
'runserver',
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
'noinput',
|
||||
'somethingelse'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py '
|
||||
'--noinput --somethingelse',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_django_admin_cli_command_with_kwargs(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.command(
|
||||
'settings.py',
|
||||
'runserver',
|
||||
None,
|
||||
None,
|
||||
database='something'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py '
|
||||
'--database=something',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_django_admin_cli_command_with_kwargs_ignore_dunder(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.command(
|
||||
'settings.py', 'runserver', None, None, __ignore='something'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py runserver --settings=settings.py',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_django_admin_cli_syncdb(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.syncdb('settings.py')
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py syncdb --settings=settings.py --noinput',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_django_admin_cli_syncdb_migrate(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.syncdb('settings.py', migrate=True)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py syncdb --settings=settings.py --migrate '
|
||||
'--noinput',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def test_django_admin_cli_createsuperuser(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.createsuperuser(
|
||||
'settings.py', 'testuser', 'user@example.com'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py createsuperuser --settings=settings.py '
|
||||
'--noinput --username=testuser --email=user@example.com',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
||||
def no_test_loaddata(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.loaddata('settings.py', 'app1,app2')
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py loaddata --settings=settings.py app1 app2',
|
||||
)
|
||||
|
||||
def test_django_admin_cli_collectstatic(self):
|
||||
mock = MagicMock()
|
||||
with patch.dict(djangomod.__salt__,
|
||||
{'cmd.run': mock}):
|
||||
djangomod.collectstatic(
|
||||
'settings.py', None, True, 'something', True, True, True, True
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
'django-admin.py collectstatic --settings=settings.py '
|
||||
'--noinput --no-post-process --dry-run --clear --link '
|
||||
'--no-default-ignore --ignore=something',
|
||||
python_shell=False,
|
||||
env=None
|
||||
)
|
||||
|
|
|
@ -3,11 +3,13 @@
|
|||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import textwrap
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.unit import TestCase
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
@ -759,17 +761,88 @@ class FileBasicsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
|
||||
def setUp(self):
|
||||
self.directory = tempfile.mkdtemp()
|
||||
self.addCleanup(shutil.rmtree, self.directory)
|
||||
self.addCleanup(delattr, self, 'directory')
|
||||
with tempfile.NamedTemporaryFile(delete=False, mode='w+') as self.tfile:
|
||||
self.tfile.write('Hi hello! I am a file.')
|
||||
self.tfile.close()
|
||||
|
||||
def tearDown(self):
|
||||
os.remove(self.tfile.name)
|
||||
os.remove(self.directory + '/a_link')
|
||||
os.rmdir(self.directory)
|
||||
del self.tfile
|
||||
self.addCleanup(os.remove, self.tfile.name)
|
||||
self.addCleanup(delattr, self, 'tfile')
|
||||
self.myfile = os.path.join(TMP, 'myfile')
|
||||
with salt.utils.fopen(self.myfile, 'w+') as fp:
|
||||
fp.write('Hello\n')
|
||||
self.addCleanup(os.remove, self.myfile)
|
||||
self.addCleanup(delattr, self, 'myfile')
|
||||
|
||||
def test_symlink_already_in_desired_state(self):
|
||||
os.symlink(self.tfile.name, self.directory + '/a_link')
|
||||
self.addCleanup(os.remove, self.directory + '/a_link')
|
||||
result = filemod.symlink(self.tfile.name, self.directory + '/a_link')
|
||||
self.assertTrue(result)
|
||||
|
||||
@patch('salt.modules.file.os.remove')
|
||||
def test_source_list_for_list_returns_file_from_dict_via_http(self, remove):
|
||||
remove.return_value = None
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=[]),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[]),
|
||||
'cp.cache_file': MagicMock(return_value='/tmp/http.conf')}):
|
||||
ret = filemod.source_list(
|
||||
[{'http://t.est.com/http/httpd.conf': 'filehash'}], '', 'base')
|
||||
self.assertEqual(list(ret), ['http://t.est.com/http/httpd.conf', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_existing_file(self):
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=['http/httpd.conf.fallback']),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list(['salt://http/httpd.conf',
|
||||
'salt://http/httpd.conf.fallback'],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf.fallback', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_file_from_other_env(self):
|
||||
def list_master(env):
|
||||
dct = {'base': [], 'dev': ['http/httpd.conf']}
|
||||
return dct[env]
|
||||
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(side_effect=list_master),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list(['salt://http/httpd.conf?saltenv=dev',
|
||||
'salt://http/httpd.conf.fallback'],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf?saltenv=dev', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_file_from_dict(self):
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=['http/httpd.conf']),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list(
|
||||
[{'salt://http/httpd.conf': ''}], 'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['salt://http/httpd.conf', 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_existing_local_file_slash(self):
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=[]),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list([self.myfile + '-foo',
|
||||
self.myfile],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), [self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_existing_local_file_proto(self):
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=[]),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list(['file://' + self.myfile + '-foo',
|
||||
'file://' + self.myfile],
|
||||
'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['file://' + self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_local_file_slash_from_dict(self):
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=[]),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list(
|
||||
[{self.myfile: ''}], 'filehash', 'base')
|
||||
self.assertEqual(list(ret), [self.myfile, 'filehash'])
|
||||
|
||||
def test_source_list_for_list_returns_local_file_proto_from_dict(self):
|
||||
with patch.dict(filemod.__salt__, {'cp.list_master': MagicMock(return_value=[]),
|
||||
'cp.list_master_dirs': MagicMock(return_value=[])}):
|
||||
ret = filemod.source_list(
|
||||
[{'file://' + self.myfile: ''}], 'filehash', 'base')
|
||||
self.assertEqual(list(ret), ['file://' + self.myfile, 'filehash'])
|
||||
|
|
|
@ -10,9 +10,11 @@ Unit tests for the Default Job Cache (local_cache).
|
|||
from __future__ import absolute_import
|
||||
import os
|
||||
import shutil
|
||||
import logging
|
||||
import tempfile
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.integration import AdaptedConfigurationTestCaseMixIn
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
|
@ -25,7 +27,11 @@ from tests.support.mock import (
|
|||
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.utils.jid
|
||||
import salt.returners.local_cache as local_cache
|
||||
import salt.ext.six as six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
TMP_CACHE_DIR = os.path.join(TMP, 'salt_test_job_cache')
|
||||
TMP_JID_DIR = os.path.join(TMP_CACHE_DIR, 'jobs')
|
||||
|
@ -158,3 +164,149 @@ class LocalCacheCleanOldJobsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
jid_file.write('this is a jid file')
|
||||
|
||||
return temp_dir, jid_file_path
|
||||
|
||||
|
||||
class Local_CacheTest(TestCase, AdaptedConfigurationTestCaseMixIn, LoaderModuleMockMixin):
    '''
    Validate the local_cache job-cache returner against a real on-disk
    job cache rooted in a throwaway directory under the test TMP tree.
    '''
    def setup_loader_modules(self):
        # Point local_cache at the class-level temp cache dir and use a
        # near-zero keep_jobs so clean_old_jobs() treats every stored job
        # as already expired.
        return {
            local_cache: {
                '__opts__': {
                    'cachedir': self.TMP_CACHE_DIR,
                    'keep_jobs': self.KEEP_JOBS
                }
            }
        }

    @classmethod
    def setUpClass(cls):
        # Paths for one known jid, laid out the way local_cache stores it:
        # <cachedir>/jobs/<2-char prefix>/<rest of jid hash>/{jid, <minion>/return.p}
        cls.TMP_CACHE_DIR = os.path.join(TMP, 'rootdir', 'cache')
        cls.JOBS_DIR = os.path.join(cls.TMP_CACHE_DIR, 'jobs')
        cls.JID_DIR = os.path.join(cls.JOBS_DIR, '31', 'c56eed380a4e899ae12bc42563cfdfc53066fb4a6b53e2378a08ac49064539')
        cls.JID_FILE = os.path.join(cls.JID_DIR, 'jid')
        cls.JID_MINION_DIR = os.path.join(cls.JID_DIR, 'minion', 'return.p')
        cls.JOB_CACHE_DIR_FILES = [cls.JID_FILE, cls.JID_MINION_DIR]
        # Effectively zero hours: any stored job is immediately stale.
        cls.KEEP_JOBS = 0.0000000010
        cls.EMPTY_JID_DIR = []

    @classmethod
    def tearDownClass(cls):
        # Remove any files/dirs the tests created and drop the class
        # attributes so no state leaks between test classes.
        for attrname in ('TMP_CACHE_DIR', 'JOBS_DIR', 'JID_DIR', 'JID_FILE', 'JID_MINION_DIR',
                         'JOB_CACHE_DIR_FILES', 'KEEP_JOBS', 'EMPTY_JID_DIR'):
            try:
                attr_instance = getattr(cls, attrname)
                if isinstance(attr_instance, six.string_types):
                    if os.path.isdir(attr_instance):
                        shutil.rmtree(attr_instance)
                    elif os.path.isfile(attr_instance):
                        os.unlink(attr_instance)
                delattr(cls, attrname)
            except AttributeError:
                continue

    def _check_dir_files(self, msg, contents, status='None'):
        '''
        Assert that every path in ``contents`` is present
        (``status='present'``) or absent (``status='removed'``).

        ``msg`` prefixes the assertion failure message for each path.
        '''
        if status not in ('present', 'removed'):
            # Previously any other status (including the default) fell
            # through to an UnboundLocalError on check_job_dir; fail
            # loudly with a clear message instead.
            raise ValueError(
                'status must be \'present\' or \'removed\', not {0!r}'.format(status))
        for content in contents:
            # Lazy %-style args: the message is only rendered if debug
            # logging is actually enabled.
            log.debug('CONTENT %s', content)
            if status == 'present':
                check_job_dir = os.path.exists(content)
            else:  # status == 'removed'
                check_job_dir = not os.path.exists(content)
            self.assertTrue(check_job_dir, msg=msg + content)

    def _add_job(self):
        '''
        Store one completed test.ping job in the cache and verify that
        the jid file and the minion return file were written.
        '''
        # Build master opts but redirect the cache into our temp dir.
        opts = {}
        opts.update(self.get_config('master'))
        opts['cachedir'] = self.TMP_CACHE_DIR
        load = {'fun_args': [], 'jid': '20160603132323715452',
                'return': True, 'retcode': 0, 'success': True,
                'cmd': '_return', 'fun': 'test.ping', 'id': 'minion'}

        # NOTE(review): relies on salt.utils.job being importable through
        # the top-level `import salt.utils` — confirm salt.utils.job is
        # imported somewhere, otherwise add an explicit import.
        add_job = salt.utils.job.store_job(opts, load)
        # store_job returns None on success.
        self.assertEqual(add_job, None)
        self._check_dir_files('Dir/file does not exist: ',
                              self.JOB_CACHE_DIR_FILES,
                              status='present')

    def test_clean_old_jobs(self):
        '''
        test to ensure jobs are removed from job cache
        '''
        self._add_job()

        # keep_jobs is effectively zero, so the stored job must be purged.
        self.assertEqual(local_cache.clean_old_jobs(), None)

        self._check_dir_files('job cache was not removed: ',
                              self.JOB_CACHE_DIR_FILES,
                              status='removed')

    def test_not_clean_new_jobs(self):
        '''
        test to ensure jobs are not removed when
        jobs dir is new
        '''
        self._add_job()

        # With a 24-hour keep_jobs window the freshly written job is
        # inside the retention period and must survive cleanup.
        with patch.dict(local_cache.__opts__, {'keep_jobs': 24}):
            self.assertEqual(local_cache.clean_old_jobs(), None)

            self._check_dir_files('job cache was removed: ',
                                  self.JOB_CACHE_DIR_FILES,
                                  status='present')

    def test_empty_jid_dir(self):
        '''
        test to ensure removal of empty jid dir
        '''
        # add an empty jid dir that clean_old_jobs() should prune
        new_jid_dir = os.path.join(self.JOBS_DIR, 'z0')
        self.EMPTY_JID_DIR.append(new_jid_dir)
        os.makedirs(new_jid_dir)

        # This is needed due to a race condition in Windows:
        # `os.makedirs` hasn't released the handle before
        # `local_cache.clean_old_jobs` tries to delete the new_jid_dir.
        if salt.utils.is_windows():
            import time
            lock_dir = new_jid_dir + '.lckchk'
            tries = 0
            while True:
                tries += 1
                if tries > 10:
                    break
                # Rename the directory and name it back.
                # If it fails, the directory handle is not released, try again.
                # If it succeeds, break and continue the test.
                try:
                    os.rename(new_jid_dir, lock_dir)
                    time.sleep(1)
                    os.rename(lock_dir, new_jid_dir)
                    break
                except WindowsError:  # pylint: disable=E0602
                    continue

        # sanity check: the empty dir exists before cleanup
        self._check_dir_files('new_jid_dir was not created',
                              self.EMPTY_JID_DIR,
                              status='present')

        # remove job
        self.assertEqual(local_cache.clean_old_jobs(), None)

        # check jid dir is removed
        self._check_dir_files('new_jid_dir was not removed',
                              self.EMPTY_JID_DIR,
                              status='removed')
|
||||
|
|
|
@ -7,10 +7,14 @@ import shutil
|
|||
import tempfile
|
||||
|
||||
# Import Salt Testing Libs
|
||||
import tests.integration as integration
|
||||
from salt.runners import winrepo
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.unit import skipIf, TestCase
|
||||
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.runners.winrepo as winrepo
|
||||
|
||||
_WINREPO_SLS = r'''
|
||||
winscp_x86:
|
||||
|
@ -65,29 +69,32 @@ _WINREPO_GENREPO_DATA = {
|
|||
}
|
||||
}
|
||||
|
||||
winrepo.__opts__ = {
|
||||
'winrepo_cachefile': 'winrepo.p',
|
||||
'renderer': 'yaml',
|
||||
'renderer_blacklist': [],
|
||||
'renderer_whitelist': []
|
||||
}
|
||||
winrepo.__salt__ = {}
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
class WinrepoTest(TestCase, LoaderModuleMockMixin):
    '''
    Test the winrepo runner
    '''
    def setup_loader_modules(self):
        # Build a throwaway winrepo working tree plus an extension-modules
        # dir; both are registered for removal when the test finishes.
        self.winrepo_dir = tempfile.mkdtemp(dir=TMP)
        self.addCleanup(shutil.rmtree, self.winrepo_dir, ignore_errors=True)
        self.extmods_dir = tempfile.mkdtemp(dir=TMP)
        self.addCleanup(shutil.rmtree, self.extmods_dir, ignore_errors=True)
        # genrepo() scans SLS files under the winrepo dir
        self.winrepo_sls_dir = os.path.join(self.winrepo_dir, 'repo_sls')
        os.mkdir(self.winrepo_sls_dir)
        # Show full diffs when comparing the (large) generated repo dict
        self.maxDiff = None
        # Opts the winrepo runner module is loaded with for these tests
        return {
            winrepo: {
                '__opts__': {
                    'winrepo_cachefile': 'winrepo.p',
                    'renderer': 'yaml',
                    'renderer_blacklist': [],
                    'renderer_whitelist': [],
                    'file_roots': {'base': [self.winrepo_dir]},
                    'winrepo_dir': self.winrepo_dir,
                    'extension_modules': self.extmods_dir
                }
            }
        }
|
||||
|
||||
def test_genrepo(self):
|
||||
'''
|
||||
|
@ -95,11 +102,6 @@ class WinrepoTest(integration.ShellCase):
|
|||
'''
|
||||
sls_file = os.path.join(self.winrepo_sls_dir, 'wireshark.sls')
|
||||
# Add a winrepo SLS file
|
||||
with open(sls_file, 'w') as fp_:
|
||||
with salt.utils.fopen(sls_file, 'w') as fp_:
|
||||
fp_.write(_WINREPO_SLS)
|
||||
with patch.dict(
|
||||
winrepo.__opts__,
|
||||
{'file_roots': {'base': [self.winrepo_dir]},
|
||||
'winrepo_dir': self.winrepo_dir,
|
||||
'extension_modules': self.extmods_dir}):
|
||||
self.assertEqual(winrepo.genrepo(), _WINREPO_GENREPO_DATA)
|
||||
self.assertEqual(winrepo.genrepo(), _WINREPO_GENREPO_DATA)
|
Loading…
Add table
Reference in a new issue