Merge pull request #46007 from rallytime/merge-oxygen

[oxygen] Merge forward from oxygen.rc1 to oxygen
This commit is contained in:
Nicole Thomas 2018-02-13 13:50:08 -05:00 committed by GitHub
commit d4377d4678
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
65 changed files with 2380 additions and 1864 deletions

View file

@ -79,7 +79,7 @@ with release Neon.
The functions have been moved as follows:
- ``salt.utils.appendproctitle``: use ``salt.utils.process.appendproctitle``
- ``salt.utils.appendproctitle``: use ``salt.utils.process.appendproctitle``
instead.
- ``salt.utils.daemonize``: use ``salt.utils.process.daemonize`` instead.
- ``salt.utils.daemonize_if``: use ``salt.utils.process.daemonize_if`` instead.
@ -94,22 +94,22 @@ The functions have been moved as follows:
- ``salt.utils.is_hex``: use ``salt.utils.stringutils.is_hex`` instead.
- ``salt.utils.is_bin_str``: use ``salt.utils.stringutils.is_bin_str`` instead.
- ``salt.utils.rand_string``: use ``salt.utils.stringutils.random`` instead.
- ``salt.utils.contains_whitespace``: use
- ``salt.utils.contains_whitespace``: use
``salt.utils.stringutils.contains_whitespace`` instead.
- ``salt.utils.build_whitespace_split_regex``: use
- ``salt.utils.build_whitespace_split_regex``: use
``salt.utils.stringutils.build_whitespace_split_regex`` instead.
- ``salt.utils.expr_match``: use ``salt.utils.stringutils.expr_match`` instead.
- ``salt.utils.check_whitelist_blacklist``: use
- ``salt.utils.check_whitelist_blacklist``: use
``salt.utils.stringutils.check_whitelist_blacklist`` instead.
- ``salt.utils.check_include_exclude``: use
- ``salt.utils.check_include_exclude``: use
``salt.utils.stringutils.check_include_exclude`` instead.
- ``salt.utils.print_cli``: use ``salt.utils.stringutils.print_cli`` instead.
- ``salt.utils.clean_kwargs``: use ``salt.utils.args.clean_kwargs`` instead.
- ``salt.utils.invalid_kwargs``: use ``salt.utils.args.invalid_kwargs``
- ``salt.utils.invalid_kwargs``: use ``salt.utils.args.invalid_kwargs``
instead.
- ``salt.utils.shlex_split``: use ``salt.utils.args.shlex_split`` instead.
- ``salt.utils.arg_lookup``: use ``salt.utils.args.arg_lookup`` instead.
- ``salt.utils.argspec_report``: use ``salt.utils.args.argspec_report``
- ``salt.utils.argspec_report``: use ``salt.utils.args.argspec_report``
instead.
- ``salt.utils.split_input``: use ``salt.utils.args.split_input`` instead.
- ``salt.utils.test_mode``: use ``salt.utils.args.test_mode`` instead.
@ -118,7 +118,7 @@ The functions have been moved as follows:
- ``salt.utils.which_bin``: use ``salt.utils.path.which_bin`` instead.
- ``salt.utils.path_join``: use ``salt.utils.path.join`` instead.
- ``salt.utils.check_or_die``: use ``salt.utils.path.check_or_die`` instead.
- ``salt.utils.sanitize_win_path_string``: use
- ``salt.utils.sanitize_win_path_string``: use
``salt.utils.path.sanitize_win_path`` instead.
- ``salt.utils.rand_str``: use ``salt.utils.hashutils.random_hash`` instead.
- ``salt.utils.get_hash``: use ``salt.utils.hashutils.get_hash`` instead.
@ -128,9 +128,9 @@ The functions have been moved as follows:
- ``salt.utils.is_darwin``: use ``salt.utils.platform.is_darwin`` instead.
- ``salt.utils.is_sunos``: use ``salt.utils.platform.is_sunos`` instead.
- ``salt.utils.is_smartos``: use ``salt.utils.platform.is_smartos`` instead.
- ``salt.utils.is_smartos_globalzone``: use
- ``salt.utils.is_smartos_globalzone``: use
``salt.utils.platform.is_smartos_globalzone`` instead.
- ``salt.utils.is_smartos_zone``: use ``salt.utils.platform.is_smartos_zone``
- ``salt.utils.is_smartos_zone``: use ``salt.utils.platform.is_smartos_zone``
instead.
- ``salt.utils.is_freebsd``: use ``salt.utils.platform.is_freebsd`` instead.
- ``salt.utils.is_netbsd``: use ``salt.utils.platform.is_netbsd`` instead.
@ -147,55 +147,55 @@ The functions have been moved as follows:
- ``salt.utils.is_bin_file``: use ``salt.utils.files.is_binary`` instead.
- ``salt.utils.list_files``: use ``salt.utils.files.list_files`` instead.
- ``salt.utils.safe_walk``: use ``salt.utils.files.safe_walk`` instead.
- ``salt.utils.st_mode_to_octal``: use ``salt.utils.files.st_mode_to_octal``
- ``salt.utils.st_mode_to_octal``: use ``salt.utils.files.st_mode_to_octal``
instead.
- ``salt.utils.normalize_mode``: use ``salt.utils.files.normalize_mode``
- ``salt.utils.normalize_mode``: use ``salt.utils.files.normalize_mode``
instead.
- ``salt.utils.human_size_to_bytes``: use
- ``salt.utils.human_size_to_bytes``: use
``salt.utils.files.human_size_to_bytes`` instead.
- ``salt.utils.backup_minion``: use ``salt.utils.files.backup_minion`` instead.
- ``salt.utils.str_version_to_evr``: use ``salt.utils.pkg.rpm.version_to_evr``
instead.
- ``salt.utils.parse_docstring``: use ``salt.utils.doc.parse_docstring``
- ``salt.utils.parse_docstring``: use ``salt.utils.doc.parse_docstring``
instead.
- ``salt.utils.compare_versions``: use ``salt.utils.versions.compare`` instead.
- ``salt.utils.version_cmp``: use ``salt.utils.versions.version_cmp`` instead.
- ``salt.utils.warn_until``: use ``salt.utils.versions.warn_until`` instead.
- ``salt.utils.kwargs_warn_until``: use
- ``salt.utils.kwargs_warn_until``: use
``salt.utils.versions.kwargs_warn_until`` instead.
- ``salt.utils.get_color_theme``: use ``salt.utils.color.get_color_theme``
- ``salt.utils.get_color_theme``: use ``salt.utils.color.get_color_theme``
instead.
- ``salt.utils.get_colors``: use ``salt.utils.color.get_colors`` instead.
- ``salt.utils.gen_state_tag``: use ``salt.utils.state.gen_tag`` instead.
- ``salt.utils.search_onfail_requisites``: use
- ``salt.utils.search_onfail_requisites``: use
``salt.utils.state.search_onfail_requisites`` instead.
- ``salt.utils.check_state_result``: use ``salt.utils.state.check_result``
- ``salt.utils.check_state_result``: use ``salt.utils.state.check_result``
instead.
- ``salt.utils.get_user``: use ``salt.utils.user.get_user`` instead.
- ``salt.utils.get_uid``: use ``salt.utils.user.get_uid`` instead.
- ``salt.utils.get_specific_user``: use ``salt.utils.user.get_specific_user``
- ``salt.utils.get_specific_user``: use ``salt.utils.user.get_specific_user``
instead.
- ``salt.utils.chugid``: use ``salt.utils.user.chugid`` instead.
- ``salt.utils.chugid_and_umask``: use ``salt.utils.user.chugid_and_umask``
- ``salt.utils.chugid_and_umask``: use ``salt.utils.user.chugid_and_umask``
instead.
- ``salt.utils.get_default_group``: use ``salt.utils.user.get_default_group``
- ``salt.utils.get_default_group``: use ``salt.utils.user.get_default_group``
instead.
- ``salt.utils.get_group_list``: use ``salt.utils.user.get_group_list``
- ``salt.utils.get_group_list``: use ``salt.utils.user.get_group_list``
instead.
- ``salt.utils.get_group_dict``: use ``salt.utils.user.get_group_dict``
- ``salt.utils.get_group_dict``: use ``salt.utils.user.get_group_dict``
instead.
- ``salt.utils.get_gid_list``: use ``salt.utils.user.get_gid_list`` instead.
- ``salt.utils.get_gid``: use ``salt.utils.user.get_gid`` instead.
- ``salt.utils.enable_ctrl_logoff_handler``: use
- ``salt.utils.enable_ctrl_logoff_handler``: use
``salt.utils.win_functions.enable_ctrl_logoff_handler`` instead.
- ``salt.utils.traverse_dict``: use ``salt.utils.data.traverse_dict`` instead.
- ``salt.utils.traverse_dict_and_list``: use
- ``salt.utils.traverse_dict_and_list``: use
``salt.utils.data.traverse_dict_and_list`` instead.
- ``salt.utils.filter_by``: use ``salt.utils.data.filter_by`` instead.
- ``salt.utils.subdict_match``: use ``salt.utils.data.subdict_match`` instead.
- ``salt.utils.substr_in_list``: use ``salt.utils.data.substr_in_list`` instead.
- ``salt.utils.is_dictlist``: use ``salt.utils.data.is_dictlist``.
- ``salt.utils.repack_dictlist``: use ``salt.utils.data.repack_dictlist``
- ``salt.utils.repack_dictlist``: use ``salt.utils.data.repack_dictlist``
instead.
- ``salt.utils.compare_dicts``: use ``salt.utils.data.compare_dicts`` instead.
- ``salt.utils.compare_lists``: use ``salt.utils.data.compare_lists`` instead.
@ -208,33 +208,33 @@ The functions have been moved as follows:
- ``salt.utils.isorted``: use ``salt.utils.data.sorted_ignorecase`` instead.
- ``salt.utils.is_true``: use ``salt.utils.data.is_true`` instead.
- ``salt.utils.mysql_to_dict``: use ``salt.utils.data.mysql_to_dict`` instead.
- ``salt.utils.simple_types_filter``: use
- ``salt.utils.simple_types_filter``: use
``salt.utils.data.simple_types_filter`` instead.
- ``salt.utils.ip_bracket``: use ``salt.utils.zeromq.ip_bracket`` instead.
- ``salt.utils.gen_mac``: use ``salt.utils.network.gen_mac`` instead.
- ``salt.utils.mac_str_to_bytes``: use ``salt.utils.network.mac_str_to_bytes``
- ``salt.utils.mac_str_to_bytes``: use ``salt.utils.network.mac_str_to_bytes``
instead.
- ``salt.utils.refresh_dns``: use ``salt.utils.network.refresh_dns`` instead.
- ``salt.utils.dns_check``: use ``salt.utils.network.dns_check`` instead.
- ``salt.utils.get_context``: use ``salt.utils.templates.get_context`` instead.
- ``salt.utils.get_master_key``: use ``salt.utils.master.get_master_key``
- ``salt.utils.get_context``: use ``salt.utils.stringutils.get_context`` instead.
- ``salt.utils.get_master_key``: use ``salt.utils.master.get_master_key``
instead.
- ``salt.utils.get_values_of_matching_keys``: use
- ``salt.utils.get_values_of_matching_keys``: use
``salt.utils.master.get_values_of_matching_keys`` instead.
- ``salt.utils.date_cast``: use ``salt.utils.dateutils.date_cast`` instead.
- ``salt.utils.date_format``: use ``salt.utils.dateutils.strftime`` instead.
- ``salt.utils.total_seconds``: use ``salt.utils.dateutils.total_seconds``
- ``salt.utils.total_seconds``: use ``salt.utils.dateutils.total_seconds``
instead.
- ``salt.utils.find_json``: use ``salt.utils.json.find_json`` instead.
- ``salt.utils.import_json``: use ``salt.utils.json.import_json`` instead.
- ``salt.utils.namespaced_function``: use
- ``salt.utils.namespaced_function``: use
``salt.utils.functools.namespaced_function`` instead.
- ``salt.utils.alias_function``: use ``salt.utils.functools.alias_function``
- ``salt.utils.alias_function``: use ``salt.utils.functools.alias_function``
instead.
- ``salt.utils.profile_func``: use ``salt.utils.profile.profile_func`` instead.
- ``salt.utils.activate_profile``: use ``salt.utils.profile.activate_profile``
- ``salt.utils.activate_profile``: use ``salt.utils.profile.activate_profile``
instead.
- ``salt.utils.output_profile``: use ``salt.utils.profile.output_profile``
- ``salt.utils.output_profile``: use ``salt.utils.profile.output_profile``
instead.
State and Execution Module Support for ``docker run`` Functionality

View file

@ -119,11 +119,12 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
Creates a master server
'''
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
# escalate signal to the process manager processes
self.master.process_manager.stop_restarting()
self.master.process_manager.send_signal_to_processes(signum)
# kill any remaining processes
self.master.process_manager.kill_children()
if hasattr(self.master, 'process_manager'): # IofloMaster has no process manager
# escalate signal to the process manager processes
self.master.process_manager.stop_restarting()
self.master.process_manager.send_signal_to_processes(signum)
# kill any remaining processes
self.master.process_manager.kill_children()
super(Master, self)._handle_signals(signum, sigframe)
def prepare(self):
@ -151,7 +152,6 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
os.path.join(self.config['cachedir'], 'jobs'),
os.path.join(self.config['cachedir'], 'proc'),
self.config['sock_dir'],
self.config['key_dir'],
self.config['token_dir'],
self.config['syndic_dir'],
self.config['sqlite_queue_dir'],
@ -166,7 +166,7 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
self.config['user'],
permissive=self.config['permissive_pki_access'],
root_dir=self.config['root_dir'],
sensitive_dirs=[self.config['pki_dir'], self.config['key_dir']],
pki_dir=self.config['pki_dir'],
)
# Clear out syndics from cachedir
for syndic_file in os.listdir(self.config['syndic_dir']):
@ -234,7 +234,8 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
# escalate signal to the process manager processes
self.minion.stop(signum)
if hasattr(self.minion, 'stop'):
self.minion.stop(signum)
super(Minion, self)._handle_signals(signum, sigframe)
# pylint: disable=no-member
@ -287,7 +288,7 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
self.config['user'],
permissive=self.config['permissive_pki_access'],
root_dir=self.config['root_dir'],
sensitive_dirs=[self.config['pki_dir']],
pki_dir=self.config['pki_dir'],
)
except OSError as error:
self.environment_failure(error)
@ -392,7 +393,7 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
:param exitmsg
'''
self.action_log_info('Shutting down')
if hasattr(self, 'minion'):
if hasattr(self, 'minion') and hasattr(self.minion, 'destroy'):
self.minion.destroy()
super(Minion, self).shutdown(
exitcode, ('The Salt {0} is shutdown. {1}'.format(
@ -469,7 +470,7 @@ class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): #
self.config['user'],
permissive=self.config['permissive_pki_access'],
root_dir=self.config['root_dir'],
sensitive_dirs=[self.config['pki_dir']],
pki_dir=self.config['pki_dir'],
)
except OSError as error:
self.environment_failure(error)
@ -578,7 +579,7 @@ class Syndic(salt.utils.parsers.SyndicOptionParser, DaemonsMixin): # pylint: di
self.config['user'],
permissive=self.config['permissive_pki_access'],
root_dir=self.config['root_dir'],
sensitive_dirs=[self.config['pki_dir']],
pki_dir=self.config['pki_dir'],
)
except OSError as error:
self.environment_failure(error)

View file

@ -10,6 +10,7 @@ import os
import salt.utils.job
import salt.utils.parsers
import salt.utils.stringutils
import salt.log
from salt.utils.args import yamlify_arg
from salt.utils.verify import verify_log
from salt.exceptions import (
@ -38,9 +39,10 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
import salt.client
self.parse_args()
# Setup file logging!
self.setup_logfile_logger()
verify_log(self.config)
if self.config['log_level'] not in ('quiet', ):
# Setup file logging!
self.setup_logfile_logger()
verify_log(self.config)
try:
# We don't need to bail on config file permission errors
@ -82,7 +84,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
if 'token' in self.config:
import salt.utils.files
try:
with salt.utils.files.fopen(os.path.join(self.config['key_dir'], '.root_key'), 'r') as fp_:
with salt.utils.files.fopen(os.path.join(self.config['cachedir'], '.root_key'), 'r') as fp_:
kwargs['key'] = fp_.readline()
except IOError:
kwargs['token'] = self.config['token']

View file

@ -194,11 +194,11 @@ class LocalClient(object):
# The username may contain '\' if it is in Windows
# 'DOMAIN\username' format. Fix this for the keyfile path.
key_user = key_user.replace('\\', '_')
keyfile = os.path.join(self.opts['key_dir'],
keyfile = os.path.join(self.opts['cachedir'],
'.{0}_key'.format(key_user))
try:
# Make sure all key parent directories are accessible
salt.utils.verify.check_path_traversal(self.opts['key_dir'],
salt.utils.verify.check_path_traversal(self.opts['cachedir'],
key_user,
self.skip_perm_errors)
with salt.utils.files.fopen(keyfile, 'r') as key:

View file

@ -240,7 +240,7 @@ class CloudClient(object):
if a.get('provider', '')]
if providers:
_providers = opts.get('providers', {})
for provider in list(_providers).copy():
for provider in _providers.copy():
if provider not in providers:
_providers.pop(provider)
return opts

View file

@ -200,8 +200,7 @@ def get_dependencies():
'''
deps = {
'requests': HAS_REQUESTS,
'm2crypto': HAS_M2,
'pycrypto': HAS_PYCRYPTO
'pycrypto or m2crypto': HAS_M2 or HAS_PYCRYPTO
}
return config.check_driver_dependencies(
__virtualname__,

View file

@ -355,7 +355,7 @@ def _get_ips(node, addr_type='public'):
ret = []
for _, interface in node.addresses.items():
for addr in interface:
if addr_type in ('floating', 'fixed') and addr_type == addr['OS-EXT-IPS:type']:
if addr_type in ('floating', 'fixed') and addr_type == addr.get('OS-EXT-IPS:type'):
ret.append(addr['addr'])
elif addr_type == 'public' and __utils__['cloud.is_public_ip'](addr['addr']):
ret.append(addr['addr'])

View file

@ -197,9 +197,6 @@ VALID_OPTS = {
# The directory used to store public key data
'pki_dir': six.string_types,
# The directory to store authentication keys of a master's local environment.
'key_dir': six.string_types,
# A unique identifier for this daemon
'id': six.string_types,
@ -1495,7 +1492,6 @@ DEFAULT_MASTER_OPTS = {
'archive_jobs': False,
'root_dir': salt.syspaths.ROOT_DIR,
'pki_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'pki', 'master'),
'key_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'key'),
'key_cache': '',
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'master'),
'file_roots': {
@ -2310,6 +2306,12 @@ def prepend_root_dir(opts, path_options):
path = tmp_path_root_dir
else:
path = tmp_path_def_root_dir
elif salt.utils.platform.is_windows() and not os.path.splitdrive(path)[0]:
# In windows, os.path.isabs resolves '/' to 'C:\\' or whatever
# the root drive is. This elif prevents the next from being
# hit, so that the root_dir is prefixed in cases where the
# drive is not prefixed on a config option
pass
elif os.path.isabs(path):
# Absolute path (not default or overriden root_dir)
# No prepending required
@ -2497,7 +2499,7 @@ def syndic_config(master_config_path,
opts.update(syndic_opts)
# Prepend root_dir to other paths
prepend_root_dirs = [
'pki_dir', 'key_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
'pki_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
'autosign_file', 'autoreject_file', 'token_dir', 'autosign_grains_dir'
]
for config_key in ('log_file', 'key_logfile', 'syndic_log_file'):
@ -3645,7 +3647,7 @@ def _adjust_log_file_override(overrides, default_log_file):
if overrides.get('log_dir'):
# Adjust log_file if a log_dir override is introduced
if overrides.get('log_file'):
if not os.path.abspath(overrides['log_file']):
if not os.path.isabs(overrides['log_file']):
# Prepend log_dir if log_file is relative
overrides['log_file'] = os.path.join(overrides['log_dir'],
overrides['log_file'])
@ -3934,7 +3936,7 @@ def apply_master_config(overrides=None, defaults=None):
# Prepend root_dir to other paths
prepend_root_dirs = [
'pki_dir', 'key_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
'pki_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
'autosign_file', 'autoreject_file', 'token_dir', 'syndic_dir',
'sqlite_queue_dir', 'autosign_grains_dir'
]

View file

@ -121,6 +121,7 @@ class IofloMinion(object):
'''
warn_deprecated()
self.opts = opts
self.restart = False
def tune_in(self, behaviors=None):
'''

View file

@ -186,11 +186,11 @@ def mk_key(opts, user):
# The username may contain '\' if it is in Windows
# 'DOMAIN\username' format. Fix this for the keyfile path.
keyfile = os.path.join(
opts['key_dir'], '.{0}_key'.format(user.replace('\\', '_'))
opts['cachedir'], '.{0}_key'.format(user.replace('\\', '_'))
)
else:
keyfile = os.path.join(
opts['key_dir'], '.{0}_key'.format(user)
opts['cachedir'], '.{0}_key'.format(user)
)
if os.path.exists(keyfile):

View file

@ -11,7 +11,12 @@ framer minionudpstack be active first start
exit
do salt raet road stack closer per inode ".salt.road.manor."
framer bootstrap be active first join
framer bootstrap be active first setup
frame setup
enter
do salt raet road usher minion setup per inode ".salt.road.manor."
go join
frame join
print Joining...
enter
@ -44,7 +49,7 @@ framer bootstrap be active first join
frame message
print Messaging...
enter
do raet road stack messenger to contents "Minion 1 Hello" code 15 \
do raet road stack messenger with contents "Minion 1 Hello" code 15 \
per inode ".salt.road.manor."
go next

View file

@ -22,6 +22,10 @@ def test():
if not os.path.exists(pkiDirpath):
os.makedirs(pkiDirpath)
keyDirpath = os.path.join('/tmp', 'raet', 'testo', 'key')
if not os.path.exists(keyDirpath):
os.makedirs(keyDirpath)
acceptedDirpath = os.path.join(pkiDirpath, 'accepted')
if not os.path.exists(acceptedDirpath):
os.makedirs(acceptedDirpath)
@ -64,10 +68,12 @@ def test():
client_acl=dict(),
publisher_acl=dict(),
pki_dir=pkiDirpath,
key_dir=keyDirpath,
sock_dir=sockDirpath,
cachedir=cacheDirpath,
open_mode=True,
auto_accept=True,
client_acl_verify=True,
)
master = salt.daemons.flo.IofloMaster(opts=opts)

View file

@ -348,8 +348,8 @@ if __name__ == '__main__' and __package__ is None:
#console.reinit(verbosity=console.Wordage.concise)
#runAll() # run all unittests
runAll() # run all unittests
runSome() # only run some
#runSome() # only run some
#runOne('testParseHostname')

View file

@ -5,6 +5,7 @@ Raet Ioflo Behavior Unittests
from __future__ import absolute_import, print_function, unicode_literals
import sys
from salt.ext.six.moves import map
import importlib
# pylint: disable=blacklisted-import
if sys.version_info < (2, 7):
import unittest2 as unittest
@ -40,6 +41,9 @@ class PresenterTestCase(testing.FrameIofloTestCase):
'''
Call super if override so House Framer and Frame are setup correctly
'''
behaviors = ['salt.daemons.flo', 'salt.daemons.test.plan']
for behavior in behaviors:
mod = importlib.import_module(behavior)
super(PresenterTestCase, self).setUp()
def tearDown(self):

View file

@ -7,6 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
# pylint: skip-file
# pylint: disable=C0103
import sys
import salt.utils.stringutils
from salt.ext.six.moves import map
if sys.version_info < (2, 7):
import unittest2 as unittest
@ -30,12 +31,15 @@ from raet.road import estating, keeping, stacking
from salt.key import RaetKey
def setUpModule():
console.reinit(verbosity=console.Wordage.concise)
def tearDownModule():
pass
class BasicTestCase(unittest.TestCase):
""""""
@ -47,7 +51,7 @@ class BasicTestCase(unittest.TestCase):
pkiDirpath = os.path.join(self.saltDirpath, 'pki')
if not os.path.exists(pkiDirpath):
os.makedirs(pkiDirpath)
os.makedirs(pkiDirpath)
acceptedDirpath = os.path.join(pkiDirpath, 'accepted')
if not os.path.exists(acceptedDirpath):
@ -81,7 +85,7 @@ class BasicTestCase(unittest.TestCase):
)
self.mainKeeper = RaetKey(opts=self.opts)
self.baseDirpath = tempfile.mkdtemp(prefix="salt", suffix="base", dir='/tmp')
self.baseDirpath = tempfile.mkdtemp(prefix="salt", suffix="base", dir='/tmp')
def tearDown(self):
if os.path.exists(self.saltDirpath):
@ -119,9 +123,9 @@ class BasicTestCase(unittest.TestCase):
self.opts['auto_accept'] = True
self.assertTrue(self.opts['auto_accept'])
self.assertDictEqual(self.mainKeeper.all_keys(), {'accepted': [],
'local': [],
'rejected': [],
'pending': []})
'local': [],
'rejected': [],
'pending': []})
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {})
@ -129,8 +133,9 @@ class BasicTestCase(unittest.TestCase):
main = self.createRoadData(name='main', base=self.baseDirpath)
self.mainKeeper.write_local(main['prihex'], main['sighex'])
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {'priv': main['prihex'],
'sign': main['sighex']})
self.assertDictEqual(localkeys,
{'priv': salt.utils.stringutils.to_str(main['prihex']),
'sign': salt.utils.stringutils.to_str(main['sighex'])})
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],
@ -147,39 +152,38 @@ class BasicTestCase(unittest.TestCase):
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other1', 'other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})
remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other1',
'pub': other1['pubhex'],
'verify': other1['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other1',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
'verify': salt.utils.stringutils.to_str(other1['verhex'])})
remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex'])})
listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': ['other1', 'other2'],
'rejected': [],
'pending': []})
allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': other1['pubhex'],},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other1['pubhex']), },
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']), }
})
def testManualAccept(self):
'''
@ -189,9 +193,9 @@ class BasicTestCase(unittest.TestCase):
self.opts['auto_accept'] = False
self.assertFalse(self.opts['auto_accept'])
self.assertDictEqual(self.mainKeeper.all_keys(), {'accepted': [],
'local': [],
'rejected': [],
'pending': []})
'local': [],
'rejected': [],
'pending': []})
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {})
@ -199,8 +203,9 @@ class BasicTestCase(unittest.TestCase):
main = self.createRoadData(name='main', base=self.baseDirpath)
self.mainKeeper.write_local(main['prihex'], main['sighex'])
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {'priv': main['prihex'],
'sign': main['sighex']})
self.assertDictEqual(localkeys,
{'priv': salt.utils.stringutils.to_str(main['prihex']),
'sign': salt.utils.stringutils.to_str(main['sighex'])})
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],
@ -217,9 +222,9 @@ class BasicTestCase(unittest.TestCase):
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],
'pending': ['other1', 'other2'],
'rejected': []} )
'local': [self.localFilepath],
'pending': ['other1', 'other2'],
'rejected': []})
remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, {})
@ -232,56 +237,60 @@ class BasicTestCase(unittest.TestCase):
'rejected': [],
'pending': ['other1', 'other2']})
allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'pending',
'pub': other1['pubhex'],},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'pending',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'pending',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
},
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'pending',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})
self.mainKeeper.accept_all()
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other1', 'other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})
remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other1',
'pub': other1['pubhex'],
'verify': other1['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other1',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
'verify': salt.utils.stringutils.to_str(other1['verhex'])})
remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex'])})
listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': ['other1', 'other2'],
'rejected': [],
'pending': []})
allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': other1['pubhex'],},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
},
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})
def testDelete(self):
'''
@ -291,9 +300,9 @@ class BasicTestCase(unittest.TestCase):
self.opts['auto_accept'] = True
self.assertTrue(self.opts['auto_accept'])
self.assertDictEqual(self.mainKeeper.all_keys(), {'accepted': [],
'local': [],
'rejected': [],
'pending': []})
'local': [],
'rejected': [],
'pending': []})
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {})
@ -301,8 +310,9 @@ class BasicTestCase(unittest.TestCase):
main = self.createRoadData(name='main', base=self.baseDirpath)
self.mainKeeper.write_local(main['prihex'], main['sighex'])
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {'priv': main['prihex'],
'sign': main['sighex']})
self.assertDictEqual(localkeys,
{'priv': salt.utils.stringutils.to_str(main['prihex']),
'sign': salt.utils.stringutils.to_str(main['sighex'])})
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],
@ -319,70 +329,73 @@ class BasicTestCase(unittest.TestCase):
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other1', 'other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})
remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other1',
'pub': other1['pubhex'],
'verify': other1['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other1',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
'verify': salt.utils.stringutils.to_str(other1['verhex']),
})
remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex']),
})
listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': ['other1', 'other2'],
'rejected': [],
'pending': []})
allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': other1['pubhex']},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other1['pubhex'])
},
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})
self.mainKeeper.delete_key(match=other1['name'])
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})
remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, {} )
self.assertDictEqual(remotekeys, {})
remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex'])})
listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': [ 'other2'],
self.assertDictEqual(listkeys, {'accepted': ['other2'],
'rejected': [],
'pending': []})
allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})
def runOne(test):
@ -393,11 +406,12 @@ def runOne(test):
suite = unittest.TestSuite([test])
unittest.TextTestRunner(verbosity=2).run(suite)
def runSome():
'''
Unittest runner
'''
tests = []
tests = []
names = ['testAutoAccept',
'testManualAccept',
'testDelete']
@ -407,6 +421,7 @@ def runSome():
suite = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)
def runAll():
'''
Unittest runner
@ -416,12 +431,12 @@ def runAll():
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__' and __package__ is None:
# console.reinit(verbosity=console.Wordage.concise)
#console.reinit(verbosity=console.Wordage.concise)
runAll() # run all unittests
runAll() #run all unittests
# runSome() #only run some
#runSome()#only run some
#runOne('testDelete')
# runOne('testDelete')

View file

@ -13,10 +13,11 @@ else:
# pylint: enable=blacklisted-import
import os
import stat
import time
import tempfile
import shutil
import socket
import stat
import tempfile
import time
from ioflo.aid.odicting import odict
from ioflo.aid.timing import StoreTimer
@ -29,6 +30,7 @@ from raet.road import estating, stacking
from salt.daemons import salting
import salt.utils.kinds as kinds
import salt.utils.stringutils
def setUpModule():
@ -232,20 +234,21 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
os.path.join('main', 'raet', 'main_master')))
self.assertTrue(main.ha, ("0.0.0.0", raeting.RAET_PORT))
self.assertIs(main.keep.auto, raeting.AutoMode.never.value)
self.assertDictEqual(main.keep.loadLocalData(), {'name': mainData['name'],
'uid': 1,
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
})
self.assertDictEqual(main.keep.loadLocalData(),
{'name': mainData['name'],
'uid': 1,
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
data1 = self.createRoadData(role='remote1',
kind=kinds.APPL_KIND_NAMES[kinds.applKinds.minion],
@ -282,7 +285,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],
@ -290,8 +293,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 0,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'remote2_minion':
{'name': data2['name'],
@ -300,7 +303,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],
@ -308,8 +311,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 0,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})
@ -362,14 +365,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})
data3 = self.createRoadData(role='remote3',
@ -405,7 +408,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7534],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data3['kind'],
@ -413,8 +416,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data3['role'],
'acceptance': 0,
'verhex': data3['verhex'],
'pubhex': data3['pubhex'],
'verhex': salt.utils.stringutils.to_str(data3['verhex']),
'pubhex': salt.utils.stringutils.to_str(data3['pubhex']),
},
'remote4_minion':
{
@ -424,7 +427,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7535],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data4['kind'],
@ -432,8 +435,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data4['role'],
'acceptance': 0,
'verhex': data4['verhex'],
'pubhex': data4['pubhex'],
'verhex': salt.utils.stringutils.to_str(data4['verhex']),
'pubhex': salt.utils.stringutils.to_str(data4['pubhex']),
}
})
@ -477,14 +480,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
data1 = self.createRoadData(role='remote1',
@ -520,7 +523,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],
@ -528,8 +531,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'remote2_minion':
{'name': data2['name'],
@ -538,7 +541,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],
@ -546,8 +549,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})
@ -600,14 +603,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})
data3 = self.createRoadData(role='remote3',
@ -643,7 +646,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7534],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data3['kind'],
@ -651,8 +654,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data3['role'],
'acceptance': 1,
'verhex': data3['verhex'],
'pubhex': data3['pubhex'],
'verhex': salt.utils.stringutils.to_str(data3['verhex']),
'pubhex': salt.utils.stringutils.to_str(data3['pubhex']),
},
'remote4_minion':
{
@ -662,7 +665,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7535],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data4['kind'],
@ -670,8 +673,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data4['role'],
'acceptance': 1,
'verhex': data4['verhex'],
'pubhex': data4['pubhex'],
'verhex': salt.utils.stringutils.to_str(data4['verhex']),
'pubhex': salt.utils.stringutils.to_str(data4['pubhex']),
}
})
@ -715,13 +718,13 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
'role': mainData['role'],
})
@ -759,7 +762,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],
@ -767,8 +770,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'remote2_minion':
{
@ -778,7 +781,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],
@ -786,8 +789,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})
@ -840,14 +843,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})
data3 = self.createRoadData(role='remote3',
@ -883,7 +886,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7534],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data3['kind'],
@ -891,8 +894,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data3['role'],
'acceptance': 1,
'verhex': data3['verhex'],
'pubhex': data3['pubhex'],
'verhex': salt.utils.stringutils.to_str(data3['verhex']),
'pubhex': salt.utils.stringutils.to_str(data3['pubhex']),
},
'remote4_minion':
{
@ -902,7 +905,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7535],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data4['kind'],
@ -910,8 +913,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data4['role'],
'acceptance': 1,
'verhex': data4['verhex'],
'pubhex': data4['pubhex'],
'verhex': salt.utils.stringutils.to_str(data4['verhex']),
'pubhex': salt.utils.stringutils.to_str(data4['pubhex']),
}
})
@ -955,14 +958,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
# add multiple remotes all with same role
@ -1006,7 +1009,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],
@ -1014,8 +1017,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 0,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'primary_caller':
{
@ -1025,7 +1028,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],
@ -1033,8 +1036,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 0,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
}
})
@ -1104,14 +1107,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
# add multiple remotes all with same role
@ -1149,7 +1152,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],
@ -1157,8 +1160,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
},
'primary_syndic':
{
@ -1168,7 +1171,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],
@ -1176,8 +1179,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})
@ -1248,14 +1251,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
# add multiple remotes all with same role but different keys
@ -1300,7 +1303,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],
@ -1308,8 +1311,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'primary_syndic':
{
@ -1319,7 +1322,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],
@ -1327,8 +1330,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
}
})
@ -1399,14 +1402,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
opts = self.createOpts(role='other',
@ -1441,14 +1444,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})
self.join(other, main)
@ -1524,14 +1527,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
opts = self.createOpts(role='other',
@ -1566,14 +1569,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})
self.join(other, main)
@ -1645,14 +1648,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
opts = self.createOpts(role='other',
@ -1687,13 +1690,13 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
'role': otherData['role'],
})
@ -1766,14 +1769,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
opts = self.createOpts(role='primary',
@ -1808,14 +1811,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': other1Data['role'],
'sighex': other1Data['sighex'],
'prihex': other1Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other1Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other1Data['prihex']),
})
self.join(other1, main)
@ -1876,13 +1879,13 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7532],
'sighex': other2Data['sighex'],
'prihex': other2Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other2Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other2Data['prihex']),
'role': other2Data['role'],
})
@ -1936,14 +1939,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7532],
'role': other2Data['role'],
'sighex': other1Data['sighex'],
'prihex': other1Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other1Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other1Data['prihex']),
})
# should join since same role and keys
@ -2021,14 +2024,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})
opts = self.createOpts(role='primary',
@ -2063,14 +2066,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': other1Data['role'],
'sighex': other1Data['sighex'],
'prihex': other1Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other1Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other1Data['prihex']),
})
self.join(other1, main)
@ -2130,14 +2133,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7532],
'role': other2Data['role'],
'sighex': other2Data['sighex'],
'prihex': other2Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other2Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other2Data['prihex']),
})
# should join since open mode
@ -2225,8 +2228,8 @@ if __name__ == '__main__' and __package__ is None:
#console.reinit(verbosity=console.Wordage.concise)
#runAll() # run all unittests
runAll() # run all unittests
runSome() # only run some
#runSome() # only run some
#runOne('testBootstrapRoleAuto')

View file

@ -5,6 +5,7 @@ Raet Ioflo Behavior Unittests
from __future__ import absolute_import, print_function, unicode_literals
import sys
from salt.ext.six.moves import map
import importlib
# pylint: disable=blacklisted-import
if sys.version_info < (2, 7):
import unittest2 as unittest
@ -43,6 +44,9 @@ class StatsEventerTestCase(testing.FrameIofloTestCase):
'''
Call super if override so House Framer and Frame are setup correctly
'''
behaviors = ['salt.daemons.flo', 'salt.daemons.test.plan']
for behavior in behaviors:
mod = importlib.import_module(behavior)
super(StatsEventerTestCase, self).setUp()
def tearDown(self):
@ -723,8 +727,8 @@ if __name__ == '__main__' and __package__ is None:
# console.reinit(verbosity=console.Wordage.concise)
#runAll() # run all unittests
runAll() # run all unittests
runSome() # only run some
#runSome() # only run some
#runOne('testMasterLaneStats')

View file

@ -285,7 +285,7 @@ class SaltRenderError(SaltException):
if self.line_num and self.buffer:
# Avoid circular import
import salt.utils.templates
self.context = salt.utils.templates.get_context(
self.context = salt.utils.stringutils.get_context(
self.buffer,
self.line_num,
marker=marker

View file

@ -125,7 +125,7 @@ class KeyCLI(object):
if self.opts['eauth']:
if 'token' in self.opts:
try:
with salt.utils.files.fopen(os.path.join(self.opts['key_dir'], '.root_key'), 'r') as fp_:
with salt.utils.files.fopen(os.path.join(self.opts['cachedir'], '.root_key'), 'r') as fp_:
low['key'] = \
salt.utils.stringutils.to_unicode(fp_.readline())
except IOError:
@ -1082,6 +1082,8 @@ class RaetKey(Key):
pre_path = os.path.join(pre, minion_id)
rej_path = os.path.join(rej, minion_id)
# open mode is turned on, force accept the key
pub = salt.utils.stringutils.to_str(pub)
verify = salt.utils.stringutils.to_str(verify)
keydata = {
'minion_id': minion_id,
'pub': pub,
@ -1148,7 +1150,7 @@ class RaetKey(Key):
verify: <verify>
'''
path = os.path.join(self.opts['pki_dir'], status, minion_id)
with salt.utils.files.fopen(path, 'r') as fp_:
with salt.utils.files.fopen(path, 'rb') as fp_:
keydata = self.serial.loads(fp_.read())
return 'pub: {0}\nverify: {1}'.format(
keydata['pub'],
@ -1158,7 +1160,7 @@ class RaetKey(Key):
'''
Return a sha256 kingerprint for the key
'''
with salt.utils.files.fopen(path, 'r') as fp_:
with salt.utils.files.fopen(path, 'rb') as fp_:
keydata = self.serial.loads(fp_.read())
key = 'pub: {0}\nverify: {1}'.format(
keydata['pub'],
@ -1442,7 +1444,7 @@ class RaetKey(Key):
if os.path.exists(path):
#mode = os.stat(path).st_mode
os.chmod(path, stat.S_IWUSR | stat.S_IRUSR)
with salt.utils.files.fopen(path, 'w+') as fp_:
with salt.utils.files.fopen(path, 'w+b') as fp_:
fp_.write(self.serial.dumps(keydata))
os.chmod(path, stat.S_IRUSR)
os.umask(c_umask)

View file

@ -17,6 +17,7 @@ import logging.handlers
# Import salt libs
from salt.log.mixins import NewStyleClassMixIn, ExcInfoOnLogLevelFormatMixIn
from salt.ext.six.moves import queue
log = logging.getLogger(__name__)
@ -176,7 +177,7 @@ if sys.version_info < (3, 2):
'''
try:
self.queue.put_nowait(record)
except self.queue.Full:
except queue.Full:
sys.stderr.write('[WARNING ] Message queue is full, '
'unable to write "{0}" to log'.format(record)
)

View file

@ -120,6 +120,7 @@ __MP_LOGGING_QUEUE = None
__MP_LOGGING_QUEUE_PROCESS = None
__MP_LOGGING_QUEUE_HANDLER = None
__MP_IN_MAINPROCESS = multiprocessing.current_process().name == 'MainProcess'
__MP_MAINPROCESS_ID = None
class __NullLoggingHandler(TemporaryLoggingHandler):
@ -822,6 +823,7 @@ def set_multiprocessing_logging_queue(queue):
def setup_multiprocessing_logging_listener(opts, queue=None):
global __MP_LOGGING_QUEUE_PROCESS
global __MP_LOGGING_LISTENER_CONFIGURED
global __MP_MAINPROCESS_ID
if __MP_IN_MAINPROCESS is False:
# We're not in the MainProcess, return! No logging listener setup shall happen
@ -830,6 +832,11 @@ def setup_multiprocessing_logging_listener(opts, queue=None):
if __MP_LOGGING_LISTENER_CONFIGURED is True:
return
if __MP_MAINPROCESS_ID is not None and __MP_MAINPROCESS_ID != os.getpid():
# We're not in the MainProcess, return! No logging listener setup shall happen
return
__MP_MAINPROCESS_ID = os.getpid()
__MP_LOGGING_QUEUE_PROCESS = multiprocessing.Process(
target=__process_multiprocessing_logging_queue,
args=(opts, queue or get_multiprocessing_logging_queue(),)
@ -967,6 +974,11 @@ def shutdown_multiprocessing_logging_listener(daemonizing=False):
if __MP_LOGGING_QUEUE_PROCESS is None:
return
if __MP_MAINPROCESS_ID is not None and __MP_MAINPROCESS_ID != os.getpid():
# We're not in the MainProcess, return! No logging listener setup shall happen
return
if __MP_LOGGING_QUEUE_PROCESS.is_alive():
logging.getLogger(__name__).debug('Stopping the multiprocessing logging queue listener')
try:

View file

@ -32,12 +32,12 @@ Connection module for Amazon Cloud Formation
# keep lint from choking on _get_conn and _cache_id
#pylint: disable=E0602
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import Salt libs
from salt.ext import six
import salt.utils.versions
log = logging.getLogger(__name__)
@ -72,7 +72,9 @@ def exists(name, region=None, key=None, keyid=None, profile=None):
'''
Check to see if a stack exists.
CLI example::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.exists mystack region=us-east-1
'''
@ -94,7 +96,9 @@ def describe(name, region=None, key=None, keyid=None, profile=None):
.. versionadded:: 2015.8.0
CLI example::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.describe mystack region=us-east-1
'''
@ -135,7 +139,9 @@ def create(name, template_body=None, template_url=None, parameters=None, notific
'''
Create a CFN stack.
CLI example to create a stack::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.create mystack template_url='https://s3.amazonaws.com/bucket/template.cft' \
region=us-east-1
@ -161,7 +167,9 @@ def update_stack(name, template_body=None, template_url=None, parameters=None, n
.. versionadded:: 2015.8.0
CLI example to update a stack::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.update_stack mystack template_url='https://s3.amazonaws.com/bucket/template.cft' \
region=us-east-1
@ -186,7 +194,9 @@ def delete(name, region=None, key=None, keyid=None, profile=None):
'''
Delete a CFN stack.
CLI example to delete a stack::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.delete mystack region=us-east-1
'''
@ -205,7 +215,9 @@ def get_template(name, region=None, key=None, keyid=None, profile=None):
'''
Check to see if attributes are set on a CFN stack.
CLI example::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.get_template mystack
'''
@ -228,7 +240,9 @@ def validate_template(template_body=None, template_url=None, region=None, key=No
.. versionadded:: 2015.8.0
CLI example::
CLI Example:
.. code-block:: bash
salt myminion boto_cfn.validate_template mystack-template
'''

File diff suppressed because it is too large Load diff

View file

@ -630,7 +630,6 @@ def _client_wrapper(attr, *args, **kwargs):
)
ret = func(*args, **kwargs)
except docker.errors.APIError as exc:
log.exception('Encountered error running API function %s', attr)
if catch_api_errors:
# Generic handling of Docker API errors
raise CommandExecutionError(

View file

@ -29,7 +29,7 @@ try:
except (NameError, KeyError):
import salt.modules.cmdmod
__salt__ = {
'cmd.run_all': salt.modules.cmdmod._run_all_quiet
'cmd.run_all': salt.modules.cmdmod.run_all
}
@ -95,8 +95,7 @@ def __execute_cmd(command, host=None,
output_loglevel='quiet')
if cmd['retcode'] != 0:
log.warning('racadm return an exit code \'{0}\'.'
.format(cmd['retcode']))
log.warning('racadm returned an exit code of %s', cmd['retcode'])
return False
return True
@ -129,8 +128,7 @@ def __execute_ret(command, host=None,
output_loglevel='quiet')
if cmd['retcode'] != 0:
log.warning('racadm return an exit code \'{0}\'.'
.format(cmd['retcode']))
log.warning('racadm returned an exit code of %s', cmd['retcode'])
else:
fmtlines = []
for l in cmd['stdout'].splitlines():
@ -193,8 +191,7 @@ def system_info(host=None,
module=module)
if cmd['retcode'] != 0:
log.warning('racadm return an exit code \'{0}\'.'
.format(cmd['retcode']))
log.warning('racadm returned an exit code of %s', cmd['retcode'])
return cmd
return __parse_drac(cmd['stdout'])
@ -272,8 +269,7 @@ def network_info(host=None,
module=module)
if cmd['retcode'] != 0:
log.warning('racadm return an exit code \'{0}\'.'
.format(cmd['retcode']))
log.warning('racadm returned an exit code of %s', cmd['retcode'])
cmd['stdout'] = 'Network:\n' + 'Device = ' + module + '\n' + \
cmd['stdout']
@ -395,8 +391,7 @@ def list_users(host=None,
admin_password=admin_password)
if cmd['retcode'] != 0:
log.warning('racadm return an exit code \'{0}\'.'
.format(cmd['retcode']))
log.warning('racadm returned an exit code of %s', cmd['retcode'])
for user in cmd['stdout'].splitlines():
if not user.startswith('cfg'):
@ -444,7 +439,7 @@ def delete_user(username,
admin_password=admin_password)
else:
log.warning('\'{0}\' does not exist'.format(username))
log.warning('User \'%s\' does not exist', username)
return False
@ -485,7 +480,7 @@ def change_password(username, password, uid=None, host=None,
host=host, admin_username=admin_username,
admin_password=admin_password, module=module)
else:
log.warning('\'{0}\' does not exist'.format(username))
log.warning('racadm: user \'%s\' does not exist', username)
return False
@ -567,7 +562,7 @@ def create_user(username, password, permissions,
users = list_users()
if username in users:
log.warning('\'{0}\' already exists'.format(username))
log.warning('racadm: user \'%s\' already exists', username)
return False
for idx in six.iterkeys(users):

View file

@ -287,7 +287,7 @@ def snap_eradicate(name, suffix=None):
Eradicate a deleted volume snapshot on a Pure Storage FlashArray.
Will retunr False is snapshot is not in a deleted state.
Will return False if snapshot is not in a deleted state.
.. versionadded:: Oxygen
@ -300,7 +300,7 @@ def snap_eradicate(name, suffix=None):
.. code-block:: bash
salt '*' purefa.snap_delete foo suffix=snap eradicate=True
salt '*' purefa.snap_eradicate foo suffix=snap
'''
array = _get_system()

View file

@ -1820,7 +1820,7 @@ def get_repo_data(saltenv='base'):
serial = salt.payload.Serial(__opts__)
with salt.utils.files.fopen(repo_details.winrepo_file, 'rb') as repofile:
try:
repodata = salt.utils.data.decode(serial.loads(repofile.read()) or {})
repodata = salt.utils.data.decode(serial.loads(repofile.read(), encoding='utf-8') or {})
__context__['winrepo.data'] = repodata
return repodata
except Exception as exc:
@ -1843,7 +1843,7 @@ def _get_name_map(saltenv='base'):
return name_map
for k in name_map:
u_name_map[k.decode('utf-8')] = name_map[k]
u_name_map[k] = name_map[k]
return u_name_map

View file

@ -226,6 +226,9 @@ def strip_esc_sequence(txt):
from writing their own terminal manipulation commands
'''
if isinstance(txt, six.string_types):
return txt.replace('\033', '?')
try:
return txt.replace('\033', '?')
except UnicodeDecodeError:
return txt.replace(str('\033'), str('?')) # future lint: disable=blacklisted-function
else:
return txt

View file

@ -71,20 +71,34 @@ class NestDisplay(object):
endc,
suffix)
except UnicodeDecodeError:
return fmt.format(
indent,
color,
prefix,
salt.utils.stringutils.to_unicode(msg),
endc,
suffix)
try:
return fmt.format(
indent,
color,
prefix,
salt.utils.stringutils.to_unicode(msg),
endc,
suffix)
except UnicodeDecodeError:
# msg contains binary data that can't be decoded
return str(fmt).format( # future lint: disable=blacklisted-function
indent,
color,
prefix,
msg,
endc,
suffix)
def display(self, ret, indent, prefix, out):
'''
Recursively iterate down through data structures to determine output
'''
if isinstance(ret, bytes):
ret = salt.utils.stringutils.to_unicode(ret)
try:
ret = salt.utils.stringutils.to_unicode(ret)
except UnicodeDecodeError:
# ret contains binary data that can't be decoded
pass
if ret is None or ret is True or ret is False:
out.append(
@ -183,4 +197,11 @@ def output(ret, **kwargs):
base_indent = kwargs.get('nested_indent', 0) \
or __opts__.get('nested_indent', 0)
nest = NestDisplay(retcode=retcode)
return '\n'.join(nest.display(ret, base_indent, '', []))
lines = nest.display(ret, base_indent, '', [])
try:
return '\n'.join(lines)
except UnicodeDecodeError:
# output contains binary data that can't be decoded
return str('\n').join( # future lint: disable=blacklisted-function
[salt.utils.stringutils.to_str(x) for x in lines]
)

View file

@ -205,7 +205,7 @@ class Runner(RunnerClient):
if self.opts.get('eauth'):
if 'token' in self.opts:
try:
with salt.utils.files.fopen(os.path.join(self.opts['key_dir'], '.root_key'), 'r') as fp_:
with salt.utils.files.fopen(os.path.join(self.opts['cachedir'], '.root_key'), 'r') as fp_:
low['key'] = salt.utils.stringutils.to_unicode(fp_.readline())
except IOError:
low['token'] = self.opts['token']

View file

@ -1740,15 +1740,15 @@ def dns_check(addr, port, safe=False, ipv6=None):
def get_context(template, line, num_lines=5, marker=None):
# Late import to avoid circular import.
import salt.utils.versions
import salt.utils.templates
import salt.utils.stringutils
salt.utils.versions.warn_until(
'Neon',
'Use of \'salt.utils.get_context\' detected. This function '
'has been moved to \'salt.utils.templates.get_context\' as of '
'has been moved to \'salt.utils.stringutils.get_context\' as of '
'Salt Oxygen. This warning will be removed in Salt Neon.',
stacklevel=3
)
return salt.utils.templates.get_context(template, line, num_lines, marker)
return salt.utils.stringutils.get_context(template, line, num_lines, marker)
def get_master_key(key_user, opts, skip_perm_errors=False):

View file

@ -269,7 +269,13 @@ def shlex_split(s, **kwargs):
Only split if variable is a string
'''
if isinstance(s, six.string_types):
return shlex.split(s, **kwargs)
# On PY2, shlex.split will fail with unicode types if there are
# non-ascii characters in the string. So, we need to make sure we
# invoke it with a str type, and then decode the resulting string back
# to unicode to return it.
return salt.utils.data.decode(
shlex.split(salt.utils.stringutils.to_str(s), **kwargs)
)
else:
return s

View file

@ -114,28 +114,27 @@ def _post_processing(kwargs, skip_translate, invalid):
actual_volumes.sort()
if kwargs.get('port_bindings') is not None \
and (skip_translate is True or
all(x not in skip_translate
for x in ('port_bindings', 'expose', 'ports'))):
and all(x not in skip_translate
for x in ('port_bindings', 'expose', 'ports')):
# Make sure that all ports defined in "port_bindings" are included in
# the "ports" param.
auto_ports = list(kwargs['port_bindings'])
if auto_ports:
actual_ports = []
# Sort list to make unit tests more reliable
for port in auto_ports:
if port in actual_ports:
continue
if isinstance(port, six.integer_types):
actual_ports.append((port, 'tcp'))
else:
port, proto = port.split('/')
actual_ports.append((int(port), proto))
actual_ports.sort()
actual_ports = [
port if proto == 'tcp' else '{}/{}'.format(port, proto) for (port, proto) in actual_ports
]
kwargs.setdefault('ports', actual_ports)
ports_to_bind = list(kwargs['port_bindings'])
if ports_to_bind:
ports_to_open = set(kwargs.get('ports', []))
ports_to_open.update([helpers.get_port_def(x) for x in ports_to_bind])
kwargs['ports'] = list(ports_to_open)
if 'ports' in kwargs \
and all(x not in skip_translate for x in ('expose', 'ports')):
# TCP ports should only be passed as the port number. Normalize the
# input so a port definition of 80/tcp becomes just 80 instead of
# (80, 'tcp').
for index, _ in enumerate(kwargs['ports']):
try:
if kwargs['ports'][index][1] == 'tcp':
kwargs['ports'][index] = ports_to_open[index][0]
except TypeError:
continue
# Functions below must match names of docker-py arguments
@ -552,13 +551,7 @@ def ports(val, **kwargs): # pylint: disable=unused-argument
raise SaltInvocationError(exc.__str__())
new_ports.update([helpers.get_port_def(x, proto)
for x in range(range_start, range_end + 1)])
ordered_new_ports = [
port if proto == 'tcp' else (port, proto) for (port, proto) in sorted(
[(new_port, 'tcp') if isinstance(new_port, six.integer_types) else new_port
for new_port in new_ports]
)
]
return ordered_new_ports
return list(new_ports)
def privileged(val, **kwargs): # pylint: disable=unused-argument

View file

@ -7,7 +7,6 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo
from __future__ import absolute_import, print_function, unicode_literals
import copy
import contextlib
import distutils
import errno
import fnmatch
import glob
@ -90,9 +89,9 @@ log = logging.getLogger(__name__)
try:
import git
import gitdb
HAS_GITPYTHON = True
GITPYTHON_VERSION = _LooseVersion(git.__version__)
except ImportError:
HAS_GITPYTHON = False
GITPYTHON_VERSION = None
try:
# Squelch warning on cent7 due to them upgrading cffi
@ -100,7 +99,31 @@ try:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
import pygit2
HAS_PYGIT2 = True
PYGIT2_VERSION = _LooseVersion(pygit2.__version__)
LIBGIT2_VERSION = _LooseVersion(pygit2.LIBGIT2_VERSION)
# Work around upstream bug where bytestrings were being decoded using the
# default encoding (which is usually ascii on Python 2). This was fixed
# on 2 Feb 2018, so releases prior to 0.26.2 will need a workaround.
if PYGIT2_VERSION <= _LooseVersion('0.26.2'):
try:
import pygit2.ffi
import pygit2.remote
except ImportError:
# If we couldn't import these, then we're using an old enough
# version where ffi isn't in use and this workaround would be
# useless.
pass
else:
def __maybe_string(ptr):
if not ptr:
return None
return pygit2.ffi.string(ptr).decode('utf-8')
pygit2.remote.maybe_string = __maybe_string
# Older pygit2 releases did not raise a specific exception class, this
# try/except makes Salt's exception catching work on any supported release.
try:
GitError = pygit2.errors.GitError
except AttributeError:
@ -111,16 +134,17 @@ except Exception as exc:
# to rebuild itself against the newer cffi). Therefore, we simply will
# catch a generic exception, and log the exception if it is anything other
# than an ImportError.
HAS_PYGIT2 = False
PYGIT2_VERSION = None
LIBGIT2_VERSION = None
if not isinstance(exc, ImportError):
log.exception('Failed to import pygit2')
# pylint: enable=import-error
# Minimum versions for backend providers
GITPYTHON_MINVER = '0.3'
PYGIT2_MINVER = '0.20.3'
LIBGIT2_MINVER = '0.20.0'
GITPYTHON_MINVER = _LooseVersion('0.3')
PYGIT2_MINVER = _LooseVersion('0.20.3')
LIBGIT2_MINVER = _LooseVersion('0.20.0')
def enforce_types(key, val):
@ -1841,10 +1865,7 @@ class Pygit2(GitProvider):
'''
Assign attributes for pygit2 callbacks
'''
# pygit2 radically changed fetching in 0.23.2
pygit2_version = pygit2.__version__
if distutils.version.LooseVersion(pygit2_version) >= \
distutils.version.LooseVersion('0.23.2'):
if PYGIT2_VERSION >= _LooseVersion('0.23.2'):
self.remotecallbacks = pygit2.RemoteCallbacks(
credentials=self.credentials)
if not self.ssl_verify:
@ -1859,7 +1880,7 @@ class Pygit2(GitProvider):
'pygit2 does not support disabling the SSL certificate '
'check in versions prior to 0.23.2 (installed: {0}). '
'Fetches for self-signed certificates will fail.'.format(
pygit2_version
PYGIT2_VERSION
)
)
@ -2435,10 +2456,10 @@ class GitBase(object):
Check if GitPython is available and at a compatible version (>= 0.3.0)
'''
def _recommend():
if HAS_PYGIT2 and 'pygit2' in self.git_providers:
if PYGIT2_VERSION and 'pygit2' in self.git_providers:
log.error(_RECOMMEND_PYGIT2, self.role, self.role)
if not HAS_GITPYTHON:
if not GITPYTHON_VERSION:
if not quiet:
log.error(
'%s is configured but could not be loaded, is GitPython '
@ -2449,18 +2470,14 @@ class GitBase(object):
elif 'gitpython' not in self.git_providers:
return False
# pylint: disable=no-member
gitver = _LooseVersion(git.__version__)
minver = _LooseVersion(GITPYTHON_MINVER)
# pylint: enable=no-member
errors = []
if gitver < minver:
if GITPYTHON_VERSION < GITPYTHON_MINVER:
errors.append(
'{0} is configured, but the GitPython version is earlier than '
'{1}. Version {2} detected.'.format(
self.role,
GITPYTHON_MINVER,
git.__version__
GITPYTHON_VERSION
)
)
if not salt.utils.path.which('git'):
@ -2486,10 +2503,10 @@ class GitBase(object):
Pygit2 must be at least 0.20.3 and libgit2 must be at least 0.20.0.
'''
def _recommend():
if HAS_GITPYTHON and 'gitpython' in self.git_providers:
if GITPYTHON_VERSION and 'gitpython' in self.git_providers:
log.error(_RECOMMEND_GITPYTHON, self.role, self.role)
if not HAS_PYGIT2:
if not PYGIT2_VERSION:
if not quiet:
log.error(
'%s is configured but could not be loaded, are pygit2 '
@ -2500,31 +2517,23 @@ class GitBase(object):
elif 'pygit2' not in self.git_providers:
return False
# pylint: disable=no-member
pygit2ver = _LooseVersion(pygit2.__version__)
pygit2_minver = _LooseVersion(PYGIT2_MINVER)
libgit2ver = _LooseVersion(pygit2.LIBGIT2_VERSION)
libgit2_minver = _LooseVersion(LIBGIT2_MINVER)
# pylint: enable=no-member
errors = []
if pygit2ver < pygit2_minver:
if PYGIT2_VERSION < PYGIT2_MINVER:
errors.append(
'{0} is configured, but the pygit2 version is earlier than '
'{1}. Version {2} detected.'.format(
self.role,
PYGIT2_MINVER,
pygit2.__version__
PYGIT2_VERSION
)
)
if libgit2ver < libgit2_minver:
if LIBGIT2_VERSION < LIBGIT2_MINVER:
errors.append(
'{0} is configured, but the libgit2 version is earlier than '
'{1}. Version {2} detected.'.format(
self.role,
LIBGIT2_MINVER,
pygit2.LIBGIT2_VERSION
LIBGIT2_VERSION
)
)
if not salt.utils.path.which('git'):

View file

@ -850,6 +850,24 @@ class SerializerExtension(Extension, object):
value = six.text_type(value)
try:
return salt.utils.data.decode(salt.utils.yaml.safe_load(value))
except salt.utils.yaml.YAMLError as exc:
msg = 'Encountered error loading yaml: '
try:
# Reported line is off by one, add 1 to correct it
line = exc.problem_mark.line + 1
buf = exc.problem_mark.buffer
problem = exc.problem
except AttributeError:
# No context information available in the exception, fall back
# to the stringified version of the exception.
msg += six.text_type(exc)
else:
msg += '{0}\n'.format(problem)
msg += salt.utils.stringutils.get_context(
buf,
line,
marker=' <======================')
raise TemplateRuntimeError(msg)
except AttributeError:
raise TemplateRuntimeError(
'Unable to load yaml from {0}'.format(value))

View file

@ -717,7 +717,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
# Remove it from config so it inherits from log_file
self.config.pop(self._logfile_config_setting_name_)
if self.config['verify_env']:
if self.config['verify_env'] and self.config['log_level'] not in ('quiet', ):
# Verify the logfile if it was explicitly set but do not try to
# verify the default
if logfile is not None and not logfile.startswith(('tcp://', 'udp://', 'file://')):

View file

@ -28,7 +28,8 @@ def _load_libcrypto():
Load OpenSSL libcrypto
'''
if sys.platform.startswith('win'):
return cdll.LoadLibrary('libeay32')
# cdll.LoadLibrary on windows requires an 'str' argument
return cdll.LoadLibrary(str('libeay32')) # future lint: disable=blacklisted-function
elif getattr(sys, 'frozen', False) and salt.utils.platform.is_smartos():
return cdll.LoadLibrary(glob.glob(os.path.join(
os.path.dirname(sys.executable),

View file

@ -151,6 +151,44 @@ class Schedule(object):
schedule.update(opts_schedule)
return schedule
def _check_max_running(self, func, data, opts):
'''
Return the schedule data structure
'''
# Check to see if there are other jobs with this
# signature running. If there are more than maxrunning
# jobs present then don't start another.
# If jid_include is False for this job we can ignore all this
# NOTE--jid_include defaults to True, thus if it is missing from the data
# dict we treat it like it was there and is True
data['run'] = True
if 'jid_include' not in data or data['jid_include']:
jobcount = 0
for job in salt.utils.minion.running(self.opts):
if 'schedule' in job:
log.debug(
'schedule.handle_func: Checking job against fun '
'%s: %s', func, job
)
if data['name'] == job['schedule'] \
and salt.utils.process.os_is_running(job['pid']):
jobcount += 1
log.debug(
'schedule.handle_func: Incrementing jobcount, '
'now %s, maxrunning is %s',
jobcount, data['maxrunning']
)
if jobcount >= data['maxrunning']:
log.debug(
'schedule.handle_func: The scheduled job '
'%s was not started, %s already running',
data['name'], data['maxrunning']
)
data['_skip_reason'] = 'maxrunning'
data['run'] = False
return data
return data
def persist(self):
'''
Persist the modified schedule into <<configdir>>/<<default_include>>/_schedule.conf
@ -350,22 +388,27 @@ class Schedule(object):
data['name'] = name
log.info('Running Job: %s', name)
multiprocessing_enabled = self.opts.get('multiprocessing', True)
if multiprocessing_enabled:
thread_cls = salt.utils.process.SignalHandlingMultiprocessingProcess
else:
thread_cls = threading.Thread
if not self.standalone:
data = self._check_max_running(func, data, self.opts)
if multiprocessing_enabled:
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
run = data['run']
if run:
multiprocessing_enabled = self.opts.get('multiprocessing', True)
if multiprocessing_enabled:
thread_cls = salt.utils.process.SignalHandlingMultiprocessingProcess
else:
thread_cls = threading.Thread
if multiprocessing_enabled:
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
proc = thread_cls(target=self.handle_func, args=(multiprocessing_enabled, func, data))
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
proc.start()
proc.join()
else:
proc = thread_cls(target=self.handle_func, args=(multiprocessing_enabled, func, data))
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
proc.start()
proc.join()
else:
proc = thread_cls(target=self.handle_func, args=(multiprocessing_enabled, func, data))
proc.start()
def enable_schedule(self):
'''
@ -538,36 +581,6 @@ class Schedule(object):
ret['jid']
)
# Check to see if there are other jobs with this
# signature running. If there are more than maxrunning
# jobs present then don't start another.
# If jid_include is False for this job we can ignore all this
# NOTE--jid_include defaults to True, thus if it is missing from the data
# dict we treat it like it was there and is True
if 'jid_include' not in data or data['jid_include']:
jobcount = 0
for job in salt.utils.minion.running(self.opts):
if 'schedule' in job:
log.debug(
'schedule.handle_func: Checking job against fun '
'%s: %s', ret['fun'], job
)
if ret['schedule'] == job['schedule'] \
and salt.utils.process.os_is_running(job['pid']):
jobcount += 1
log.debug(
'schedule.handle_func: Incrementing jobcount, '
'now %s, maxrunning is %s',
jobcount, data['maxrunning']
)
if jobcount >= data['maxrunning']:
log.debug(
'schedule.handle_func: The scheduled job '
'%s was not started, %s already running',
ret['schedule'], data['maxrunning']
)
return False
if multiprocessing_enabled and not salt.utils.platform.is_windows():
# Reconfigure multiprocessing logging after daemonizing
log_setup.setup_multiprocessing_logging()
@ -786,9 +799,14 @@ class Schedule(object):
'skip_function',
'skip_during_range']
for job, data in six.iteritems(schedule):
# Clear out _skip_reason from previous runs
if '_skip_reason' in data:
del data['_skip_reason']
run = False
if job in _hidden and not data:
if job in _hidden:
continue
if not isinstance(data, dict):
@ -797,9 +815,6 @@ class Schedule(object):
job, type(data)
)
continue
# Job is disabled, continue
if 'enabled' in data and not data['enabled']:
continue
if 'function' in data:
func = data['function']
elif 'func' in data:
@ -922,6 +937,8 @@ class Schedule(object):
if interval < self.loop_interval:
self.loop_interval = interval
data['_next_scheduled_fire_time'] = now + data['_seconds']
elif 'once' in data:
if data['_next_fire_time'] and \
data['_next_fire_time'] < now - self.opts['loop_interval'] and \
@ -937,6 +954,8 @@ class Schedule(object):
once_fmt)
data['_next_fire_time'] = int(
time.mktime(once.timetuple()))
data['_next_scheduled_fire_time'] = int(
time.mktime(once.timetuple()))
except (TypeError, ValueError):
log.error('Date string could not be parsed: %s, %s',
data['once'], once_fmt)
@ -1018,6 +1037,8 @@ class Schedule(object):
if not data['_next_fire_time']:
data['_next_fire_time'] = when
data['_next_scheduled_fire_time'] = when
if data['_next_fire_time'] < when and \
not run and \
not data['_run']:
@ -1073,6 +1094,8 @@ class Schedule(object):
if not data['_next_fire_time']:
data['_next_fire_time'] = when
data['_next_scheduled_fire_time'] = when
if data['_next_fire_time'] < when and \
not data['_run']:
data['_next_fire_time'] = when
@ -1089,6 +1112,8 @@ class Schedule(object):
try:
data['_next_fire_time'] = int(
croniter.croniter(data['cron'], now).get_next())
data['_next_scheduled_fire_time'] = int(
croniter.croniter(data['cron'], now).get_next())
except (ValueError, KeyError):
log.error('Invalid cron string. Ignoring')
continue
@ -1172,6 +1197,7 @@ class Schedule(object):
if now <= start or now >= end:
run = True
else:
data['_skip_reason'] = 'in_skip_range'
run = False
else:
if start <= now <= end:
@ -1181,6 +1207,7 @@ class Schedule(object):
run = True
func = self.skip_function
else:
data['_skip_reason'] = 'not_in_range'
run = False
else:
log.error(
@ -1247,6 +1274,9 @@ class Schedule(object):
func = self.skip_function
else:
run = False
data['_skip_reason'] = 'in_skip_range'
data['_skipped_time'] = now
data['_skipped'] = True
else:
run = True
else:
@ -1282,6 +1312,9 @@ class Schedule(object):
func = self.skip_function
else:
run = False
data['_skip_reason'] = 'skip_explicit'
data['_skipped_time'] = now
data['_skipped'] = True
else:
run = True
@ -1322,22 +1355,33 @@ class Schedule(object):
returners = self.returners
self.returners = {}
try:
if multiprocessing_enabled:
thread_cls = salt.utils.process.SignalHandlingMultiprocessingProcess
# Job is disabled, continue
if 'enabled' in data and not data['enabled']:
log.debug('Job: %s is disabled', job)
data['_skip_reason'] = 'disabled'
continue
else:
thread_cls = threading.Thread
proc = thread_cls(target=self.handle_func, args=(multiprocessing_enabled, func, data))
if not self.standalone:
data = self._check_max_running(func, data, self.opts)
if multiprocessing_enabled:
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
proc.start()
else:
proc.start()
run = data['run']
if run:
if multiprocessing_enabled:
thread_cls = salt.utils.process.SignalHandlingMultiprocessingProcess
else:
thread_cls = threading.Thread
proc = thread_cls(target=self.handle_func, args=(multiprocessing_enabled, func, data))
if multiprocessing_enabled:
proc.join()
if multiprocessing_enabled:
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
proc.start()
else:
proc.start()
if multiprocessing_enabled:
proc.join()
finally:
if '_seconds' in data:
data['_next_fire_time'] = now + data['_seconds']

View file

@ -5,6 +5,7 @@ Functions for manipulating or otherwise processing strings
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import base64
import errno
import fnmatch
import logging
@ -203,7 +204,7 @@ def is_binary(data):
@jinja_filter('random_str')
def random(size=32):
key = os.urandom(size)
return key.encode('base64').replace('\n', '')[:size]
return to_unicode(base64.b64encode(key).replace(b'\n', b'')[:size])
@jinja_filter('contains_whitespace')
@ -429,3 +430,38 @@ def print_cli(msg, retries=10, step=0.01):
else:
raise
break
def get_context(template, line, num_lines=5, marker=None):
'''
Returns debugging context around a line in a given string
Returns:: string
'''
template_lines = template.splitlines()
num_template_lines = len(template_lines)
# In test mode, a single line template would return a crazy line number like,
# 357. Do this sanity check and if the given line is obviously wrong, just
# return the entire template
if line > num_template_lines:
return template
context_start = max(0, line - num_lines - 1) # subt 1 for 0-based indexing
context_end = min(num_template_lines, line + num_lines)
error_line_in_context = line - context_start - 1 # subtr 1 for 0-based idx
buf = []
if context_start > 0:
buf.append('[...]')
error_line_in_context += 1
buf.extend(template_lines[context_start:context_end])
if context_end < num_template_lines:
buf.append('[...]')
if marker:
buf[error_line_in_context] += marker
return '---\n{0}\n---'.format('\n'.join(buf))

View file

@ -95,41 +95,6 @@ class AliasedModule(object):
return getattr(self.wrapped, name)
def get_context(template, line, num_lines=5, marker=None):
'''
Returns debugging context around a line in a given string
Returns:: string
'''
template_lines = template.splitlines()
num_template_lines = len(template_lines)
# in test, a single line template would return a crazy line number like,
# 357. do this sanity check and if the given line is obviously wrong, just
# return the entire template
if line > num_template_lines:
return template
context_start = max(0, line - num_lines - 1) # subt 1 for 0-based indexing
context_end = min(num_template_lines, line + num_lines)
error_line_in_context = line - context_start - 1 # subtr 1 for 0-based idx
buf = []
if context_start > 0:
buf.append('[...]')
error_line_in_context += 1
buf.extend(template_lines[context_start:context_end])
if context_end < num_template_lines:
buf.append('[...]')
if marker:
buf[error_line_in_context] += marker
return '---\n{0}\n---'.format('\n'.join(buf))
def wrap_tmpl_func(render_str):
def render_tmpl(tmplsrc,
@ -202,11 +167,9 @@ def wrap_tmpl_func(render_str):
tmplsrc.close()
try:
output = render_str(tmplstr, context, tmplpath)
if six.PY2:
output = output.encode(SLS_ENCODING)
if salt.utils.platform.is_windows():
newline = False
if salt.utils.stringutils.to_unicode(output).endswith(('\n', os.linesep)):
if salt.utils.stringutils.to_unicode(output, encoding=SLS_ENCODING).endswith(('\n', os.linesep)):
newline = True
# Write out with Windows newlines
output = os.linesep.join(output.splitlines())
@ -223,9 +186,7 @@ def wrap_tmpl_func(render_str):
if to_str: # then render as string
return dict(result=True, data=output)
with tempfile.NamedTemporaryFile('wb', delete=False, prefix=salt.utils.files.TEMPFILE_PREFIX) as outf:
if six.PY3:
output = output.encode(SLS_ENCODING)
outf.write(output)
outf.write(salt.utils.stringutils.to_bytes(output, encoding=SLS_ENCODING))
# Note: If nothing is replaced or added by the rendering
# function, then the contents of the output file will
# be exactly the same as the input.
@ -315,7 +276,7 @@ def _get_jinja_error(trace, context=None):
out = '\n{0}\n'.format(msg.splitlines()[0])
with salt.utils.files.fopen(template_path) as fp_:
template_contents = salt.utils.stringutils.to_unicode(fp_.read())
out += get_context(
out += salt.utils.stringutils.get_context(
template_contents,
line,
marker=' <======================')
@ -417,15 +378,6 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
template = jinja_env.from_string(tmplstr)
template.globals.update(decoded_context)
output = template.render(**decoded_context)
except jinja2.exceptions.TemplateSyntaxError as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ''
raise SaltRenderError(
'Jinja syntax error: {0}{1}'.format(exc, out),
line,
tmplstr)
except jinja2.exceptions.UndefinedError as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
out = _get_jinja_error(trace, context=decoded_context)[1]
@ -436,6 +388,16 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
'Jinja variable {0}{1}'.format(
exc, out),
buf=tmplstr)
except (jinja2.exceptions.TemplateRuntimeError,
jinja2.exceptions.TemplateSyntaxError) as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)
if not line:
tmplstr = ''
raise SaltRenderError(
'Jinja syntax error: {0}{1}'.format(exc, out),
line,
tmplstr)
except (SaltInvocationError, CommandExecutionError) as exc:
trace = traceback.extract_tb(sys.exc_info()[2])
line, out = _get_jinja_error(trace, context=decoded_context)

View file

@ -96,7 +96,7 @@ def _get_vault_connection():
Get the connection details for calling Vault, from local configuration if
it exists, or from the master otherwise
'''
if 'vault' in __opts__ and __opts__.get('__role', 'minion') == 'master':
def _use_local_config():
log.debug('Using Vault connection details from local config')
try:
if __opts__['vault']['auth']['method'] == 'approle':
@ -121,6 +121,11 @@ def _get_vault_connection():
except KeyError as err:
errmsg = 'Minion has "vault" config section, but could not find key "{0}" within'.format(err.message)
raise salt.exceptions.CommandExecutionError(errmsg)
if 'vault' in __opts__ and __opts__.get('__role', 'minion') == 'master':
return _use_local_config()
elif '_ssh_version' in __opts__:
return _use_local_config()
else:
log.debug('Contacting master for Vault connection details')
return _get_token_and_url_from_master()

View file

@ -31,7 +31,6 @@ import salt.utils.files
import salt.utils.path
import salt.utils.platform
import salt.utils.user
import salt.utils.versions
log = logging.getLogger(__name__)
@ -204,28 +203,16 @@ def verify_env(
permissive=False,
pki_dir='',
skip_extra=False,
root_dir=ROOT_DIR,
sensitive_dirs=None):
root_dir=ROOT_DIR):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if pki_dir:
salt.utils.versions.warn_until(
'Neon',
'Use of \'pki_dir\' was detected: \'pki_dir\' has been deprecated '
'in favor of \'sensitive_dirs\'. Support for \'pki_dir\' will be '
'removed in Salt Neon.'
)
sensitive_dirs = sensitive_dirs or []
sensitive_dirs.append(list(pki_dir))
if salt.utils.platform.is_windows():
return win_verify_env(root_dir,
dirs,
permissive=permissive,
skip_extra=skip_extra,
sensitive_dirs=sensitive_dirs)
skip_extra=skip_extra)
import pwd # after confirming not running Windows
try:
pwnam = pwd.getpwnam(user)
@ -300,11 +287,10 @@ def verify_env(
# to read in what it needs to integrate.
#
# If the permissions aren't correct, default to the more secure 700.
# If acls are enabled, the sensitive_dirs (i.e. pki_dir, key_dir) needs to
# remain readable, this is still secure because the private keys are still
# only readable by the user running the master
sensitive_dirs = sensitive_dirs or []
if dir_ in sensitive_dirs:
# If acls are enabled, the pki_dir needs to remain readable, this
# is still secure because the private keys are still only readable
# by the user running the master
if dir_ == pki_dir:
smode = stat.S_IMODE(mode.st_mode)
if smode != 448 and smode != 488:
if os.access(dir_, os.W_OK):
@ -555,22 +541,11 @@ def win_verify_env(
dirs,
permissive=False,
pki_dir='',
skip_extra=False,
sensitive_dirs=None):
skip_extra=False):
'''
Verify that the named directories are in place and that the environment
can shake the salt
'''
if pki_dir:
salt.utils.versions.warn_until(
'Neon',
'Use of \'pki_dir\' was detected: \'pki_dir\' has been deprecated '
'in favor of \'sensitive_dirs\'. Support for \'pki_dir\' will be '
'removed in Salt Neon.'
)
sensitive_dirs = sensitive_dirs or []
sensitive_dirs.append(list(pki_dir))
import salt.utils.win_functions
import salt.utils.win_dacl
import salt.utils.path
@ -647,9 +622,8 @@ def win_verify_env(
sys.stderr.write(msg.format(dir_, err))
sys.exit(err.errno)
# The senitive_dirs (i.e. pki_dir, key_dir) gets its own permissions
sensitive_dirs = sensitive_dirs or []
if dir_ in sensitive_dirs:
# The PKI dir gets its own permissions
if dir_ == pki_dir:
try:
# Make Administrators group the owner
salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')

View file

@ -14,7 +14,8 @@ except (ImportError, ValueError):
HAS_WIN32 = False
if HAS_WIN32:
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
kernel32 = ctypes.WinDLL(str('kernel32'), # future lint: disable=blacklisted-function
use_last_error=True)
# Although utils are often directly imported, it is also possible to use the

View file

@ -46,8 +46,8 @@ def __virtual__():
if HAS_WIN32:
# ctypes definitions
kernel32 = ctypes.WinDLL('kernel32')
advapi32 = ctypes.WinDLL('advapi32')
kernel32 = ctypes.WinDLL(str('kernel32')) # future lint: disable=blacklisted-function
advapi32 = ctypes.WinDLL(str('advapi32')) # future lint: disable=blacklisted-function
INVALID_HANDLE_VALUE = wintypes.HANDLE(-1).value
INVALID_DWORD_VALUE = wintypes.DWORD(-1).value # ~WinAPI

View file

@ -76,18 +76,25 @@ class SaltYamlSafeLoader(yaml.SafeLoader):
self.flatten_mapping(node)
context = 'while constructing a mapping'
mapping = self.dictclass()
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError:
err = ('While constructing a mapping {0} found unacceptable '
'key {1}').format(node.start_mark, key_node.start_mark)
raise ConstructorError(err)
raise ConstructorError(
context,
node.start_mark,
"found unacceptable key {0}".format(key_node.value),
key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
raise ConstructorError('Conflicting ID \'{0}\''.format(key))
raise ConstructorError(
context,
node.start_mark,
"found conflicting ID '{0}'".format(key),
key_node.start_mark)
mapping[key] = value
return mapping

View file

@ -36,7 +36,7 @@ class GCETest(ShellCase):
provider = 'gce'
providers = self.run_cloud('--list-providers')
# Create the cloud instance name to be used throughout the tests
self.INSTANCE_NAME = generate_random_name('cloud-test-')
self.INSTANCE_NAME = generate_random_name('cloud-test-').lower()
if profile_str not in providers:
self.skipTest(

View file

@ -45,7 +45,7 @@ def __has_required_azure():
else:
version = LooseVersion(azure.common.__version__)
if REQUIRED_AZURE <= version:
if LooseVersion(REQUIRED_AZURE) <= version:
return True
return False

View file

@ -0,0 +1 @@


View file

@ -121,7 +121,7 @@ class CMDModuleTest(ModuleCase):
'''
self.assertEqual(self.run_function('cmd.run',
['bad_command --foo']).rstrip(),
'ERROR: This shell command is not permitted: "bad_command --foo"')
'ERROR: The shell command "bad_command --foo" is not permitted')
def test_script(self):
'''

View file

@ -87,24 +87,27 @@ from tests.support.unit import skipIf
# Import Salt libs
import salt.utils.path
import salt.utils.platform
from salt.utils.gitfs import GITPYTHON_MINVER, PYGIT2_MINVER
from salt.utils.versions import LooseVersion
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES as VIRTUALENV_NAMES
from salt.ext.six.moves import range # pylint: disable=redefined-builtin
from salt.utils.gitfs import (
GITPYTHON_VERSION,
GITPYTHON_MINVER,
PYGIT2_VERSION,
PYGIT2_MINVER,
LIBGIT2_VERSION,
LIBGIT2_MINVER
)
# Check for requisite components
try:
import git
HAS_GITPYTHON = \
LooseVersion(git.__version__) >= LooseVersion(GITPYTHON_MINVER)
HAS_GITPYTHON = GITPYTHON_VERSION >= GITPYTHON_MINVER
except ImportError:
HAS_GITPYTHON = False
try:
import pygit2
HAS_PYGIT2 = \
LooseVersion(pygit2.__version__) >= LooseVersion(PYGIT2_MINVER)
except ImportError:
HAS_PYGIT2 = PYGIT2_VERSION >= PYGIT2_MINVER \
and LIBGIT2_VERSION >= LIBGIT2_MINVER
except AttributeError:
HAS_PYGIT2 = False
HAS_SSHD = bool(salt.utils.path.which('sshd'))
@ -419,7 +422,7 @@ class TestGitPythonAuthenticatedHTTP(TestGitPythonHTTP, GitPythonMixin):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@skip_if_not_root
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} required'.format(PYGIT2_MINVER))
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(not HAS_SSHD, 'sshd not present')
class TestPygit2SSH(GitPillarSSHTestBase):
'''
@ -433,12 +436,6 @@ class TestPygit2SSH(GitPillarSSHTestBase):
username = USERNAME
passphrase = PASSWORD
def setUp(self):
super(TestPygit2SSH, self).setUp()
if self.is_el7(): # pylint: disable=E1120
self.skipTest(
'skipped until EPEL7 fixes pygit2/libgit2 version mismatch')
@requires_system_grains
def test_single_source(self, grains):
'''
@ -1199,19 +1196,13 @@ class TestPygit2SSH(GitPillarSSHTestBase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@skip_if_not_root
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} required'.format(PYGIT2_MINVER))
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(not HAS_NGINX, 'nginx not present')
@skipIf(not HAS_VIRTUALENV, 'virtualenv not present')
class TestPygit2HTTP(GitPillarHTTPTestBase):
'''
Test git_pillar with pygit2 using SSH authentication
'''
def setUp(self):
super(TestPygit2HTTP, self).setUp()
if self.is_el7(): # pylint: disable=E1120
self.skipTest(
'skipped until EPEL7 fixes pygit2/libgit2 version mismatch')
def test_single_source(self):
'''
Test using a single ext_pillar repo
@ -1452,7 +1443,7 @@ class TestPygit2HTTP(GitPillarHTTPTestBase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@skip_if_not_root
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} required'.format(PYGIT2_MINVER))
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(not HAS_NGINX, 'nginx not present')
@skipIf(not HAS_VIRTUALENV, 'virtualenv not present')
class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
@ -1465,12 +1456,6 @@ class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
user = USERNAME
password = PASSWORD
def setUp(self):
super(TestPygit2AuthenticatedHTTP, self).setUp()
if self.is_el7(): # pylint: disable=E1120
self.skipTest(
'skipped until EPEL7 fixes pygit2/libgit2 version mismatch')
def test_single_source(self):
'''
Test using a single ext_pillar repo

View file

@ -52,6 +52,9 @@ class SchedulerEvalTest(ModuleCase, SaltReturnAssertsMixin):
self.schedule = salt.utils.schedule.Schedule(copy.deepcopy(DEFAULT_CONFIG), functions, returners={})
self.schedule.opts['loop_interval'] = 1
def tearDown(self):
del self.schedule
def test_eval(self):
'''
verify that scheduled job runs

View file

@ -5,6 +5,9 @@ from __future__ import absolute_import
import copy
import logging
import os
import time
import dateutil.parser as dateutil_parser
# Import Salt Testing libs
from tests.support.case import ModuleCase
@ -41,6 +44,9 @@ class SchedulerPostponeTest(ModuleCase, SaltReturnAssertsMixin):
self.schedule = salt.utils.schedule.Schedule(copy.deepcopy(DEFAULT_CONFIG), functions, returners={})
self.schedule.opts['loop_interval'] = 1
def tearDown(self):
del self.schedule
def test_postpone(self):
'''
verify that scheduled job is postponed until the specified time.
@ -55,7 +61,7 @@ class SchedulerPostponeTest(ModuleCase, SaltReturnAssertsMixin):
}
# 11/29/2017 4pm
run_time = 1512000000
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 4:00pm').timetuple()))
# 5 minute delay
delay = 300

View file

@ -44,6 +44,9 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
self.schedule = salt.utils.schedule.Schedule(copy.deepcopy(DEFAULT_CONFIG), functions, returners={})
self.schedule.opts['loop_interval'] = 1
def tearDown(self):
del self.schedule
def test_skip(self):
'''
verify that scheduled job is skipped at the specified time
@ -67,6 +70,8 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
self.schedule.eval(now=run_time)
ret = self.schedule.job_status('job1')
self.assertNotIn('_last_run', ret)
self.assertEqual(ret['_skip_reason'], 'skip_explicit')
self.assertEqual(ret['_skipped_time'], run_time)
# Run 11/29/2017 at 5pm
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 5:00pm').timetuple()))
@ -84,8 +89,8 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
'function': 'test.ping',
'hours': '1',
'skip_during_range': {
'start': '2pm',
'end': '3pm'
'start': '11/29/2017 2pm',
'end': '11/29/2017 3pm'
}
}
}
@ -94,11 +99,18 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
# Add job to schedule
self.schedule.opts.update(job)
# eval at 1:30pm to prime.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 1:30pm').timetuple()))
self.schedule.eval(now=run_time)
ret = self.schedule.job_status('job1')
# eval at 2:30pm, will not run during range.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 2:30pm').timetuple()))
self.schedule.eval(now=run_time)
ret = self.schedule.job_status('job1')
self.assertNotIn('_last_run', ret)
self.assertEqual(ret['_skip_reason'], 'in_skip_range')
self.assertEqual(ret['_skipped_time'], run_time)
# eval at 3:30pm, will run.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 3:30pm').timetuple()))
@ -113,8 +125,8 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
job = {
'schedule': {
'skip_during_range': {
'start': '2pm',
'end': '3pm'
'start': '11/29/2017 2pm',
'end': '11/29/2017 3pm'
},
'job1': {
'function': 'test.ping',
@ -126,11 +138,18 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
# Add job to schedule
self.schedule.opts.update(job)
# eval at 1:30pm to prime.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 1:30pm').timetuple()))
self.schedule.eval(now=run_time)
ret = self.schedule.job_status('job1')
# eval at 2:30pm, will not run during range.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 2:30pm').timetuple()))
self.schedule.eval(now=run_time)
ret = self.schedule.job_status('job1')
self.assertNotIn('_last_run', ret)
self.assertEqual(ret['_skip_reason'], 'in_skip_range')
self.assertEqual(ret['_skipped_time'], run_time)
# eval at 3:30pm, will run.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 3:30pm').timetuple()))
@ -164,6 +183,8 @@ class SchedulerSkipTest(ModuleCase, SaltReturnAssertsMixin):
self.schedule.eval(now=run_time)
ret = self.schedule.job_status('job1')
self.assertNotIn('_last_run', ret)
self.assertEqual(ret['_skip_reason'], 'in_skip_range')
self.assertEqual(ret['_skipped_time'], run_time)
# eval at 3:00:01pm, will run.
run_time = int(time.mktime(dateutil_parser.parse('11/29/2017 3:00:01pm').timetuple()))

View file

@ -444,6 +444,44 @@ class DockerContainerTestCase(ModuleCase, SaltReturnAssertsMixin):
image_info['Config']['Cmd']
)
@container_name
def test_running_with_port_bindings(self, name):
    '''
    Verify that every port listed in ``port_bindings`` is also exposed,
    even when it is not listed in ``ports``. The container is created
    with only a subset of ports explicitly exposed (including one,
    9999, which is not bound at all); afterwards the exposed ports must
    equal the union of the bound ports and the explicit ``ports`` list.
    '''
    # Create the container with a mix of TCP/UDP single ports and ranges
    ret = self.run_state(
        'docker_container.running',
        name=name,
        image=self.image,
        command='sleep 600',
        shutdown_timeout=1,
        port_bindings=[1234, '1235-1236', '2234/udp', '2235-2236/udp'],
        ports=[1235, '2235/udp', 9999],
    )
    self.assertSaltTrueReturn(ret)

    # Inspect the created container. The port bindings must contain
    # exactly the ports from the port_bindings argument, while the
    # exposed ports additionally include 9999/tcp from the ports
    # argument.
    container_info = self.run_function('docker.inspect_container', [name])
    bound_ports = ['1234/tcp', '1235/tcp', '1236/tcp',
                   '2234/udp', '2235/udp', '2236/udp']
    self.assertEqual(
        sorted(container_info['HostConfig']['PortBindings']),
        bound_ports
    )
    self.assertEqual(
        sorted(container_info['Config']['ExposedPorts']),
        bound_ports + ['9999/tcp']
    )
@container_name
def test_absent_with_stopped_container(self, name):
'''

View file

@ -1239,6 +1239,38 @@ class VMwareTestCase(ExtendedTestCase):
kwargs={'name': 'cCD2GgJGPG1DUnPeFBoPeqtdmUxIWxDoVFbA14vIG0BPoUECkgbRMnnY6gaUPBvIDCcsZ5HU48ubgQu5c'},
call='function')
def test__add_new_hard_disk_helper(self):
    '''
    Exercise the error paths of ``vmware._add_new_hard_disk_helper`` when
    the named datastore/datastore-cluster cannot be resolved.

    The mocked ``get_mor_using_container_view`` is consulted twice per
    call (first for a Datastore, then for a StoragePod), which is why
    each ``side_effect`` below supplies a two-item list.
    '''
    with patch('salt.cloud.clouds.vmware._get_si', MagicMock(return_value=None)):
        # Neither a Datastore nor a StoragePod is found: the helper
        # should abort with SaltCloudSystemExit.
        with patch('salt.utils.vmware.get_mor_using_container_view', side_effect=[None, None]):
            self.assertRaises(
                SaltCloudSystemExit,
                vmware._add_new_hard_disk_helper,
                disk_label='test',
                size_gb=100,
                unit_number=0,
                datastore='whatever'
            )
        # A Datastore is found (as a bare string here, so attribute
        # access on it raises AttributeError); verify the lookup was
        # performed against vim.Datastore.
        with patch('salt.utils.vmware.get_mor_using_container_view', side_effect=['Datastore', None]):
            self.assertRaises(
                AttributeError,
                vmware._add_new_hard_disk_helper,
                disk_label='test',
                size_gb=100,
                unit_number=0,
                datastore='whatever'
            )
            vmware.salt.utils.vmware.get_mor_using_container_view.assert_called_with(None, vim.Datastore, 'whatever')
        # Only a datastore cluster is found on the second lookup; verify
        # the lookup was performed against vim.StoragePod.
        with patch('salt.utils.vmware.get_mor_using_container_view', side_effect=[None, 'Cluster']):
            self.assertRaises(
                AttributeError,
                vmware._add_new_hard_disk_helper,
                disk_label='test',
                size_gb=100,
                unit_number=0,
                datastore='whatever'
            )
            vmware.salt.utils.vmware.get_mor_using_container_view.assert_called_with(None, vim.StoragePod, 'whatever')
class CloneFromSnapshotTest(TestCase):
'''

View file

@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Erik Johnson <erik@saltstack.com>`
:codeauthor: :email:`Erik Johnson <erik@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import copy
import errno
import os
import shutil
@ -18,15 +19,6 @@ try:
except ImportError:
pass
# Import 3rd-party libs
try:
import git # pylint: disable=unused-import
HAS_GITPYTHON = True
GITFS_AVAILABLE = True
except ImportError:
HAS_GITPYTHON = False
GITFS_AVAILABLE = False
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
@ -36,11 +28,36 @@ from tests.support.paths import TMP, FILES
# Import salt libs
import salt.fileserver.gitfs as gitfs
import salt.utils.files
import salt.utils.gitfs
import salt.utils.platform
import salt.utils.win_functions
import salt.utils.yaml
import salt.utils.gitfs
from salt.utils.gitfs import (
GITPYTHON_VERSION,
GITPYTHON_MINVER,
PYGIT2_VERSION,
PYGIT2_MINVER,
LIBGIT2_VERSION,
LIBGIT2_MINVER
)
try:
import git
# We still need to use GitPython here for temp repo setup, so we do need to
# actually import it. But we don't need import pygit2 in this module, we
# can just use the LooseVersion instances imported along with
# salt.utils.gitfs to check if we have a compatible version.
HAS_GITPYTHON = GITPYTHON_VERSION >= GITPYTHON_MINVER
except (ImportError, AttributeError):
HAS_GITPYTHON = False
try:
HAS_PYGIT2 = PYGIT2_VERSION >= PYGIT2_MINVER \
and LIBGIT2_VERSION >= LIBGIT2_MINVER
except AttributeError:
HAS_PYGIT2 = False
log = logging.getLogger(__name__)
TMP_SOCK_DIR = tempfile.mkdtemp(dir=TMP)
@ -48,6 +65,38 @@ TMP_REPO_DIR = os.path.join(TMP, 'gitfs_root')
INTEGRATION_BASE_FILES = os.path.join(FILES, 'file', 'base')
UNICODE_FILENAME = 'питон.txt'
UNICODE_DIRNAME = UNICODE_ENVNAME = 'соль'
TAG_NAME = 'mytag'
# Baseline master opts shared by the gitfs test classes below. Each test
# class deep-copies this dict and overrides cachedir/sock_dir (and the
# gitfs_provider) in its setup_loader_modules().
OPTS = {
    'sock_dir': TMP_SOCK_DIR,
    'gitfs_remotes': ['file://' + TMP_REPO_DIR],
    'gitfs_root': '',
    'fileserver_backend': ['gitfs'],
    'gitfs_base': 'master',
    'fileserver_events': True,
    'transport': 'zeromq',
    'gitfs_mountpoint': '',
    'gitfs_saltenv': [],
    'gitfs_env_whitelist': [],
    'gitfs_env_blacklist': [],
    'gitfs_saltenv_whitelist': [],
    'gitfs_saltenv_blacklist': [],
    'gitfs_user': '',
    'gitfs_password': '',
    'gitfs_insecure_auth': False,
    'gitfs_privkey': '',
    'gitfs_pubkey': '',
    'gitfs_passphrase': '',
    'gitfs_refspecs': [
        '+refs/heads/*:refs/remotes/origin/*',
        '+refs/tags/*:refs/tags/*'
    ],
    'gitfs_ssl_verify': True,
    'gitfs_disable_saltenv_mapping': False,
    'gitfs_ref_types': ['branch', 'tag', 'sha'],
    'gitfs_update_interval': 60,
    '__role': 'master',
}
def _rmtree_error(func, path, excinfo):
@ -55,43 +104,23 @@ def _rmtree_error(func, path, excinfo):
func(path)
@skipIf(not HAS_GITPYTHON, 'GitPython is not installed')
def _clear_instance_map():
    '''
    Drop any cached GitFS singleton tied to the current Tornado IOLoop so
    that the next test (class) constructs a fresh instance instead of
    re-using one from an earlier test.
    '''
    # pop() with a default is a no-op when nothing was cached for this loop,
    # matching the original del/except-KeyError behavior.
    salt.utils.gitfs.GitFS.instance_map.pop(tornado.ioloop.IOLoop.current(), None)
@skipIf(not HAS_GITPYTHON, 'GitPython >= {0} required'.format(GITPYTHON_MINVER))
class GitfsConfigTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
opts = copy.deepcopy(OPTS)
opts['cachedir'] = self.tmp_cachedir
opts['sock_dir'] = self.tmp_sock_dir
return {
gitfs: {
'__opts__': {
'cachedir': self.tmp_cachedir,
'sock_dir': TMP_SOCK_DIR,
'gitfs_root': 'salt',
'fileserver_backend': ['gitfs'],
'gitfs_base': 'master',
'fileserver_events': True,
'transport': 'zeromq',
'gitfs_mountpoint': '',
'gitfs_saltenv': [],
'gitfs_env_whitelist': [],
'gitfs_env_blacklist': [],
'gitfs_saltenv_whitelist': [],
'gitfs_saltenv_blacklist': [],
'gitfs_user': '',
'gitfs_password': '',
'gitfs_insecure_auth': False,
'gitfs_privkey': '',
'gitfs_pubkey': '',
'gitfs_passphrase': '',
'gitfs_refspecs': [
'+refs/heads/*:refs/remotes/origin/*',
'+refs/tags/*:refs/tags/*'
],
'gitfs_ssl_verify': True,
'gitfs_disable_saltenv_mapping': False,
'gitfs_ref_types': ['branch', 'tag', 'sha'],
'gitfs_update_interval': 60,
'__role': 'master',
}
'__opts__': opts,
}
}
@ -99,16 +128,27 @@ class GitfsConfigTestCase(TestCase, LoaderModuleMockMixin):
def setUpClass(cls):
# Clear the instance map so that we make sure to create a new instance
# for this test class.
try:
del salt.utils.gitfs.GitFS.instance_map[tornado.ioloop.IOLoop.current()]
except KeyError:
pass
_clear_instance_map()
cls.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
cls.tmp_sock_dir = tempfile.mkdtemp(dir=TMP)
def tearDown(self):
shutil.rmtree(self.tmp_cachedir)
@classmethod
def tearDownClass(cls):
'''
Remove the temporary git repository and gitfs cache directory to ensure
a clean environment for the other test class(es).
'''
for path in (cls.tmp_cachedir, cls.tmp_sock_dir):
try:
shutil.rmtree(path, onerror=_rmtree_error)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
def test_per_saltenv_config(self):
opts_override = textwrap.dedent('''
gitfs_root: salt
gitfs_saltenv:
- baz:
# when loaded, the "salt://" prefix will be removed
@ -186,110 +226,27 @@ class GitfsConfigTestCase(TestCase, LoaderModuleMockMixin):
LOAD = {'saltenv': 'base'}
@skipIf(not GITFS_AVAILABLE, "GitFS could not be loaded. Skipping GitFS tests!")
@skipIf(NO_MOCK, NO_MOCK_REASON)
class GitFSTest(TestCase, LoaderModuleMockMixin):
class GitFSTestFuncs(object):
'''
These are where the tests go, so that they can be run using both GitPython
and pygit2.
def setup_loader_modules(self):
self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
return {
gitfs: {
'__opts__': {
'cachedir': self.tmp_cachedir,
'sock_dir': TMP_SOCK_DIR,
'gitfs_remotes': ['file://' + TMP_REPO_DIR],
'gitfs_root': '',
'fileserver_backend': ['gitfs'],
'gitfs_base': 'master',
'fileserver_events': True,
'transport': 'zeromq',
'gitfs_mountpoint': '',
'gitfs_saltenv': [],
'gitfs_env_whitelist': [],
'gitfs_env_blacklist': [],
'gitfs_saltenv_whitelist': [],
'gitfs_saltenv_blacklist': [],
'gitfs_user': '',
'gitfs_password': '',
'gitfs_insecure_auth': False,
'gitfs_privkey': '',
'gitfs_pubkey': '',
'gitfs_passphrase': '',
'gitfs_refspecs': [
'+refs/heads/*:refs/remotes/origin/*',
'+refs/tags/*:refs/tags/*'
],
'gitfs_ssl_verify': True,
'gitfs_disable_saltenv_mapping': False,
'gitfs_ref_types': ['branch', 'tag', 'sha'],
'gitfs_update_interval': 60,
'__role': 'master',
}
}
}
NOTE: The gitfs.update() has to happen AFTER the setUp is called. This is
because running it inside the setUp will spawn a new singleton, which means
that tests which need to mock the __opts__ will be too late; the setUp will
have created a new singleton that will bypass our mocking. To ensure that
our tests are reliable and correct, we want to make sure that each test
uses a new gitfs object, allowing different manipulations of the opts to be
tested.
@classmethod
def setUpClass(cls):
# Clear the instance map so that we make sure to create a new instance
# for this test class.
try:
del salt.utils.gitfs.GitFS.instance_map[tornado.ioloop.IOLoop.current()]
except KeyError:
pass
# Create the dir if it doesn't already exist
try:
shutil.copytree(INTEGRATION_BASE_FILES, TMP_REPO_DIR + '/')
except OSError:
# We probably caught an error because files already exist. Ignore
pass
try:
repo = git.Repo(TMP_REPO_DIR)
except git.exc.InvalidGitRepositoryError:
repo = git.Repo.init(TMP_REPO_DIR)
if 'USERNAME' not in os.environ:
try:
if salt.utils.platform.is_windows():
os.environ['USERNAME'] = salt.utils.win_functions.get_current_user()
else:
os.environ['USERNAME'] = pwd.getpwuid(os.geteuid()).pw_name
except AttributeError:
log.error('Unable to get effective username, falling back to '
'\'root\'.')
os.environ['USERNAME'] = 'root'
repo.index.add([x for x in os.listdir(TMP_REPO_DIR)
if x != '.git'])
repo.index.commit('Test')
# Add another branch with unicode characters in the name
repo.create_head(UNICODE_ENVNAME, 'HEAD')
def setUp(self):
'''
We don't want to check in another .git dir into GH because that just
gets messy. Instead, we'll create a temporary repo on the fly for the
tests to examine.
'''
if not gitfs.__virtual__():
self.skipTest("GitFS could not be loaded. Skipping GitFS tests!")
self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
gitfs.update()
def tearDown(self):
'''
Remove the temporary git repository and gitfs cache directory to ensure
a clean environment for each test.
'''
try:
shutil.rmtree(self.tmp_cachedir, onerror=_rmtree_error)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
Therefore, keep the following in mind:
1. Each test needs to call gitfs.update() *after* any patching, and
*before* calling the function being tested.
2. Do *NOT* move the gitfs.update() into the setUp.
'''
def test_file_list(self):
gitfs.update()
ret = gitfs.file_list(LOAD)
self.assertIn('testfile', ret)
self.assertIn(UNICODE_FILENAME, ret)
@ -298,11 +255,242 @@ class GitFSTest(TestCase, LoaderModuleMockMixin):
self.assertIn('/'.join((UNICODE_DIRNAME, 'foo.txt')), ret)
def test_dir_list(self):
gitfs.update()
ret = gitfs.dir_list(LOAD)
self.assertIn('grail', ret)
self.assertIn(UNICODE_DIRNAME, ret)
def test_envs(self):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
self.assertIn('base', ret)
self.assertIn(UNICODE_ENVNAME, ret)
self.assertIn(TAG_NAME, ret)
def test_ref_types_global(self):
'''
Test the global gitfs_ref_types config option
'''
with patch.dict(gitfs.__opts__, {'gitfs_ref_types': ['branch']}):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
# Since we are restricting to branches only, the tag should not
# appear in the envs list.
self.assertIn('base', ret)
self.assertIn(UNICODE_ENVNAME, ret)
self.assertNotIn(TAG_NAME, ret)
def test_ref_types_per_remote(self):
'''
Test the per_remote ref_types config option, using a different
ref_types setting than the global test.
'''
remotes = [{'file://' + TMP_REPO_DIR: [{'ref_types': ['tag']}]}]
with patch.dict(gitfs.__opts__, {'gitfs_remotes': remotes}):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
# Since we are restricting to tags only, the tag should appear in
# the envs list, but the branches should not.
self.assertNotIn('base', ret)
self.assertNotIn(UNICODE_ENVNAME, ret)
self.assertIn(TAG_NAME, ret)
def test_disable_saltenv_mapping_global_with_mapping_defined_globally(self):
'''
Test the global gitfs_disable_saltenv_mapping config option, combined
with the per-saltenv mapping being defined in the global gitfs_saltenv
option.
'''
opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
gitfs_disable_saltenv_mapping: True
gitfs_saltenv:
- foo:
- ref: base
'''))
with patch.dict(gitfs.__opts__, opts):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
# Since we are restricting to tags only, the tag should appear in
# the envs list, but the branches should not.
self.assertEqual(ret, ['foo'])
def test_disable_saltenv_mapping_global_with_mapping_defined_per_remote(self):
'''
Test the global gitfs_disable_saltenv_mapping config option, combined
with the per-saltenv mapping being defined in the remote itself via the
"saltenv" per-remote option.
'''
opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
gitfs_disable_saltenv_mapping: True
gitfs_remotes:
- file://{0}:
- saltenv:
- bar:
- ref: base
'''.format(TMP_REPO_DIR)))
with patch.dict(gitfs.__opts__, opts):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
# Since we are restricting to tags only, the tag should appear in
# the envs list, but the branches should not.
self.assertEqual(ret, ['bar'])
def test_disable_saltenv_mapping_per_remote_with_mapping_defined_globally(self):
'''
Test the per-remote disable_saltenv_mapping config option, combined
with the per-saltenv mapping being defined in the global gitfs_saltenv
option.
'''
opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
gitfs_remotes:
- file://{0}:
- disable_saltenv_mapping: True
gitfs_saltenv:
- hello:
- ref: base
'''))
with patch.dict(gitfs.__opts__, opts):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
# Since we are restricting to tags only, the tag should appear in
# the envs list, but the branches should not.
self.assertEqual(ret, ['hello'])
def test_disable_saltenv_mapping_per_remote_with_mapping_defined_per_remote(self):
'''
Test the per-remote disable_saltenv_mapping config option, combined
with the per-saltenv mapping being defined in the remote itself via the
"saltenv" per-remote option.
'''
opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
gitfs_remotes:
- file://{0}:
- disable_saltenv_mapping: True
- saltenv:
- world:
- ref: base
'''.format(TMP_REPO_DIR)))
with patch.dict(gitfs.__opts__, opts):
gitfs.update()
ret = gitfs.envs(ignore_cache=True)
# Since we are restricting to tags only, the tag should appear in
# the envs list, but the branches should not.
self.assertEqual(ret, ['world'])
class GitFSTestBase(object):
    '''
    Shared fixture for the gitfs provider test classes: builds a temporary
    git repository (via GitPython) plus per-class cache and socket dirs,
    and clears singleton/cache state between tests.
    '''

    @classmethod
    def setUpClass(cls):
        '''
        Create the temp cachedir/sock_dir and (re)build the temporary git
        repo from the integration base files, with a commit, a unicode
        branch name, and a tag.
        '''
        cls.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
        cls.tmp_sock_dir = tempfile.mkdtemp(dir=TMP)
        # Start from a clean repo dir; only ENOENT (already absent) is OK.
        try:
            shutil.rmtree(TMP_REPO_DIR)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise
        shutil.copytree(INTEGRATION_BASE_FILES, TMP_REPO_DIR + '/')
        repo = git.Repo.init(TMP_REPO_DIR)
        # GitPython needs USERNAME set to commit; remember any original
        # value so it can be restored in the finally block below.
        username_key = str('USERNAME')
        orig_username = os.environ.get(username_key)
        try:
            if username_key not in os.environ:
                try:
                    if salt.utils.platform.is_windows():
                        os.environ[username_key] = \
                            salt.utils.win_functions.get_current_user()
                    else:
                        os.environ[username_key] = \
                            pwd.getpwuid(os.geteuid()).pw_name
                except AttributeError:
                    log.error(
                        'Unable to get effective username, falling back to '
                        '\'root\'.'
                    )
                    os.environ[username_key] = str('root')
            # Commit everything except the .git dir itself
            repo.index.add([x for x in os.listdir(TMP_REPO_DIR)
                            if x != '.git'])
            repo.index.commit('Test')
            # Add another branch with unicode characters in the name
            repo.create_head(UNICODE_ENVNAME, 'HEAD')
            # Add a tag
            repo.create_tag(TAG_NAME, 'HEAD')
        finally:
            # Restore (or remove) USERNAME so the env is unchanged for
            # whatever runs after this class.
            if orig_username is not None:
                os.environ[username_key] = orig_username
            else:
                os.environ.pop(username_key, None)

    @classmethod
    def tearDownClass(cls):
        '''
        Remove the temporary git repository and gitfs cache directory to ensure
        a clean environment for the other test class(es).
        '''
        for path in (cls.tmp_cachedir, cls.tmp_sock_dir, TMP_REPO_DIR):
            try:
                shutil.rmtree(path, onerror=_rmtree_error)
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise

    def setUp(self):
        '''
        We don't want to check in another .git dir into GH because that just
        gets messy. Instead, we'll create a temporary repo on the fly for the
        tests to examine.

        Also ensure we A) don't re-use the singleton, and B) that the cachedirs
        are cleared. This keeps these performance enhancements from affecting
        the results of subsequent tests.
        '''
        if not gitfs.__virtual__():
            self.skipTest("GitFS could not be loaded. Skipping GitFS tests!")
        _clear_instance_map()
        # Wipe per-test gitfs caches; a missing subdir (ENOENT) is fine.
        for subdir in ('gitfs', 'file_lists'):
            try:
                shutil.rmtree(os.path.join(self.tmp_cachedir, subdir))
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
@skipIf(not HAS_GITPYTHON, 'GitPython >= {0} required'.format(GITPYTHON_MINVER))
@skipIf(NO_MOCK, NO_MOCK_REASON)
class GitPythonTest(GitFSTestBase, GitFSTestFuncs, TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        '''
        Build the loader dunders for the gitfs module, pointing the cache
        and socket directories at this class' temp dirs and selecting the
        GitPython provider.
        '''
        opts = copy.deepcopy(OPTS)
        opts.update({
            'cachedir': self.tmp_cachedir,
            'sock_dir': self.tmp_sock_dir,
            'gitfs_provider': 'gitpython',
        })
        return {gitfs: {'__opts__': opts}}
@skipIf(not HAS_GITPYTHON, 'GitPython >= {0} required for temp repo setup'.format(GITPYTHON_MINVER))
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(NO_MOCK, NO_MOCK_REASON)
class Pygit2Test(GitFSTestBase, GitFSTestFuncs, TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        '''
        Build the loader dunders for the gitfs module, pointing the cache
        and socket directories at this class' temp dirs and selecting the
        pygit2 provider.
        '''
        opts = copy.deepcopy(OPTS)
        opts.update({
            'cachedir': self.tmp_cachedir,
            'sock_dir': self.tmp_sock_dir,
            'gitfs_provider': 'pygit2',
        })
        return {gitfs: {'__opts__': opts}}

View file

@ -10,14 +10,17 @@ import sys
import tempfile
# Import Salt Libs
import salt.utils.files
import salt.utils.platform
import salt.modules.cmdmod as cmdmod
from salt.exceptions import CommandExecutionError
from salt.log import LOG_LEVELS
from salt.ext.six.moves import builtins # pylint: disable=import-error
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.paths import FILES
from tests.support.mock import (
mock_open,
Mock,
@ -33,6 +36,39 @@ MOCK_SHELL_FILE = '# List of acceptable shells\n' \
'/bin/bash\n'
class MockTimedProc(object):
    '''
    Class used as a stand-in for salt.utils.timed_subprocess.TimedProc
    '''
    class _Process(object):
        '''
        Used to provide a dummy "process" attribute
        '''
        def __init__(self, returncode=0, pid=12345):
            self.returncode = returncode
            self.pid = pid

    def __init__(self, stdout=None, stderr=None, returncode=0, pid=12345):
        # Both output streams must be bytes (or None), mirroring what a
        # real subprocess would produce.
        for name, stream in (('stdout', stdout), ('stderr', stderr)):
            if stream is not None and not isinstance(stream, bytes):
                raise TypeError(
                    'Must pass {0} to MockTimedProc as bytes'.format(name))
        self._stdout = stdout
        self._stderr = stderr
        self.process = self._Process(returncode=returncode, pid=pid)

    def run(self):
        # The real TimedProc executes the command here; the mock is a no-op.
        pass

    @property
    def stdout(self):
        return self._stdout

    @property
    def stderr(self):
        return self._stderr
@skipIf(NO_MOCK, NO_MOCK_REASON)
class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
'''
@ -303,3 +339,85 @@ class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
pass
else:
raise RuntimeError
def test_run_all_binary_replace(self):
    '''
    Test for failed decoding of binary data, for instance when doing
    something silly like using dd to read from /dev/urandom and write to
    /dev/stdout.
    '''
    # Since we're using unicode_literals, read the random bytes from a file
    rand_bytes_path = os.path.join(FILES, 'file', 'base', 'random_bytes')
    with salt.utils.files.fopen(rand_bytes_path, 'rb') as fh_:
        stdout_bytes = fh_.read()

    # stdout with the non-decodable bits replaced with the unicode
    # replacement character U+FFFD.
    stdout_unicode = '\ufffd\x1b\ufffd\ufffd\n'
    stderr_bytes = (b'1+0 records in\n1+0 records out\n'
                    b'4 bytes copied, 9.1522e-05 s, 43.7 kB/s\n')
    stderr_unicode = stderr_bytes.decode()

    mock_proc = MagicMock(
        return_value=MockTimedProc(stdout=stdout_bytes, stderr=stderr_bytes))
    with patch('salt.utils.timed_subprocess.TimedProc', mock_proc):
        ret = cmdmod.run_all(
            'dd if=/dev/urandom of=/dev/stdout bs=4 count=1',
            rstrip=False)

    self.assertEqual(ret['stdout'], stdout_unicode)
    self.assertEqual(ret['stderr'], stderr_unicode)
def test_run_all_none(self):
    '''
    Tests cases when proc.stdout or proc.stderr are None. These should be
    caught and replaced with empty strings.
    '''
    mock_proc = MagicMock(return_value=MockTimedProc(stdout=None, stderr=None))
    with patch('salt.utils.timed_subprocess.TimedProc', mock_proc):
        ret = cmdmod.run_all('some command', rstrip=False)
    # Both streams should be coerced from None to empty unicode strings.
    for key in ('stdout', 'stderr'):
        self.assertEqual(ret[key], '')
def test_run_all_unicode(self):
    '''
    Ensure that unicode stdout and stderr are decoded properly
    '''
    stdout_unicode = 'Here is some unicode: спам'
    stderr_unicode = 'Here is some unicode: яйца'
    mock_proc = MagicMock(
        return_value=MockTimedProc(
            stdout=stdout_unicode.encode('utf-8'),
            stderr=stderr_unicode.encode('utf-8'),
        )
    )
    # Force a known system encoding so the utf-8 bytes decode back to the
    # original unicode values.
    with patch('salt.utils.timed_subprocess.TimedProc', mock_proc), \
            patch.object(builtins, '__salt_system_encoding__', 'utf-8'):
        ret = cmdmod.run_all('some command', rstrip=False)

    self.assertEqual(ret['stdout'], stdout_unicode)
    self.assertEqual(ret['stderr'], stderr_unicode)
def test_run_all_output_encoding(self):
    '''
    Test that specifying the output encoding works as expected
    '''
    stdout = 'Æ'
    # Encode with latin1 so decoding with the (utf-8) system encoding
    # would fail; output_encoding must win.
    mock_proc = MagicMock(
        return_value=MockTimedProc(stdout=stdout.encode('latin1')))
    with patch('salt.utils.timed_subprocess.TimedProc', mock_proc), \
            patch.object(builtins, '__salt_system_encoding__', 'utf-8'):
        ret = cmdmod.run_all('some command', output_encoding='latin1')
    self.assertEqual(ret['stdout'], stdout)

View file

@ -241,7 +241,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
@patch('salt.modules.localemod.dbus', None)
@patch('salt.modules.localemod.__salt__', {'cmd.run': MagicMock()})
@patch('salt.utils.systemd.booted', MagicMock(return_value=False))
def test_get_locale_with_no_systemd_slowlaris(self):
def test_get_locale_with_no_systemd_solaris(self):
'''
Test getting current system locale with systemd and dbus available on Solaris.
:return:
@ -389,9 +389,9 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
'file.replace': MagicMock()})
@patch('salt.modules.localemod._localectl_set', MagicMock())
@patch('salt.utils.systemd.booted', MagicMock(return_value=False))
def test_set_locale_with_no_systemd_slowlaris_with_list_avail(self):
def test_set_locale_with_no_systemd_solaris_with_list_avail(self):
'''
Test setting current system locale with systemd and dbus available on Slowlaris.
Test setting current system locale with systemd and dbus available on Solaris.
The list_avail returns the proper locale.
:return:
'''
@ -410,9 +410,9 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
'file.replace': MagicMock()})
@patch('salt.modules.localemod._localectl_set', MagicMock())
@patch('salt.utils.systemd.booted', MagicMock(return_value=False))
def test_set_locale_with_no_systemd_slowlaris_without_list_avail(self):
def test_set_locale_with_no_systemd_solaris_without_list_avail(self):
'''
Test setting current system locale with systemd and dbus is not available on Slowlaris.
Test setting current system locale with systemd and dbus is not available on Solaris.
The list_avail does not return the proper locale.
:return:
'''

View file

@ -34,11 +34,8 @@ from salt.utils.jinja import (
ensure_sequence_filter
)
from salt.utils.odict import OrderedDict
from salt.utils.templates import (
get_context,
JINJA,
render_jinja_tmpl
)
from salt.utils.templates import JINJA, render_jinja_tmpl
# dateutils is needed so that the strftime jinja filter is loaded
import salt.utils.dateutils # pylint: disable=unused-import
import salt.utils.files
@ -379,36 +376,6 @@ class TestGetTemplate(TestCase):
result = salt.utils.stringutils.to_unicode(fp.read(), 'utf-8')
self.assertEqual(salt.utils.stringutils.to_unicode('Assunção' + os.linesep), result)
def test_get_context_has_enough_context(self):
    # 15-line template; the default window is five lines on either side
    # of the requested line, bracketed by [...] markers.
    template = '\n'.join('123456789abcdef')
    result = get_context(template, 8)
    self.assertEqual('---\n[...]\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\n[...]\n---', result)
def test_get_context_at_top_of_file(self):
    # Requesting line 1: no leading [...] marker since nothing is elided
    # above the window.
    template = '\n'.join('123456789abcdef')
    result = get_context(template, 1)
    self.assertEqual('---\n1\n2\n3\n4\n5\n6\n[...]\n---', result)
def test_get_context_at_bottom_of_file(self):
    # Requesting the last line: no trailing [...] marker since nothing is
    # elided below the window.
    template = '\n'.join('123456789abcdef')
    result = get_context(template, 15)
    self.assertEqual('---\n[...]\na\nb\nc\nd\ne\nf\n---', result)
def test_get_context_2_context_lines(self):
    # num_lines=2 narrows the window to two lines on either side.
    template = '\n'.join('123456789abcdef')
    result = get_context(template, 8, num_lines=2)
    self.assertEqual('---\n[...]\n6\n7\n8\n9\na\n[...]\n---', result)
def test_get_context_with_marker(self):
    # The marker is appended to the requested line within the window.
    template = '\n'.join('123456789abcdef')
    result = get_context(template, 8, num_lines=2, marker=' <---')
    self.assertEqual('---\n[...]\n6\n7\n8 <---\n9\na\n[...]\n---', result)
def test_render_with_syntax_error(self):
template = 'hello\n\n{{ bad\n\nfoo'
expected = r'.*---\nhello\n\n{{ bad\n\nfoo <======================\n---'

View file

@ -798,6 +798,29 @@ class TranslateContainerInputTestCase(TranslateBase):
'''
translator = salt.utils.docker.translate.container
@staticmethod
def normalize_ports(ret):
    '''
    When we translate exposed ports, we can end up with a mixture of ints
    (representing TCP ports) and tuples (representing UDP ports). Python 2
    will sort an iterable containing these mixed types, but Python 3 will
    not. This helper is used to munge the ports in the return data so that
    the resulting list is sorted in a way that can reliably be compared to
    the expected results in the test.

    This helper should only be needed for port_bindings and ports.
    '''
    if 'ports' in ret:
        # Partition into int (TCP) and tuple (UDP) entries so each group
        # can be sorted homogeneously, then recombine TCP-first.
        tcp_ports = sorted(
            port for port in ret['ports']
            if isinstance(port, six.integer_types))
        udp_ports = sorted(
            port for port in ret['ports']
            if not isinstance(port, six.integer_types))
        ret['ports'] = tcp_ports + udp_ports
    return ret
@assert_bool(salt.utils.docker.translate.container)
def test_auto_remove(self):
'''
@ -1288,9 +1311,11 @@ class TranslateContainerInputTestCase(TranslateBase):
)
for val in (bindings, bindings.split(',')):
self.assertEqual(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
self.normalize_ports(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
)
),
{'port_bindings': {80: [('10.1.2.3', 8080),
('10.1.2.3', 8888)],
@ -1302,8 +1327,9 @@ class TranslateContainerInputTestCase(TranslateBase):
'3334/udp': ('10.4.5.6', 3334),
'5505/udp': ('10.7.8.9', 15505),
'5506/udp': ('10.7.8.9', 15506)},
'ports': [80, '81/udp', 3333, '3334/udp',
4505, 4506, '5505/udp', '5506/udp']}
'ports': [80, 3333, 4505, 4506,
(81, 'udp'), (3334, 'udp'),
(5505, 'udp'), (5506, 'udp')]}
)
# ip::containerPort - Bind a specific IP and an ephemeral port to a
@ -1315,9 +1341,11 @@ class TranslateContainerInputTestCase(TranslateBase):
)
for val in (bindings, bindings.split(',')):
self.assertEqual(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
self.normalize_ports(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
)
),
{'port_bindings': {80: [('10.1.2.3',), ('10.1.2.3',)],
3333: ('10.4.5.6',),
@ -1327,8 +1355,9 @@ class TranslateContainerInputTestCase(TranslateBase):
'3334/udp': ('10.4.5.6',),
'5505/udp': ('10.7.8.9',),
'5506/udp': ('10.7.8.9',)},
'ports': [80, '81/udp', 3333, '3334/udp',
4505, 4506, '5505/udp', '5506/udp']}
'ports': [80, 3333, 4505, 4506,
(81, 'udp'), (3334, 'udp'),
(5505, 'udp'), (5506, 'udp')]}
)
# hostPort:containerPort - Bind a specific port on all of the host's
@ -1339,9 +1368,11 @@ class TranslateContainerInputTestCase(TranslateBase):
)
for val in (bindings, bindings.split(',')):
self.assertEqual(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
self.normalize_ports(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
)
),
{'port_bindings': {80: [8080, 8888],
3333: 3333,
@ -1351,8 +1382,9 @@ class TranslateContainerInputTestCase(TranslateBase):
'3334/udp': 3334,
'5505/udp': 15505,
'5506/udp': 15506},
'ports': [80, '81/udp', 3333, '3334/udp',
4505, 4506, '5505/udp', '5506/udp']}
'ports': [80, 3333, 4505, 4506,
(81, 'udp'), (3334, 'udp'),
(5505, 'udp'), (5506, 'udp')]}
)
# containerPort - Bind an ephemeral port on all of the host's
@ -1360,9 +1392,11 @@ class TranslateContainerInputTestCase(TranslateBase):
bindings = '80,3333,4505-4506,81/udp,3334/udp,5505-5506/udp'
for val in (bindings, bindings.split(',')):
self.assertEqual(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
self.normalize_ports(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
)
),
{'port_bindings': {80: None,
3333: None,
@ -1372,8 +1406,9 @@ class TranslateContainerInputTestCase(TranslateBase):
'3334/udp': None,
'5505/udp': None,
'5506/udp': None},
'ports': [80, '81/udp', 3333, '3334/udp',
4505, 4506, '5505/udp', '5506/udp']},
'ports': [80, 3333, 4505, 4506,
(81, 'udp'), (3334, 'udp'),
(5505, 'udp'), (5506, 'udp')]}
)
# Test a mixture of different types of input
@ -1384,9 +1419,11 @@ class TranslateContainerInputTestCase(TranslateBase):
)
for val in (bindings, bindings.split(',')):
self.assertEqual(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
self.normalize_ports(
salt.utils.docker.translate_input(
self.translator,
port_bindings=val,
)
),
{'port_bindings': {80: ('10.1.2.3', 8080),
3333: ('10.4.5.6',),
@ -1402,10 +1439,10 @@ class TranslateContainerInputTestCase(TranslateBase):
'19999/udp': None,
'20000/udp': None,
'20001/udp': None},
'ports': [80, '81/udp', 3333, '3334/udp',
4505, 4506, '5505/udp', '5506/udp',
9999, 10000, 10001, '19999/udp',
'20000/udp', '20001/udp']}
'ports': [80, 3333, 4505, 4506, 9999, 10000, 10001,
(81, 'udp'), (3334, 'udp'), (5505, 'udp'),
(5506, 'udp'), (19999, 'udp'),
(20000, 'udp'), (20001, 'udp')]}
)
# Error case: too many items (max 3)
@ -1506,11 +1543,13 @@ class TranslateContainerInputTestCase(TranslateBase):
[1111, '2222/tcp', '3333/udp', '4505-4506'],
['1111', '2222/tcp', '3333/udp', '4505-4506']):
self.assertEqual(
salt.utils.docker.translate_input(
self.translator,
ports=val,
self.normalize_ports(
salt.utils.docker.translate_input(
self.translator,
ports=val,
)
),
{'ports': [1111, 2222, (3333, 'udp'), 4505, 4506]}
{'ports': [1111, 2222, 4505, 4506, (3333, 'udp')]}
)
# Error case: non-integer and non/string value

View file

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import textwrap
# Import Salt libs
from tests.support.mock import patch
@ -108,3 +109,43 @@ class StringutilsTestCase(TestCase):
'(?:[\\s]+)?$'
ret = salt.utils.stringutils.build_whitespace_split_regex(' '.join(LOREM_IPSUM.split()[:5]))
self.assertEqual(ret, expected_regex)
def test_get_context(self):
    # One line of context on either side of line 1, bracketed by ---
    # markers, with [...] marking the elided remainder.
    expected_context = textwrap.dedent('''\
        ---
        Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque eget urna a arcu lacinia sagittis.
        Sed scelerisque, lacus eget malesuada vestibulum, justo diam facilisis tortor, in sodales dolor
        [...]
        ---''')
    result = salt.utils.stringutils.get_context(LOREM_IPSUM, 1, num_lines=1)
    self.assertEqual(result, expected_context)
def test_get_context_has_enough_context(self):
    # 15-line template; the default window is five lines on either side
    # of the requested line, bracketed by [...] markers.
    template = '\n'.join('123456789abcdef')
    result = salt.utils.stringutils.get_context(template, 8)
    self.assertEqual('---\n[...]\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\n[...]\n---', result)
def test_get_context_at_top_of_file(self):
    # Requesting line 1: no leading [...] marker since nothing is elided
    # above the window.
    template = '\n'.join('123456789abcdef')
    result = salt.utils.stringutils.get_context(template, 1)
    self.assertEqual('---\n1\n2\n3\n4\n5\n6\n[...]\n---', result)
def test_get_context_at_bottom_of_file(self):
    # Requesting the last line: no trailing [...] marker since nothing is
    # elided below the window.
    template = '\n'.join('123456789abcdef')
    result = salt.utils.stringutils.get_context(template, 15)
    self.assertEqual('---\n[...]\na\nb\nc\nd\ne\nf\n---', result)
def test_get_context_2_context_lines(self):
    # num_lines=2 narrows the window to two lines on either side.
    template = '\n'.join('123456789abcdef')
    result = salt.utils.stringutils.get_context(template, 8, num_lines=2)
    self.assertEqual('---\n[...]\n6\n7\n8\n9\na\n[...]\n---', result)
def test_get_context_with_marker(self):
    # The marker is appended to the requested line within the window.
    template = '\n'.join('123456789abcdef')
    result = salt.utils.stringutils.get_context(template, 8, num_lines=2, marker=' <---')
    self.assertEqual('---\n[...]\n6\n7\n8 <---\n9\na\n[...]\n---', result)

View file

@ -1,25 +0,0 @@
# -*- coding: utf-8 -*-
'''
Tests for salt.utils.data
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import textwrap
# Import Salt libs
import salt.utils.templates
from tests.support.unit import TestCase, LOREM_IPSUM
class TemplatesTestCase(TestCase):

    def test_get_context(self):
        '''
        get_context should return the requested line plus num_lines of
        context on either side, bracketed by --- markers, with [...]
        marking elided text.
        '''
        expected_context = textwrap.dedent('''\
            ---
            Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque eget urna a arcu lacinia sagittis.
            Sed scelerisque, lacus eget malesuada vestibulum, justo diam facilisis tortor, in sodales dolor
            [...]
            ---''')
        result = salt.utils.templates.get_context(LOREM_IPSUM, 1, num_lines=1)
        self.assertEqual(result, expected_context)

View file

@ -113,20 +113,13 @@ class TestVerify(TestCase):
root_dir = tempfile.mkdtemp(dir=TMP)
var_dir = os.path.join(root_dir, 'var', 'log', 'salt')
key_dir = os.path.join(root_dir, 'key_dir')
verify_env([var_dir, key_dir], getpass.getuser(), root_dir=root_dir, sensitive_dirs=[key_dir])
verify_env([var_dir], getpass.getuser(), root_dir=root_dir)
self.assertTrue(os.path.exists(var_dir))
self.assertTrue(os.path.exists(key_dir))
var_dir_stat = os.stat(var_dir)
self.assertEqual(var_dir_stat.st_uid, os.getuid())
self.assertEqual(var_dir_stat.st_mode & stat.S_IRWXU, stat.S_IRWXU)
self.assertEqual(var_dir_stat.st_mode & stat.S_IRWXG, 40)
self.assertEqual(var_dir_stat.st_mode & stat.S_IRWXO, 5)
key_dir_stat = os.stat(key_dir)
self.assertEqual(key_dir_stat.st_mode & stat.S_IRWXU, stat.S_IRWXU)
self.assertEqual(key_dir_stat.st_mode & stat.S_IRWXG, 0)
self.assertEqual(key_dir_stat.st_mode & stat.S_IRWXO, 0)
dir_stat = os.stat(var_dir)
self.assertEqual(dir_stat.st_uid, os.getuid())
self.assertEqual(dir_stat.st_mode & stat.S_IRWXU, stat.S_IRWXU)
self.assertEqual(dir_stat.st_mode & stat.S_IRWXG, 40)
self.assertEqual(dir_stat.st_mode & stat.S_IRWXO, 5)
@requires_network(only_local_network=True)
def test_verify_socket(self):