saltstack/salt, commit 74379bac9c
Merge pull request #43720 from rallytime/merge-develop
[develop] Merge forward from 2017.7 to develop
20 changed files with 1963 additions and 141 deletions
@@ -25,6 +25,9 @@ configuration:
     - web*:
       - test.*
       - pkg.*
+  # Allow managers to use saltutil module functions
+  manager_.*:
+    - saltutil.*
 
 Permission Issues
 -----------------
 
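The manager_.* key added above is a pattern matched against the authenticated user name, so users such as manager_alice or manager_bob pick up the saltutil.* permissions. As a rough, standalone sketch of that key matching (plain re.match here, not Salt's actual expr_match helper; the publisher_acl dict below is illustrative):

    import re

    def acl_keys_matching_user(acl, username):
        '''Return the ACL keys whose pattern matches the given user name.'''
        return [key for key in acl if re.match(key + '$', username)]

    publisher_acl = {
        'fred': [{'web*': ['test.*', 'pkg.*']}],
        'manager_.*': ['saltutil.*'],
    }
    print(acl_keys_matching_user(publisher_acl, 'manager_alice'))  # ['manager_.*']
    print(acl_keys_matching_user(publisher_acl, 'fred'))           # ['fred']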
@@ -1,5 +1,5 @@
-salt.runners.auth module
-========================
+salt.runners.auth
+=================
 
 .. automodule:: salt.runners.auth
     :members:
@@ -1,5 +1,5 @@
-salt.runners.event module
-=========================
+salt.runners.event
+==================
 
 .. automodule:: salt.runners.event
     :members:
@@ -1,5 +1,5 @@
-salt.runners.smartos_vmadm module
-=================================
+salt.runners.smartos_vmadm
+==========================
 
 .. automodule:: salt.runners.smartos_vmadm
     :members:
@@ -1,5 +1,5 @@
-salt.runners.vistara module
-===========================
+salt.runners.vistara
+====================
 
 .. automodule:: salt.runners.vistara
    :members:
doc/topics/releases/2016.11.8.rst: new file, 1719 lines (diff suppressed because it is too large)
@@ -369,46 +369,13 @@ class LoadAuth(object):
         eauth_config = self.opts['external_auth'][eauth]
         if not groups:
             groups = []
-        group_perm_keys = [item for item in eauth_config if item.endswith('%')]  # The configured auth groups
-
-        # First we need to know if the user is allowed to proceed via any of their group memberships.
-        group_auth_match = False
-        for group_config in group_perm_keys:
-            if group_config.rstrip('%') in groups:
-                group_auth_match = True
-                break
-        # If a group_auth_match is set it means only that we have a
-        # user which matches at least one or more of the groups defined
-        # in the configuration file.
-
-        external_auth_in_db = False
-        for entry in eauth_config:
-            if entry.startswith('^'):
-                external_auth_in_db = True
-                break
-
-        # If neither a catchall, a named membership or a group
-        # membership is found, there is no need to continue. Simply
-        # deny the user access.
-        if not ((name in eauth_config) |
-                ('*' in eauth_config) |
-                group_auth_match | external_auth_in_db):
-            # Auth successful, but no matching user found in config
-            log.warning('Authorization failure occurred.')
-            return None
 
         # We now have an authenticated session and it is time to determine
         # what the user has access to.
-        auth_list = []
-        if name in eauth_config:
-            auth_list = eauth_config[name]
-        elif '*' in eauth_config:
-            auth_list = eauth_config['*']
-        if group_auth_match:
-            auth_list = self.ckminions.fill_auth_list_from_groups(
-                eauth_config,
-                groups,
-                auth_list)
+        auth_list = self.ckminions.fill_auth_list(
+            eauth_config,
+            name,
+            groups)
 
         auth_list = self.__process_acl(load, auth_list)
salt/cache/redis_cache.py (vendored): 21 changed lines

@@ -481,18 +481,17 @@ def list_(bank):
     Lists entries stored in the specified bank.
     '''
     redis_server = _get_redis_server()
-    bank_keys_redis_key = _get_bank_keys_redis_key(bank)
-    bank_keys = None
+    bank_redis_key = _get_bank_redis_key(bank)
     try:
-        bank_keys = redis_server.smembers(bank_keys_redis_key)
+        banks = redis_server.smembers(bank_redis_key)
     except (RedisConnectionError, RedisResponseError) as rerr:
-        mesg = 'Cannot list the Redis cache key {rkey}: {rerr}'.format(rkey=bank_keys_redis_key,
+        mesg = 'Cannot list the Redis cache key {rkey}: {rerr}'.format(rkey=bank_redis_key,
                                                                        rerr=rerr)
         log.error(mesg)
         raise SaltCacheError(mesg)
-    if not bank_keys:
+    if not banks:
         return []
-    return list(bank_keys)
+    return list(banks)
 
 
 def contains(bank, key):

@@ -500,15 +499,11 @@ def contains(bank, key):
     Checks if the specified bank contains the specified key.
     '''
     redis_server = _get_redis_server()
-    bank_keys_redis_key = _get_bank_keys_redis_key(bank)
-    bank_keys = None
+    bank_redis_key = _get_bank_redis_key(bank)
     try:
-        bank_keys = redis_server.smembers(bank_keys_redis_key)
+        return redis_server.sismember(bank_redis_key, key)
     except (RedisConnectionError, RedisResponseError) as rerr:
-        mesg = 'Cannot retrieve the Redis cache key {rkey}: {rerr}'.format(rkey=bank_keys_redis_key,
+        mesg = 'Cannot retrieve the Redis cache key {rkey}: {rerr}'.format(rkey=bank_redis_key,
                                                                            rerr=rerr)
         log.error(mesg)
         raise SaltCacheError(mesg)
-    if not bank_keys:
-        return False
-    return key in bank_keys
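Both corrected functions boil down to plain Redis set operations on a per-bank key. A minimal sketch with the redis-py client showing the same list/contains behaviour (the connection settings and the bank key name are assumptions for illustration, not the module's actual _get_bank_redis_key layout):

    import redis

    r = redis.StrictRedis(host='localhost', port=6379, db=0)

    # Hypothetical layout: one Redis set per bank, holding that bank's entry names.
    bank_set = 'salt_cache_bank_minions'
    r.sadd(bank_set, 'minion1', 'minion2')

    print(sorted(r.smembers(bank_set)))      # list_(): all members of the bank set
    print(r.sismember(bank_set, 'minion1'))  # contains(): one round-trip membership test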
@@ -3543,16 +3543,15 @@ def list_nodes_min(location=None, call=None):
 
     for instance in instances:
         if isinstance(instance['instancesSet']['item'], list):
-            for item in instance['instancesSet']['item']:
-                state = item['instanceState']['name']
-                name = _extract_name_tag(item)
-                id = item['instanceId']
+            items = instance['instancesSet']['item']
         else:
-            item = instance['instancesSet']['item']
+            items = [instance['instancesSet']['item']]
+
+        for item in items:
             state = item['instanceState']['name']
             name = _extract_name_tag(item)
             id = item['instanceId']
-        ret[name] = {'state': state, 'id': id}
+            ret[name] = {'state': state, 'id': id}
     return ret
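The fix replaces two near-duplicate loops with a single normalisation step: wrap a lone instancesSet item in a list, then iterate once. The same pattern as a standalone sketch:

    def as_list(value):
        '''Wrap a scalar in a list so callers can always iterate.'''
        return value if isinstance(value, list) else [value]

    print(as_list({'instanceId': 'i-0123'}))    # [{'instanceId': 'i-0123'}]
    print(as_list([{'instanceId': 'i-0123'}]))  # returned unchanged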
@@ -101,7 +101,7 @@ __virtualname__ = 'libvirt'
 log = logging.getLogger(__name__)
 
 
-def libvirt_error_handler(ctx, error):
+def libvirt_error_handler(ctx, error):  # pylint: disable=unused-argument
     '''
     Redirect stderr prints from libvirt to salt logging.
     '''
@@ -749,6 +749,10 @@ VALID_OPTS = {
     'fileserver_limit_traversal': bool,
     'fileserver_verify_config': bool,
 
+    # Optionally apply '*' permissions to any user. By default '*' is a fallback case that is
+    # applied only if the user is not matched by any other matcher.
+    'permissive_acl': bool,
+
     # Optionally enables keeping the calculated user's auth list in the token file.
     'keep_acl_in_token': bool,
@@ -1530,6 +1534,7 @@ DEFAULT_MASTER_OPTS = {
     'external_auth': {},
     'token_expire': 43200,
     'token_expire_user_override': False,
+    'permissive_acl': False,
     'keep_acl_in_token': False,
     'eauth_acl_module': '',
     'eauth_tokens': 'localfs',
@@ -170,6 +170,14 @@ def clean_old_jobs(opts):
 
 
 def mk_key(opts, user):
+    if HAS_PWD:
+        uid = None
+        try:
+            uid = pwd.getpwnam(user).pw_uid
+        except KeyError:
+            # User doesn't exist in the system
+            if opts['client_acl_verify']:
+                return None
     if salt.utils.platform.is_windows():
         # The username may contain '\' if it is in Windows
         # 'DOMAIN\username' format. Fix this for the keyfile path.
@@ -197,9 +205,9 @@ def mk_key(opts, user):
     # Write access is necessary since on subsequent runs, if the file
     # exists, it needs to be written to again. Windows enforces this.
     os.chmod(keyfile, 0o600)
-    if HAS_PWD:
+    if HAS_PWD and uid is not None:
         try:
-            os.chown(keyfile, pwd.getpwnam(user).pw_uid, -1)
+            os.chown(keyfile, uid, -1)
         except OSError:
             # The master is not being run as root and can therefore not
             # chown the key file
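Together the two hunks follow one pattern: resolve the uid once, tolerate an unknown user, and only chown when a uid was actually found. A condensed, POSIX-only sketch of that pattern (file path and function name are illustrative, not Salt's mk_key):

    import os
    import pwd

    def write_key_owned_by(path, user, data):
        uid = None
        try:
            uid = pwd.getpwnam(user).pw_uid
        except KeyError:
            pass  # user unknown on this system; skip the ownership change
        with open(path, 'w') as fh:
            fh.write(data)
        os.chmod(path, 0o600)
        if uid is not None:
            try:
                os.chown(path, uid, -1)
            except OSError:
                pass  # not running as root; leave ownership as-is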
@@ -214,27 +222,26 @@ def access_keys(opts):
     '''
     # TODO: Need a way to get all available users for systems not supported by pwd module.
     #       For now users pattern matching will not work for publisher_acl.
-    users = []
     keys = {}
     publisher_acl = opts['publisher_acl']
     acl_users = set(publisher_acl.keys())
     if opts.get('user'):
         acl_users.add(opts['user'])
     acl_users.add(salt.utils.get_user())
+    for user in acl_users:
+        log.info('Preparing the %s key for local communication', user)
+        key = mk_key(opts, user)
+        if key is not None:
+            keys[user] = key
+
+    # Check other users matching ACL patterns
     if opts['client_acl_verify'] and HAS_PWD:
         log.profile('Beginning pwd.getpwall() call in masterarpi access_keys function')
         for user in pwd.getpwall():
-            users.append(user.pw_name)
-        log.profile('End pwd.getpwall() call in masterarpi access_keys function')
-    for user in acl_users:
-        log.info('Preparing the %s key for local communication', user)
-        keys[user] = mk_key(opts, user)
-
-    # Check other users matching ACL patterns
-    if HAS_PWD:
-        for user in users:
+            user = user.pw_name
             if user not in keys and salt.utils.check_whitelist_blacklist(user, whitelist=acl_users):
                 keys[user] = mk_key(opts, user)
+        log.profile('End pwd.getpwall() call in masterarpi access_keys function')
 
     return keys
@@ -599,9 +599,14 @@ def exists(vpc_id=None, name=None, cidr=None, tags=None, region=None, key=None,
     try:
         vpc_ids = _find_vpcs(vpc_id=vpc_id, vpc_name=name, cidr=cidr, tags=tags,
                              region=region, key=key, keyid=keyid, profile=profile)
-        return {'exists': bool(vpc_ids)}
-    except BotoServerError as e:
-        return {'error': salt.utils.boto.get_error(e)}
+    except BotoServerError as err:
+        boto_err = salt.utils.boto.get_error(err)
+        if boto_err.get('aws', {}).get('code') == 'InvalidVpcID.NotFound':
+            # VPC was not found: handle the error and return False.
+            return {'exists': False}
+        return {'error': boto_err}
+
+    return {'exists': bool(vpc_ids)}
 
 
 def create(cidr_block, instance_tenancy=None, vpc_name=None,
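The recurring idea in these boto_vpc changes is to inspect the structured error dict from salt.utils.boto.get_error and treat a NotFound code as an answer rather than a failure. The decision in isolation (the dict shape mirrors the code above; it is not boto's raw exception):

    def interpret_vpc_error(boto_err):
        '''Map a get_error()-style dict onto the module's return convention.'''
        if boto_err.get('aws', {}).get('code') == 'InvalidVpcID.NotFound':
            return {'exists': False}  # a missing VPC is a result, not an error
        return {'error': boto_err}

    print(interpret_vpc_error({'aws': {'code': 'InvalidVpcID.NotFound'}}))   # {'exists': False}
    print(interpret_vpc_error({'aws': {'code': 'UnauthorizedOperation'}}))   # {'error': {...}}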
@@ -723,27 +728,34 @@ def describe(vpc_id=None, vpc_name=None, region=None, key=None,
     try:
         conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
         vpc_id = check_vpc(vpc_id, vpc_name, region, key, keyid, profile)
-        if not vpc_id:
-            return {'vpc': None}
-
-        filter_parameters = {'vpc_ids': vpc_id}
-
-        vpcs = conn.get_all_vpcs(**filter_parameters)
-
-        if vpcs:
-            vpc = vpcs[0]  # Found!
-            log.debug('Found VPC: {0}'.format(vpc.id))
-
-            keys = ('id', 'cidr_block', 'is_default', 'state', 'tags',
-                    'dhcp_options_id', 'instance_tenancy')
-            _r = dict([(k, getattr(vpc, k)) for k in keys])
-            _r.update({'region': getattr(vpc, 'region').name})
-            return {'vpc': _r}
-        else:
-            return {'vpc': None}
-
-    except BotoServerError as e:
-        return {'error': salt.utils.boto.get_error(e)}
+    except BotoServerError as err:
+        boto_err = salt.utils.boto.get_error(err)
+        if boto_err.get('aws', {}).get('code') == 'InvalidVpcID.NotFound':
+            # VPC was not found: handle the error and return None.
+            return {'vpc': None}
+        return {'error': boto_err}
+
+    if not vpc_id:
+        return {'vpc': None}
+
+    filter_parameters = {'vpc_ids': vpc_id}
+
+    try:
+        vpcs = conn.get_all_vpcs(**filter_parameters)
+    except BotoServerError as err:
+        return {'error': salt.utils.boto.get_error(err)}
+
+    if vpcs:
+        vpc = vpcs[0]  # Found!
+        log.debug('Found VPC: {0}'.format(vpc.id))
+
+        keys = ('id', 'cidr_block', 'is_default', 'state', 'tags',
+                'dhcp_options_id', 'instance_tenancy')
+        _r = dict([(k, getattr(vpc, k)) for k in keys])
+        _r.update({'region': getattr(vpc, 'region').name})
+        return {'vpc': _r}
+    else:
+        return {'vpc': None}
 
 
 def describe_vpcs(vpc_id=None, name=None, cidr=None, tags=None,
@@ -809,7 +821,7 @@ def _find_subnets(subnet_name=None, vpc_id=None, cidr=None, tags=None, conn=None
     Given subnet properties, find and return matching subnet ids
     '''
 
-    if not any(subnet_name, tags, cidr):
+    if not any([subnet_name, tags, cidr]):
         raise SaltInvocationError('At least one of the following must be '
                                   'specified: subnet_name, cidr or tags.')
 
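This one-character-looking fix removes a real bug: the builtin any() takes a single iterable, so the old three-argument call raised TypeError whenever it was reached. A quick illustration:

    # any() expects one iterable argument:
    print(any([None, None, '10.0.0.0/24']))   # True

    # The pre-fix form passes three positional arguments and fails immediately:
    try:
        any(None, None, '10.0.0.0/24')
    except TypeError as exc:
        print(exc)  # e.g. "any() takes exactly one argument (3 given)"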
@@ -927,34 +939,38 @@ def subnet_exists(subnet_id=None, name=None, subnet_name=None, cidr=None,
 
     try:
         conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
-        filter_parameters = {'filters': {}}
-
-        if subnet_id:
-            filter_parameters['subnet_ids'] = [subnet_id]
-
-        if subnet_name:
-            filter_parameters['filters']['tag:Name'] = subnet_name
-
-        if cidr:
-            filter_parameters['filters']['cidr'] = cidr
-
-        if tags:
-            for tag_name, tag_value in six.iteritems(tags):
-                filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value
-
-        if zones:
-            filter_parameters['filters']['availability_zone'] = zones
+    except BotoServerError as err:
+        return {'error': salt.utils.boto.get_error(err)}
 
+    filter_parameters = {'filters': {}}
+    if subnet_id:
+        filter_parameters['subnet_ids'] = [subnet_id]
+    if subnet_name:
+        filter_parameters['filters']['tag:Name'] = subnet_name
+    if cidr:
+        filter_parameters['filters']['cidr'] = cidr
+    if tags:
+        for tag_name, tag_value in six.iteritems(tags):
+            filter_parameters['filters']['tag:{0}'.format(tag_name)] = tag_value
+    if zones:
+        filter_parameters['filters']['availability_zone'] = zones
+
+    try:
         subnets = conn.get_all_subnets(**filter_parameters)
-        log.debug('The filters criteria {0} matched the following subnets:{1}'.format(filter_parameters, subnets))
-        if subnets:
-            log.info('Subnet {0} exists.'.format(subnet_name or subnet_id))
-            return {'exists': True}
-        else:
-            log.info('Subnet {0} does not exist.'.format(subnet_name or subnet_id))
-    except BotoServerError as e:
-        return {'error': salt.utils.boto.get_error(e)}
+    except BotoServerError as err:
+        boto_err = salt.utils.boto.get_error(err)
+        if boto_err.get('aws', {}).get('code') == 'InvalidSubnetID.NotFound':
+            # Subnet was not found: handle the error and return False.
+            return {'exists': False}
+        return {'error': boto_err}
+
+    log.debug('The filters criteria {0} matched the following subnets:{1}'.format(filter_parameters, subnets))
+    if subnets:
+        log.info('Subnet {0} exists.'.format(subnet_name or subnet_id))
+        return {'exists': True}
+    else:
+        log.info('Subnet {0} does not exist.'.format(subnet_name or subnet_id))
+        return {'exists': False}
 
 
 def get_subnet_association(subnets, region=None, key=None, keyid=None,
@@ -1347,6 +1347,7 @@ def install(name=None,
     to_install = []
     to_downgrade = []
     to_reinstall = []
+    _available = {}
     # The above three lists will be populated with tuples containing the
     # package name and the string being used for this particular package
     # modification. The reason for this method is that the string we use for
@@ -414,7 +414,7 @@ def extracted(name,
         .. versionadded:: 2017.7.3
 
     keep : True
-        Same as ``keep_source``.
+        Same as ``keep_source``, kept for backward-compatibility.
 
         .. note::
             If both ``keep_source`` and ``keep`` are used, ``keep`` will be
@@ -648,6 +648,21 @@ def extracted(name,
     # Remove pub kwargs as they're irrelevant here.
     kwargs = salt.utils.args.clean_kwargs(**kwargs)
 
+    if 'keep_source' in kwargs and 'keep' in kwargs:
+        ret.setdefault('warnings', []).append(
+            'Both \'keep_source\' and \'keep\' were used. Since these both '
+            'do the same thing, \'keep\' was ignored.'
+        )
+        keep_source = bool(kwargs.pop('keep_source'))
+        kwargs.pop('keep')
+    elif 'keep_source' in kwargs:
+        keep_source = bool(kwargs.pop('keep_source'))
+    elif 'keep' in kwargs:
+        keep_source = bool(kwargs.pop('keep'))
+    else:
+        # Neither was passed, default is True
+        keep_source = True
+
     if 'keep_source' in kwargs and 'keep' in kwargs:
         ret.setdefault('warnings', []).append(
             'Both \'keep_source\' and \'keep\' were used. Since these both '
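The added block resolves two aliases for one option, preferring keep_source and recording a warning when both are supplied. The same resolution logic in isolation (the helper name is illustrative):

    def resolve_keep(kwargs, warnings):
        '''Pop the 'keep_source'/'keep' aliases from kwargs and return one boolean.'''
        if 'keep_source' in kwargs and 'keep' in kwargs:
            warnings.append("Both 'keep_source' and 'keep' were used; 'keep' was ignored.")
            kwargs.pop('keep')
            return bool(kwargs.pop('keep_source'))
        if 'keep_source' in kwargs:
            return bool(kwargs.pop('keep_source'))
        if 'keep' in kwargs:
            return bool(kwargs.pop('keep'))
        return True  # default when neither alias is passed

    warnings = []
    print(resolve_keep({'keep_source': False, 'keep': True}, warnings))  # False, plus one recorded warning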
@@ -6637,6 +6637,28 @@ def cached(name,
     else:
         pre_hash = None
 
+    def _try_cache(path, checksum):
+        '''
+        This helper is not needed anymore in develop as the fileclient in the
+        develop branch now has means of skipping a download if the existing
+        hash matches one passed to cp.cache_file. Remove this helper and the
+        code that invokes it, once we have merged forward into develop.
+        '''
+        if not path or not checksum:
+            return True
+        form = salt.utils.files.HASHES_REVMAP.get(len(checksum))
+        if form is None:
+            # Shouldn't happen, an invalid checksum length should be caught
+            # before we get here. But in the event this gets through, don't let
+            # it cause any trouble, and just return True.
+            return True
+        try:
+            return salt.utils.get_hash(path, form=form) != checksum
+        except (IOError, OSError, ValueError):
+            # Again, shouldn't happen, but don't let invalid input/permissions
+            # in the call to get_hash blow this up.
+            return True
+
     # Cache the file. Note that this will not actually download the file if
     # either of the following is true:
     # 1. source is a salt:// URL and the fileserver determines that the hash
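The helper infers the hash algorithm from the checksum's length; salt.utils.files.HASHES_REVMAP maps hex-digest lengths back to algorithm names for exactly this purpose. A self-contained sketch of the same idea using only hashlib (the mapping below is an assumption standing in for HASHES_REVMAP):

    import hashlib

    # Hex digest length -> algorithm name, analogous to HASHES_REVMAP.
    DIGEST_LEN_TO_ALGO = {32: 'md5', 40: 'sha1', 64: 'sha256', 128: 'sha512'}

    def file_matches_checksum(path, checksum):
        algo = DIGEST_LEN_TO_ALGO.get(len(checksum))
        if algo is None:
            return False  # unknown checksum length
        digest = hashlib.new(algo)
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(65536), b''):
                digest.update(chunk)
        return digest.hexdigest() == checksum.lower()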
@@ -6645,14 +6667,18 @@ def cached(name,
     # matches the cached copy.
     # Remote, non salt:// sources _will_ download if a copy of the file was
     # not already present in the minion cache.
-    try:
-        local_copy = __salt__['cp.cache_file'](
-            name,
-            saltenv=saltenv,
-            source_hash=source_sum.get('hsum'))
-    except Exception as exc:
-        ret['comment'] = exc.__str__()
-        return ret
+    if _try_cache(local_copy, source_sum.get('hsum')):
+        # The _try_cache helper is obsolete in the develop branch. Once merged
+        # forward, remove the helper as well as this if statement, and dedent
+        # the below block.
+        try:
+            local_copy = __salt__['cp.cache_file'](
+                name,
+                saltenv=saltenv,
+                source_hash=source_sum.get('hsum'))
+        except Exception as exc:
+            ret['comment'] = exc.__str__()
+            return ret
 
     if not local_copy:
         ret['comment'] = (
@@ -966,6 +966,31 @@ class CkMinions(object):
             auth_list.append(matcher)
         return auth_list
 
+    def fill_auth_list(self, auth_provider, name, groups, auth_list=None, permissive=None):
+        '''
+        Returns a list of authorisation matchers that a user is eligible for.
+        This list is a combination of the provided personal matchers plus the
+        matchers of any group the user is in.
+        '''
+        if auth_list is None:
+            auth_list = []
+        if permissive is None:
+            permissive = self.opts.get('permissive_acl')
+        name_matched = False
+        for match in auth_provider:
+            if match == '*' and not permissive:
+                continue
+            if match.endswith('%'):
+                if match.rstrip('%') in groups:
+                    auth_list.extend(auth_provider[match])
+            else:
+                if salt.utils.expr_match(match, name):
+                    name_matched = True
+                    auth_list.extend(auth_provider[match])
+        if not permissive and not name_matched and '*' in auth_provider:
+            auth_list.extend(auth_provider['*'])
+        return auth_list
+
     def wheel_check(self, auth_list, fun, args):
         '''
         Check special API permissions
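To make the new matcher semantics concrete, here is a standalone approximation of the same merge logic, using fnmatch in place of salt.utils.expr_match, so the matching rules are only roughly Salt's:

    from fnmatch import fnmatch

    def fill_auth_list(auth_provider, name, groups, permissive=False):
        auth_list = []
        name_matched = False
        for match in auth_provider:
            if match == '*' and not permissive:
                continue                    # '*' is only a fallback unless permissive_acl is set
            if match.endswith('%'):         # group entry
                if match.rstrip('%') in groups:
                    auth_list.extend(auth_provider[match])
            elif fnmatch(name, match):      # user entry
                name_matched = True
                auth_list.extend(auth_provider[match])
        if not permissive and not name_matched and '*' in auth_provider:
            auth_list.extend(auth_provider['*'])
        return auth_list

    eauth = {'*': ['test.ping'], 'admins%': ['.*'], 'fred': ['pkg.*']}
    print(fill_auth_list(eauth, 'fred', []))           # ['pkg.*']   ('*' skipped because the name matched)
    print(fill_auth_list(eauth, 'alice', ['admins']))  # ['.*', 'test.ping']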
@@ -982,6 +1007,8 @@ class CkMinions(object):
         '''
         Check special API permissions
         '''
+        if not auth_list:
+            return False
         if form != 'cloud':
             comps = fun.split('.')
             if len(comps) != 2:
@@ -63,7 +63,7 @@ class LocalFuncsTestCase(TestCase):
                                u'message': u'A command invocation error occurred: Check syntax.'}}
 
         with patch('salt.auth.LoadAuth.authenticate_token', MagicMock(return_value=mock_token)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.local_funcs.runner(load)
 
         self.assertDictEqual(mock_ret, ret)

@@ -93,7 +93,7 @@ class LocalFuncsTestCase(TestCase):
 
         self.assertDictEqual(mock_ret, ret)
 
-    def test_runner_eauth_salt_invocation_errpr(self):
+    def test_runner_eauth_salt_invocation_error(self):
         '''
         Asserts that an EauthAuthenticationError is returned when the user authenticates, but the
         command is malformed.

@@ -102,7 +102,7 @@ class LocalFuncsTestCase(TestCase):
         mock_ret = {u'error': {u'name': u'SaltInvocationError',
                                u'message': u'A command invocation error occurred: Check syntax.'}}
         with patch('salt.auth.LoadAuth.authenticate_eauth', MagicMock(return_value=True)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.local_funcs.runner(load)
 
         self.assertDictEqual(mock_ret, ret)

@@ -146,7 +146,7 @@ class LocalFuncsTestCase(TestCase):
                                u'message': u'A command invocation error occurred: Check syntax.'}}
 
         with patch('salt.auth.LoadAuth.authenticate_token', MagicMock(return_value=mock_token)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.local_funcs.wheel(load)
 
         self.assertDictEqual(mock_ret, ret)

@@ -176,7 +176,7 @@ class LocalFuncsTestCase(TestCase):
 
         self.assertDictEqual(mock_ret, ret)
 
-    def test_wheel_eauth_salt_invocation_errpr(self):
+    def test_wheel_eauth_salt_invocation_error(self):
         '''
         Asserts that an EauthAuthenticationError is returned when the user authenticates, but the
         command is malformed.

@@ -185,7 +185,7 @@ class LocalFuncsTestCase(TestCase):
         mock_ret = {u'error': {u'name': u'SaltInvocationError',
                                u'message': u'A command invocation error occurred: Check syntax.'}}
         with patch('salt.auth.LoadAuth.authenticate_eauth', MagicMock(return_value=True)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.local_funcs.wheel(load)
 
         self.assertDictEqual(mock_ret, ret)
@@ -63,7 +63,7 @@ class ClearFuncsTestCase(TestCase):
                                u'message': u'A command invocation error occurred: Check syntax.'}}
 
         with patch('salt.auth.LoadAuth.authenticate_token', MagicMock(return_value=mock_token)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.clear_funcs.runner(clear_load)
 
         self.assertDictEqual(mock_ret, ret)

@@ -93,7 +93,7 @@ class ClearFuncsTestCase(TestCase):
 
         self.assertDictEqual(mock_ret, ret)
 
-    def test_runner_eauth_salt_invocation_errpr(self):
+    def test_runner_eauth_salt_invocation_error(self):
         '''
         Asserts that an EauthAuthenticationError is returned when the user authenticates, but the
         command is malformed.

@@ -102,7 +102,7 @@ class ClearFuncsTestCase(TestCase):
         mock_ret = {u'error': {u'name': u'SaltInvocationError',
                                u'message': u'A command invocation error occurred: Check syntax.'}}
         with patch('salt.auth.LoadAuth.authenticate_eauth', MagicMock(return_value=True)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.clear_funcs.runner(clear_load)
 
         self.assertDictEqual(mock_ret, ret)

@@ -155,7 +155,7 @@ class ClearFuncsTestCase(TestCase):
                                u'message': u'A command invocation error occurred: Check syntax.'}}
 
         with patch('salt.auth.LoadAuth.authenticate_token', MagicMock(return_value=mock_token)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.clear_funcs.wheel(clear_load)
 
         self.assertDictEqual(mock_ret, ret)

@@ -185,7 +185,7 @@ class ClearFuncsTestCase(TestCase):
 
         self.assertDictEqual(mock_ret, ret)
 
-    def test_wheel_eauth_salt_invocation_errpr(self):
+    def test_wheel_eauth_salt_invocation_error(self):
         '''
         Asserts that an EauthAuthenticationError is returned when the user authenticates, but the
         command is malformed.

@@ -194,7 +194,7 @@ class ClearFuncsTestCase(TestCase):
         mock_ret = {u'error': {u'name': u'SaltInvocationError',
                                u'message': u'A command invocation error occurred: Check syntax.'}}
         with patch('salt.auth.LoadAuth.authenticate_eauth', MagicMock(return_value=True)), \
-                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=[])):
+                patch('salt.auth.LoadAuth.get_auth_list', MagicMock(return_value=['testing'])):
             ret = self.clear_funcs.wheel(clear_load)
 
         self.assertDictEqual(mock_ret, ret)
@@ -958,5 +958,47 @@ class SaltAPIParserTestCase(LogSettingsParserTests):
         self.addCleanup(delattr, self, 'parser')
 
 
+@skipIf(NO_MOCK, NO_MOCK_REASON)
+class DaemonMixInTestCase(TestCase):
+    '''
+    Tests the PIDfile deletion in the DaemonMixIn.
+    '''
+
+    def setUp(self):
+        '''
+        Setting up
+        '''
+        # Set PID
+        self.pid = '/some/fake.pid'
+
+        # Setup mixin
+        self.mixin = salt.utils.parsers.DaemonMixIn()
+        self.mixin.info = None
+        self.mixin.config = {}
+        self.mixin.config['pidfile'] = self.pid
+
+    def test_pid_file_deletion(self):
+        '''
+        PIDfile deletion without exception.
+        '''
+        with patch('os.unlink', MagicMock()) as os_unlink:
+            with patch('os.path.isfile', MagicMock(return_value=True)):
+                with patch.object(self.mixin, 'info', MagicMock()):
+                    self.mixin._mixin_before_exit()
+                    assert self.mixin.info.call_count == 0
+                    assert os_unlink.call_count == 1
+
+    def test_pid_file_deletion_with_oserror(self):
+        '''
+        PIDfile deletion with exception
+        '''
+        with patch('os.unlink', MagicMock(side_effect=OSError())) as os_unlink:
+            with patch('os.path.isfile', MagicMock(return_value=True)):
+                with patch.object(self.mixin, 'info', MagicMock()):
+                    self.mixin._mixin_before_exit()
+                    assert os_unlink.call_count == 1
+                    self.mixin.info.assert_called_with(
+                        'PIDfile could not be deleted: {0}'.format(self.pid))
+
+
 # Hide the class from unittest framework when it searches for TestCase classes in the module
 del LogSettingsParserTests