Mirror of https://github.com/saltstack/salt.git
Merge branch '2017.7' into 2017.7_update_safe_filename_func
Commit e97651d49b

10 changed files with 614 additions and 204 deletions
@@ -25,6 +25,9 @@ configuration:
     - web*:
       - test.*
      - pkg.*
+  # Allow managers to use saltutil module functions
+  manager_.*:
+    - saltutil.*
 
 Permission Issues
 -----------------

@@ -377,46 +377,13 @@ class LoadAuth(object):
         eauth_config = self.opts['external_auth'][eauth]
-        if not groups:
-            groups = []
-        group_perm_keys = [item for item in eauth_config if item.endswith('%')]  # The configured auth groups
-
-        # First we need to know if the user is allowed to proceed via any of their group memberships.
-        group_auth_match = False
-        for group_config in group_perm_keys:
-            if group_config.rstrip('%') in groups:
-                group_auth_match = True
-                break
-        # If a group_auth_match is set it means only that we have a
-        # user which matches at least one or more of the groups defined
-        # in the configuration file.
-
-        external_auth_in_db = False
-        for entry in eauth_config:
-            if entry.startswith('^'):
-                external_auth_in_db = True
-                break
-
-        # If neither a catchall, a named membership or a group
-        # membership is found, there is no need to continue. Simply
-        # deny the user access.
-        if not ((name in eauth_config) |
-                ('*' in eauth_config) |
-                group_auth_match | external_auth_in_db):
-            # Auth successful, but no matching user found in config
-            log.warning('Authorization failure occurred.')
-            return None
-
-        # We now have an authenticated session and it is time to determine
-        # what the user has access to.
-        auth_list = []
-        if name in eauth_config:
-            auth_list = eauth_config[name]
-        elif '*' in eauth_config:
-            auth_list = eauth_config['*']
-        if group_auth_match:
-            auth_list = self.ckminions.fill_auth_list_from_groups(
-                eauth_config,
-                groups,
-                auth_list)
+        auth_list = self.ckminions.fill_auth_list(
+            eauth_config,
+            name,
+            groups)
 
         auth_list = self.__process_acl(load, auth_list)

@@ -717,6 +717,10 @@ VALID_OPTS = {
     'fileserver_limit_traversal': bool,
     'fileserver_verify_config': bool,
 
+    # Optionally apply '*' permissions to any user. By default '*' is a
+    # fallback case that is applied only if the user is not matched by any
+    # other matcher.
+    'permissive_acl': bool,
+
+    # Optionally enables keeping the calculated user's auth list in the token file.
+    'keep_acl_in_token': bool,

@@ -1466,6 +1470,7 @@ DEFAULT_MASTER_OPTS = {
     'external_auth': {},
     'token_expire': 43200,
     'token_expire_user_override': False,
+    'permissive_acl': False,
+    'keep_acl_in_token': False,
     'eauth_acl_module': '',
     'extension_modules': os.path.join(salt.syspaths.CACHE_DIR, 'master', 'extmods'),

@@ -204,6 +204,14 @@ def clean_old_jobs(opts):
 
 
 def mk_key(opts, user):
+    if HAS_PWD:
+        uid = None
+        try:
+            uid = pwd.getpwnam(user).pw_uid
+        except KeyError:
+            # User doesn't exist in the system
+            if opts['client_acl_verify']:
+                return None
     if salt.utils.is_windows():
         # The username may contain '\' if it is in Windows
         # 'DOMAIN\username' format. Fix this for the keyfile path.
@@ -231,9 +239,9 @@ def mk_key(opts, user):
         # Write access is necessary since on subsequent runs, if the file
         # exists, it needs to be written to again. Windows enforces this.
         os.chmod(keyfile, 0o600)
-    if HAS_PWD:
+    if HAS_PWD and uid is not None:
         try:
-            os.chown(keyfile, pwd.getpwnam(user).pw_uid, -1)
+            os.chown(keyfile, uid, -1)
         except OSError:
             # The master is not being run as root and can therefore not
             # chown the key file

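The point of the new block is to resolve the account to a uid once, up front: if the user is unknown and client_acl_verify is enabled, mk_key() now bails out with None instead of failing later in os.chown(). A standalone sketch of the same lookup pattern (resolve_uid is our name, not Salt's, and it raises where mk_key() returns None):

    import pwd

    def resolve_uid(user, verify=True):
        # Mirrors the new guard in mk_key(): look the user up once. A missing
        # account either aborts (verify=True, like opts['client_acl_verify'])
        # or just disables the later chown by leaving the uid as None.
        try:
            return pwd.getpwnam(user).pw_uid
        except KeyError:
            if verify:
                raise LookupError('user {0} does not exist'.format(user))
            return None
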
@@ -248,27 +256,26 @@ def access_keys(opts):
     '''
+    # TODO: Need a way to get all available users for systems not supported by pwd module.
+    #       For now users pattern matching will not work for publisher_acl.
-    users = []
     keys = {}
     publisher_acl = opts['publisher_acl']
     acl_users = set(publisher_acl.keys())
     if opts.get('user'):
         acl_users.add(opts['user'])
     acl_users.add(salt.utils.get_user())
-    if opts['client_acl_verify'] and HAS_PWD:
-        log.profile('Beginning pwd.getpwall() call in masterarpi access_keys function')
-        for user in pwd.getpwall():
-            users.append(user.pw_name)
-        log.profile('End pwd.getpwall() call in masterarpi access_keys function')
     for user in acl_users:
         log.info('Preparing the %s key for local communication', user)
-        keys[user] = mk_key(opts, user)
+        key = mk_key(opts, user)
+        if key is not None:
+            keys[user] = key
 
     # Check other users matching ACL patterns
-    if HAS_PWD:
-        for user in users:
-            if user not in keys and salt.utils.check_whitelist_blacklist(user, whitelist=acl_users):
-                keys[user] = mk_key(opts, user)
+    if opts['client_acl_verify'] and HAS_PWD:
+        log.profile('Beginning pwd.getpwall() call in masterarpi access_keys function')
+        for user in pwd.getpwall():
+            user = user.pw_name
+            if user not in keys and salt.utils.check_whitelist_blacklist(user, whitelist=acl_users):
+                keys[user] = mk_key(opts, user)
+        log.profile('End pwd.getpwall() call in masterarpi access_keys function')
 
     return keys

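With the reorder, keys are first created for the literal publisher_acl entries, then pwd.getpwall() is walked once and only account names matching an ACL pattern get keys. A minimal sketch of that second pass using plain fnmatch globs (Salt's salt.utils.check_whitelist_blacklist additionally accepts regexes):

    import fnmatch
    import pwd

    def matching_system_users(acl_patterns):
        # Glob-match every local account name against the publisher_acl
        # patterns, the same filtering the reworked loop performs inline.
        for entry in pwd.getpwall():
            if any(fnmatch.fnmatch(entry.pw_name, pat) for pat in acl_patterns):
                yield entry.pw_name
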
@@ -54,7 +54,8 @@ import salt.utils.files
 import salt.utils.locales
 import salt.utils.templates
 import salt.utils.url
-from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError, get_error_message as _get_error_message
+from salt.exceptions import CommandExecutionError, SaltInvocationError, get_error_message as _get_error_message
+from salt.utils.files import HASHES, HASHES_REVMAP
 
 log = logging.getLogger(__name__)

@@ -62,16 +63,6 @@ __func_alias__ = {
     'makedirs_': 'makedirs'
 }
 
-HASHES = {
-    'sha512': 128,
-    'sha384': 96,
-    'sha256': 64,
-    'sha224': 56,
-    'sha1': 40,
-    'md5': 32,
-}
-HASHES_REVMAP = dict([(y, x) for x, y in six.iteritems(HASHES)])
-
 
 def __virtual__():
     '''

@@ -3627,14 +3618,8 @@ def source_list(source, source_hash, saltenv):
                             ret = (single_src, single_hash)
                             break
                     elif proto.startswith('http') or proto == 'ftp':
-                        try:
-                            if __salt__['cp.cache_file'](single_src):
-                                ret = (single_src, single_hash)
-                                break
-                        except MinionError as exc:
-                            # Error downloading file. Log the caught exception and
-                            # continue on to the next source.
-                            log.exception(exc)
+                        ret = (single_src, single_hash)
+                        break
                     elif proto == 'file' and os.path.exists(urlparsed_single_src.path):
                         ret = (single_src, single_hash)
                         break

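Both source_list hunks drop the eager cp.cache_file() call for http/ftp sources: the function now only inspects the URL scheme and defers the download (and any hash enforcement) to the later caching step. Roughly, with a hypothetical URL and Salt on the import path:

    from salt.ext.six.moves.urllib.parse import urlparse

    src = 'https://repo.example.com/app.tar.gz'  # hypothetical source
    proto = urlparse(src).scheme
    assert proto.startswith('http')
    # source_list() now accepts such a source as-is; downloading happens
    # later, when the file is actually cached.
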
@@ -3654,9 +3639,8 @@ def source_list(source, source_hash, saltenv):
                         ret = (single, source_hash)
                         break
                 elif proto.startswith('http') or proto == 'ftp':
-                    if __salt__['cp.cache_file'](single):
-                        ret = (single, source_hash)
-                        break
+                    ret = (single, source_hash)
+                    break
                 elif single.startswith('/') and os.path.exists(single):
                     ret = (single, source_hash)
                     break

@@ -4478,7 +4462,7 @@ def check_file_meta(
     '''
     changes = {}
     if not source_sum:
-        source_sum = dict()
+        source_sum = {}
     lstats = stats(name, hash_type=source_sum.get('hash_type', None), follow_symlinks=False)
     if not lstats:
         changes['newfile'] = name

@@ -1262,6 +1262,7 @@ def install(name=None,
     to_install = []
     to_downgrade = []
     to_reinstall = []
+    _available = {}
     # The above three lists will be populated with tuples containing the
     # package name and the string being used for this particular package
     # modification. The reason for this method is that the string we use for

@@ -1281,7 +1282,8 @@ def install(name=None,
     if pkg_type == 'repository':
         has_wildcards = [x for x, y in six.iteritems(pkg_params)
                          if y is not None and '*' in y]
-        _available = list_repo_pkgs(*has_wildcards, byrepo=False, **kwargs)
+        if has_wildcards:
+            _available = list_repo_pkgs(*has_wildcards, byrepo=False, **kwargs)
         pkg_params_items = six.iteritems(pkg_params)
     elif pkg_type == 'advisory':
         pkg_params_items = []

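The guard matters because list_repo_pkgs() called with an empty wildcard list would enumerate the whole repository; the `_available = {}` initialization added in the previous hunk keeps later lookups safe when the call is skipped. A small illustration with hypothetical pkg_params:

    # Hypothetical pkg_params, as built from a pkg.installed call:
    pkg_params = {'bash': None, 'httpd': '2.4*', 'vim-enhanced': '8.0.1*'}

    has_wildcards = [x for x, y in pkg_params.items()
                     if y is not None and '*' in y]
    assert sorted(has_wildcards) == ['httpd', 'vim-enhanced']
    # Only now is list_repo_pkgs() worth calling; with no wildcards the
    # guard leaves _available as the empty dict initialized earlier.
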
@@ -61,16 +61,30 @@ def _gen_checksum(path):
             'hash_type': __opts__['hash_type']}
 
 
-def _update_checksum(cached_source):
-    cached_source_sum = '.'.join((cached_source, 'hash'))
-    source_sum = _gen_checksum(cached_source)
+def _checksum_file_path(path):
+    relpath = '.'.join((os.path.relpath(path, __opts__['cachedir']), 'hash'))
+    if re.match(r'..[/\\]', relpath):
+        # path is a local file
+        relpath = salt.utils.path_join(
+            'local',
+            os.path.splitdrive(path)[-1].lstrip('/\\'),
+        )
+    return salt.utils.path_join(__opts__['cachedir'], 'archive_hash', relpath)
+
+
+def _update_checksum(path):
+    checksum_file = _checksum_file_path(path)
+    checksum_dir = os.path.dirname(checksum_file)
+    if not os.path.isdir(checksum_dir):
+        os.makedirs(checksum_dir)
+    source_sum = _gen_checksum(path)
     hash_type = source_sum.get('hash_type')
     hsum = source_sum.get('hsum')
     if hash_type and hsum:
         lines = []
         try:
             try:
-                with salt.utils.fopen(cached_source_sum, 'r') as fp_:
+                with salt.utils.fopen(checksum_file, 'r') as fp_:
                     for line in fp_:
                         try:
                             lines.append(line.rstrip('\n').split(':', 1))

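The effect of _checksum_file_path() is to move hash files out of the cached archive's own directory into a parallel tree under cachedir/archive_hash, so caching another file with the same name can no longer clobber them; local files that resolve outside the cachedir land under a 'local' prefix instead of a '..'-relative path. A rough illustration of the layout with made-up paths (plain os/posixpath arithmetic, not Salt's helper):

    import posixpath

    cachedir = '/var/cache/salt/minion'                       # hypothetical
    cached = posixpath.join(cachedir, 'files', 'base', 'app.tar.gz')
    relpath = posixpath.relpath(cached, cachedir) + '.hash'
    print(posixpath.join(cachedir, 'archive_hash', relpath))
    # /var/cache/salt/minion/archive_hash/files/base/app.tar.gz.hash
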
@@ -80,7 +94,7 @@ def _update_checksum(cached_source):
             if exc.errno != errno.ENOENT:
                 raise
 
-        with salt.utils.fopen(cached_source_sum, 'w') as fp_:
+        with salt.utils.fopen(checksum_file, 'w') as fp_:
             for line in lines:
                 if line[0] == hash_type:
                     line[1] = hsum

@@ -90,16 +104,16 @@ def _update_checksum(cached_source):
     except (IOError, OSError) as exc:
         log.warning(
             'Failed to update checksum for %s: %s',
-            cached_source, exc.__str__()
+            path, exc.__str__(), exc_info=True
         )
 
 
-def _read_cached_checksum(cached_source, form=None):
+def _read_cached_checksum(path, form=None):
     if form is None:
         form = __opts__['hash_type']
-    path = '.'.join((cached_source, 'hash'))
+    checksum_file = _checksum_file_path(path)
     try:
-        with salt.utils.fopen(path, 'r') as fp_:
+        with salt.utils.fopen(checksum_file, 'r') as fp_:
             for line in fp_:
                 # Should only be one line in this file but just in case it
                 # isn't, read only a single line to avoid overuse of memory.

@@ -114,9 +128,9 @@ def _read_cached_checksum(cached_source, form=None):
             return {'hash_type': hash_type, 'hsum': hsum}
 
 
-def _compare_checksum(cached_source, source_sum):
+def _compare_checksum(cached, source_sum):
     cached_sum = _read_cached_checksum(
-        cached_source,
+        cached,
         form=source_sum.get('hash_type', __opts__['hash_type'])
     )
     return source_sum == cached_sum

@@ -152,7 +166,6 @@ def extracted(name,
               user=None,
               group=None,
               if_missing=None,
-              keep=False,
               trim_output=False,
               use_cmd_unzip=None,
               extract_perms=True,

@@ -389,6 +402,22 @@ def extracted(name,
 
         .. versionadded:: 2016.3.4
 
+    keep_source : True
+        For ``source`` archives not local to the minion (i.e. from the Salt
+        fileserver or a remote source such as ``http(s)`` or ``ftp``), Salt
+        will need to download the archive to the minion cache before it can
+        be extracted. To remove the downloaded archive after extraction, set
+        this argument to ``False``.
+
+        .. versionadded:: 2017.7.3
+
+    keep : True
+        Same as ``keep_source``, kept for backward-compatibility.
+
+        .. note::
+            If both ``keep_source`` and ``keep`` are used, ``keep`` will be
+            ignored.
+
     password
         **For ZIP archives only.** Password used for extraction.

@@ -527,13 +556,6 @@ def extracted(name,
         simply checked for existence and extraction will be skipped if
         it is present.
 
-    keep : False
-        For ``source`` archives not local to the minion (i.e. from the Salt
-        fileserver or a remote source such as ``http(s)`` or ``ftp``), Salt
-        will need to download the archive to the minion cache before they can
-        be extracted. After extraction, these source archives will be removed
-        unless this argument is set to ``True``.
-
     trim_output : False
         Useful for archives with many files in them. This can either be set to
         ``True`` (in which case only the first 100 files extracted will be

@@ -635,6 +657,21 @@ def extracted(name,
     # Remove pub kwargs as they're irrelevant here.
     kwargs = salt.utils.clean_kwargs(**kwargs)
 
+    if 'keep_source' in kwargs and 'keep' in kwargs:
+        ret.setdefault('warnings', []).append(
+            'Both \'keep_source\' and \'keep\' were used. Since these both '
+            'do the same thing, \'keep\' was ignored.'
+        )
+        keep_source = bool(kwargs.pop('keep_source'))
+        kwargs.pop('keep')
+    elif 'keep_source' in kwargs:
+        keep_source = bool(kwargs.pop('keep_source'))
+    elif 'keep' in kwargs:
+        keep_source = bool(kwargs.pop('keep'))
+    else:
+        # Neither was passed, default is True
+        keep_source = True
+
     if not _path_is_abs(name):
         ret['comment'] = '{0} is not an absolute path'.format(name)
         return ret

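Because keep was dropped from the signature (see the -152,7 hunk above), both spellings now arrive via **kwargs and are reconciled here: keep_source wins, keep is a legacy alias, and the default flips to keeping the archive. The same precedence in a condensed standalone form (resolve_keep_source is ours, for illustration):

    def resolve_keep_source(kwargs):
        # Mirrors the precedence in extracted(): keep_source wins over the
        # legacy keep alias; with neither present the default is True.
        if 'keep_source' in kwargs:
            kwargs.pop('keep', None)  # ignored when both are passed
            return bool(kwargs.pop('keep_source'))
        if 'keep' in kwargs:
            return bool(kwargs.pop('keep'))
        return True

    assert resolve_keep_source({'keep_source': False, 'keep': True}) is False
    assert resolve_keep_source({'keep': False}) is False
    assert resolve_keep_source({}) is True
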
@@ -730,10 +767,10 @@ def extracted(name,
     urlparsed_source = _urlparse(source_match)
     source_hash_basename = urlparsed_source.path or urlparsed_source.netloc
 
-    source_is_local = urlparsed_source.scheme in ('', 'file')
+    source_is_local = urlparsed_source.scheme in salt.utils.files.LOCAL_PROTOS
     if source_is_local:
         # Get rid of "file://" from start of source_match
-        source_match = urlparsed_source.path
+        source_match = os.path.realpath(os.path.expanduser(urlparsed_source.path))
         if not os.path.isfile(source_match):
             ret['comment'] = 'Source file \'{0}\' does not exist'.format(source_match)
             return ret

@@ -882,95 +919,59 @@ def extracted(name,
         source_sum = {}
 
     if source_is_local:
-        cached_source = source_match
+        cached = source_match
     else:
-        cached_source = os.path.join(
-            __opts__['cachedir'],
-            'files',
-            __env__,
-            re.sub(r'[:/\\]', '_', source_hash_basename),
-        )
-
-        if os.path.isdir(cached_source):
-            # Prevent a traceback from attempting to read from a directory path
-            salt.utils.rm_rf(cached_source)
-
-    existing_cached_source_sum = _read_cached_checksum(cached_source)
-
-    if source_is_local:
-        # No need to download archive, it's local to the minion
-        update_source = False
-    else:
-        if not os.path.isfile(cached_source):
-            # Archive not cached, we need to download it
-            update_source = True
-        else:
-            # Archive is cached, keep=True likely used in prior run. If we need
-            # to verify the hash, then we *have* to update the source archive
-            # to know whether or not the hash changed. Hence the below
-            # statement. bool(source_hash) will be True if source_hash was
-            # passed, and otherwise False.
-            update_source = bool(source_hash)
-
-        if update_source:
-            if __opts__['test']:
-                ret['result'] = None
-                ret['comment'] = (
-                    'Archive {0} would be downloaded to cache and checked to '
-                    'discover if extraction is necessary'.format(
-                        salt.utils.url.redact_http_basic_auth(source_match)
-                    )
-                )
-                return ret
-
-            # NOTE: This will result in more than one copy of the source
-            # archive on the minion. The reason this is necessary is because
-            # if we are tracking the checksum using source_hash_update, we need
-            # a location where we can place the checksum file alongside the
-            # cached source file, where it won't be overwritten by caching a
-            # file with the same name in the same parent dir as the source
-            # file. Long term, we should come up with a better solution for this.
-            file_result = __states__['file.managed'](cached_source,
-                                                     source=source_match,
-                                                     source_hash=source_hash,
-                                                     source_hash_name=source_hash_name,
-                                                     makedirs=True,
-                                                     skip_verify=skip_verify)
-            log.debug('file.managed: {0}'.format(file_result))
-
-            # Prevent a traceback if errors prevented the above state from getting
-            # off the ground.
-            if isinstance(file_result, list):
-                try:
-                    ret['comment'] = '\n'.join(file_result)
-                except TypeError:
-                    ret['comment'] = '\n'.join([str(x) for x in file_result])
-                return ret
-
-            try:
-                if not file_result['result']:
-                    log.debug(
-                        'failed to download %s',
-                        salt.utils.url.redact_http_basic_auth(source_match)
-                    )
-                    return file_result
-            except TypeError:
-                if not file_result:
-                    log.debug(
-                        'failed to download %s',
-                        salt.utils.url.redact_http_basic_auth(source_match)
-                    )
-                    return file_result
-        else:
-            log.debug(
-                'Archive %s is already in cache',
-                salt.utils.url.redact_http_basic_auth(source_match)
-            )
+        if __opts__['test']:
+            ret['result'] = None
+            ret['comment'] = (
+                'Archive {0} would be cached (if necessary) and checked to '
+                'discover if extraction is needed'.format(
+                    salt.utils.url.redact_http_basic_auth(source_match)
+                )
+            )
+            return ret
+
+        if 'file.cached' not in __states__:
+            # Shouldn't happen unless there is a traceback keeping
+            # salt/states/file.py from being processed through the loader. If
+            # that is the case, we have much more important problems as _all_
+            # file states would be unavailable.
+            ret['comment'] = (
+                'Unable to cache {0}, file.cached state not available'.format(
+                    source_match
+                )
+            )
+            return ret
+
+        try:
+            result = __states__['file.cached'](source_match,
+                                               source_hash=source_hash,
+                                               source_hash_name=source_hash_name,
+                                               skip_verify=skip_verify,
+                                               saltenv=__env__)
+        except Exception as exc:
+            msg = 'Failed to cache {0}: {1}'.format(source_match, exc.__str__())
+            log.exception(msg)
+            ret['comment'] = msg
+            return ret
+        else:
+            log.debug('file.cached: {0}'.format(result))
+
+        if result['result']:
+            # Get the path of the file in the minion cache
+            cached = __salt__['cp.is_cached'](source_match)
+        else:
+            log.debug(
+                'failed to download %s',
+                salt.utils.url.redact_http_basic_auth(source_match)
+            )
+            return result
+
+    existing_cached_source_sum = _read_cached_checksum(cached)
 
     if source_hash and source_hash_update and not skip_verify:
         # Create local hash sum file if we're going to track sum update
-        _update_checksum(cached_source)
+        _update_checksum(cached)
 
     if archive_format == 'zip' and not password:
         log.debug('Checking %s to see if it is password-protected',

@@ -979,7 +980,7 @@ def extracted(name,
         # implicitly enabled by setting the "options" argument.
         try:
             encrypted_zip = __salt__['archive.is_encrypted'](
-                cached_source,
+                cached,
                 clean=False,
                 saltenv=__env__)
         except CommandExecutionError:

@@ -997,7 +998,7 @@ def extracted(name,
             return ret
 
     try:
-        contents = __salt__['archive.list'](cached_source,
+        contents = __salt__['archive.list'](cached,
                                             archive_format=archive_format,
                                             options=list_options,
                                             strip_components=strip_components,

@@ -1166,7 +1167,7 @@ def extracted(name,
     if not extraction_needed \
             and source_hash_update \
             and existing_cached_source_sum is not None \
-            and not _compare_checksum(cached_source, existing_cached_source_sum):
+            and not _compare_checksum(cached, existing_cached_source_sum):
         extraction_needed = True
         source_hash_trigger = True
     else:

@@ -1224,13 +1225,13 @@ def extracted(name,
             __states__['file.directory'](name, user=user, makedirs=True)
             created_destdir = True
 
-        log.debug('Extracting {0} to {1}'.format(cached_source, name))
+        log.debug('Extracting {0} to {1}'.format(cached, name))
         try:
             if archive_format == 'zip':
                 if use_cmd_unzip:
                     try:
                         files = __salt__['archive.cmd_unzip'](
-                            cached_source,
+                            cached,
                             name,
                             options=options,
                             trim_output=trim_output,

@@ -1240,7 +1241,7 @@ def extracted(name,
                         ret['comment'] = exc.strerror
                         return ret
                 else:
-                    files = __salt__['archive.unzip'](cached_source,
+                    files = __salt__['archive.unzip'](cached,
                                                       name,
                                                       options=options,
                                                       trim_output=trim_output,

@@ -1248,7 +1249,7 @@ def extracted(name,
                                                       **kwargs)
             elif archive_format == 'rar':
                 try:
-                    files = __salt__['archive.unrar'](cached_source,
+                    files = __salt__['archive.unrar'](cached,
                                                       name,
                                                       trim_output=trim_output,
                                                       **kwargs)

@@ -1258,7 +1259,7 @@ def extracted(name,
             else:
                 if options is None:
                     try:
-                        with closing(tarfile.open(cached_source, 'r')) as tar:
+                        with closing(tarfile.open(cached, 'r')) as tar:
                             tar.extractall(name)
                             files = tar.getnames()
                             if trim_output:

@@ -1266,7 +1267,7 @@ def extracted(name,
                     except tarfile.ReadError:
                         if salt.utils.which('xz'):
                             if __salt__['cmd.retcode'](
-                                    ['xz', '-t', cached_source],
+                                    ['xz', '-t', cached],
                                     python_shell=False,
                                     ignore_retcode=True) == 0:
                                 # XZ-compressed data

@@ -1282,7 +1283,7 @@ def extracted(name,
                                 # pipe it to tar for extraction.
                                 cmd = 'xz --decompress --stdout {0} | tar xvf -'
                                 results = __salt__['cmd.run_all'](
-                                    cmd.format(_cmd_quote(cached_source)),
+                                    cmd.format(_cmd_quote(cached)),
                                     cwd=name,
                                     python_shell=True)
                                 if results['retcode'] != 0:

@@ -1352,7 +1353,7 @@ def extracted(name,
 
                 tar_cmd.append(tar_shortopts)
                 tar_cmd.extend(tar_longopts)
-                tar_cmd.extend(['-f', cached_source])
+                tar_cmd.extend(['-f', cached])
 
                 results = __salt__['cmd.run_all'](tar_cmd,
                                                   cwd=name,

@@ -1523,18 +1524,15 @@ def extracted(name,
         for item in enforce_failed:
             ret['comment'] += '\n- {0}'.format(item)
 
-    if not source_is_local and not keep:
-        for path in (cached_source, __salt__['cp.is_cached'](source_match)):
-            if not path:
-                continue
-            log.debug('Cleaning cached source file %s', path)
-            try:
-                os.remove(path)
-            except OSError as exc:
-                if exc.errno != errno.ENOENT:
-                    log.error(
-                        'Failed to clean cached source file %s: %s',
-                        cached_source, exc.__str__()
-                    )
+    if not source_is_local:
+        if keep_source:
+            log.debug('Keeping cached source file %s', cached)
+        else:
+            log.debug('Cleaning cached source file %s', cached)
+            result = __states__['file.not_cached'](source_match, saltenv=__env__)
+            if not result['result']:
+                # Don't let failure to delete cached file cause the state
+                # itself to fail, just drop it in the warnings.
+                ret.setdefault('warnings', []).append(result['comment'])
 
     return ret

@@ -294,6 +294,7 @@ if salt.utils.is_windows():
 # Import 3rd-party libs
 import salt.ext.six as six
 from salt.ext.six.moves import zip_longest
+from salt.ext.six.moves.urllib.parse import urlparse as _urlparse  # pylint: disable=no-name-in-module
 if salt.utils.is_windows():
     import pywintypes
     import win32com.client

@@ -1519,6 +1520,7 @@ def managed(name,
             source=None,
             source_hash='',
             source_hash_name=None,
+            keep_source=True,
             user=None,
             group=None,
             mode=None,

@@ -1717,6 +1719,15 @@ def managed(name,
 
         .. versionadded:: 2016.3.5
 
+    keep_source : True
+        Set to ``False`` to discard the cached copy of the source file once the
+        state completes. This can be useful for larger files to keep them from
+        taking up space in minion cache. However, keep in mind that discarding
+        the source file will result in the state needing to re-download the
+        source file if the state is run again.
+
+        .. versionadded:: 2017.7.3
+
     user
         The user to own the file, this defaults to the user salt is running as
         on the minion

@@ -2415,8 +2426,9 @@ def managed(name,
     except Exception as exc:
         ret['changes'] = {}
         log.debug(traceback.format_exc())
-        if os.path.isfile(tmp_filename):
-            os.remove(tmp_filename)
+        salt.utils.files.remove(tmp_filename)
+        if not keep_source and sfn:
+            salt.utils.files.remove(sfn)
         return _error(ret, 'Unable to check_cmd file: {0}'.format(exc))
 
     # file being updated to verify using check_cmd

@@ -2434,15 +2446,9 @@ def managed(name,
         cret = mod_run_check_cmd(check_cmd, tmp_filename, **check_cmd_opts)
         if isinstance(cret, dict):
             ret.update(cret)
-            if os.path.isfile(tmp_filename):
-                os.remove(tmp_filename)
-            if sfn and os.path.isfile(sfn):
-                os.remove(sfn)
+            salt.utils.files.remove(tmp_filename)
             return ret
 
-        if sfn and os.path.isfile(sfn):
-            os.remove(sfn)
-
         # Since we generated a new tempfile and we are not returning here
         # lets change the original sfn to the new tempfile or else we will
         # get file not found

@@ -2490,10 +2496,10 @@ def managed(name,
             log.debug(traceback.format_exc())
             return _error(ret, 'Unable to manage file: {0}'.format(exc))
         finally:
-            if tmp_filename and os.path.isfile(tmp_filename):
-                os.remove(tmp_filename)
-            if sfn and os.path.isfile(sfn):
-                os.remove(sfn)
+            if tmp_filename:
+                salt.utils.files.remove(tmp_filename)
+            if not keep_source and sfn:
+                salt.utils.files.remove(sfn)
 
 
 _RECURSE_TYPES = ['user', 'group', 'mode', 'ignore_files', 'ignore_dirs']

@@ -3022,6 +3028,7 @@ def directory(name,
 
 def recurse(name,
             source,
+            keep_source=True,
             clean=False,
             require=None,
             user=None,

@@ -3053,6 +3060,15 @@ def recurse(name,
         located on the master in the directory named spam, and is called eggs,
         the source string is salt://spam/eggs
 
+    keep_source : True
+        Set to ``False`` to discard the cached copy of the source file once the
+        state completes. This can be useful for larger files to keep them from
+        taking up space in minion cache. However, keep in mind that discarding
+        the source file will result in the state needing to re-download the
+        source file if the state is run again.
+
+        .. versionadded:: 2017.7.3
+
     clean
         Make sure that only files that are set up by salt and required by this
         function are kept. If this option is set then everything in this

@@ -3333,6 +3349,7 @@ def recurse(name,
         _ret = managed(
             path,
             source=source,
+            keep_source=keep_source,
             user=user,
             group=group,
             mode='keep' if keep_mode else file_mode,

@@ -6423,3 +6440,376 @@ def shortcut(
         ret['comment'] += (', but was unable to set ownership to '
                            '{0}'.format(user))
     return ret
+
+
+def cached(name,
+           source_hash='',
+           source_hash_name=None,
+           skip_verify=False,
+           saltenv='base'):
+    '''
+    .. versionadded:: 2017.7.3
+
+    Ensures that a file is saved to the minion's cache. This state is primarily
+    invoked by other states to ensure that we do not re-download a source file
+    if we do not need to.
+
+    name
+        The URL of the file to be cached. To cache a file from an environment
+        other than ``base``, either use the ``saltenv`` argument or include the
+        saltenv in the URL (e.g. ``salt://path/to/file.conf?saltenv=dev``).
+
+        .. note::
+            A list of URLs is not supported; this must be a single URL. If a
+            local file is passed here, then the state will obviously not try to
+            download anything, but it will compare a hash if one is specified.
+
+    source_hash
+        See the documentation for this same argument in the
+        :py:func:`file.managed <salt.states.file.managed>` state.
+
+        .. note::
+            For remote files not originating from the ``salt://`` fileserver,
+            such as http(s) or ftp servers, this state will not re-download the
+            file if the locally-cached copy matches this hash. This is done to
+            prevent unnecessary downloading on repeated runs of this state. To
+            update the cached copy of a file, it is necessary to update this
+            hash.
+
+    source_hash_name
+        See the documentation for this same argument in the
+        :py:func:`file.managed <salt.states.file.managed>` state.
+
+    skip_verify
+        See the documentation for this same argument in the
+        :py:func:`file.managed <salt.states.file.managed>` state.
+
+        .. note::
+            Setting this to ``True`` will result in a copy of the file being
+            downloaded from a remote (http(s), ftp, etc.) source each time the
+            state is run.
+
+    saltenv
+        Used to specify the environment from which to download a file from the
+        Salt fileserver (i.e. those with ``salt://`` URL).
+
+
+    This state will in most cases not be useful in SLS files, but it is useful
+    when writing a state or remote-execution module that needs to make sure
+    that a file at a given URL has been downloaded to the cachedir. One example
+    of this is in the :py:func:`archive.extracted <salt.states.file.extracted>`
+    state:
+
+    .. code-block:: python
+
+        result = __states__['file.cached'](source_match,
+                                           source_hash=source_hash,
+                                           source_hash_name=source_hash_name,
+                                           skip_verify=skip_verify,
+                                           saltenv=__env__)
+
+    This will return a dictionary containing the state's return data, including
+    a ``result`` key which will state whether or not the state was successful.
+    Note that this will not catch exceptions, so it is best used within a
+    try/except.
+
+    Once this state has been run from within another state or remote-execution
+    module, the actual location of the cached file can be obtained using
+    :py:func:`cp.is_cached <salt.modules.cp.is_cached>`:
+
+    .. code-block:: python
+
+        cached = __salt__['cp.is_cached'](source_match)
+
+    This function will return the cached path of the file, or an empty string
+    if the file is not present in the minion cache.
+    '''
+    ret = {'changes': {},
+           'comment': '',
+           'name': name,
+           'result': False}
+
+    try:
+        parsed = _urlparse(name)
+    except Exception:
+        ret['comment'] = 'Only URLs or local file paths are valid input'
+        return ret
+
+    # This if statement will keep the state from proceeding if a remote source
+    # is specified and no source_hash is presented (unless we're skipping hash
+    # verification).
+    if not skip_verify \
+            and not source_hash \
+            and parsed.scheme in salt.utils.files.REMOTE_PROTOS:
+        ret['comment'] = (
+            'Unable to verify upstream hash of source file {0}, please set '
+            'source_hash or set skip_verify to True'.format(name)
+        )
+        return ret
+
+    if source_hash:
+        # Get the hash and hash type from the input. This takes care of parsing
+        # the hash out of a file containing checksums, if that is how the
+        # source_hash was specified.
+        try:
+            source_sum = __salt__['file.get_source_sum'](
+                source=name,
+                source_hash=source_hash,
+                source_hash_name=source_hash_name,
+                saltenv=saltenv)
+        except CommandExecutionError as exc:
+            ret['comment'] = exc.strerror
+            return ret
+        else:
+            if not source_sum:
+                # We shouldn't get here, problems in retrieving the hash in
+                # file.get_source_sum should result in a CommandExecutionError
+                # being raised, which we catch above. Nevertheless, we should
+                # provide useful information in the event that
+                # file.get_source_sum regresses.
+                ret['comment'] = (
+                    'Failed to get source hash from {0}. This may be a bug. '
+                    'If this error persists, please report it and set '
+                    'skip_verify to True to work around it.'.format(source_hash)
+                )
+                return ret
+    else:
+        source_sum = {}
+
+    if parsed.scheme in salt.utils.files.LOCAL_PROTOS:
+        # Source is a local file path
+        full_path = os.path.realpath(os.path.expanduser(parsed.path))
+        if os.path.exists(full_path):
+            if not skip_verify and source_sum:
+                # Enforce the hash
+                local_hash = __salt__['file.get_hash'](
+                    full_path,
+                    source_sum.get('hash_type', __opts__['hash_type']))
+                if local_hash == source_sum['hsum']:
+                    ret['result'] = True
+                    ret['comment'] = (
+                        'File {0} is present on the minion and has hash '
+                        '{1}'.format(full_path, local_hash)
+                    )
+                else:
+                    ret['comment'] = (
+                        'File {0} is present on the minion, but the hash ({1}) '
+                        'does not match the specified hash ({2})'.format(
+                            full_path, local_hash, source_sum['hsum']
+                        )
+                    )
+                return ret
+            else:
+                ret['result'] = True
+                ret['comment'] = 'File {0} is present on the minion'.format(
+                    full_path
+                )
+                return ret
+        else:
+            ret['comment'] = 'File {0} is not present on the minion'.format(
+                full_path
+            )
+            return ret
+
+    local_copy = __salt__['cp.is_cached'](name, saltenv=saltenv)
+
+    if local_copy:
+        # File is already cached
+        pre_hash = __salt__['file.get_hash'](
+            local_copy,
+            source_sum.get('hash_type', __opts__['hash_type']))
+
+        if not skip_verify and source_sum:
+            # Get the local copy's hash to compare with the hash that was
+            # specified via source_hash. If it matches, we can exit early from
+            # the state without going any further, because the file is cached
+            # with the correct hash.
+            if pre_hash == source_sum['hsum']:
+                ret['result'] = True
+                ret['comment'] = (
+                    'File is already cached to {0} with hash {1}'.format(
+                        local_copy, pre_hash
+                    )
+                )
+    else:
+        pre_hash = None
+
+    def _try_cache(path, checksum):
+        '''
+        This helper is not needed anymore in develop as the fileclient in the
+        develop branch now has means of skipping a download if the existing
+        hash matches one passed to cp.cache_file. Remove this helper and the
+        code that invokes it, once we have merged forward into develop.
+        '''
+        if not path or not checksum:
+            return True
+        form = salt.utils.files.HASHES_REVMAP.get(len(checksum))
+        if form is None:
+            # Shouldn't happen, an invalid checksum length should be caught
+            # before we get here. But in the event this gets through, don't let
+            # it cause any trouble, and just return True.
+            return True
+        try:
+            return salt.utils.get_hash(path, form=form) != checksum
+        except (IOError, OSError, ValueError):
+            # Again, shouldn't happen, but don't let invalid input/permissions
+            # in the call to get_hash blow this up.
+            return True
+
+    # Cache the file. Note that this will not actually download the file if
+    # either of the following is true:
+    #   1. source is a salt:// URL and the fileserver determines that the hash
+    #      of the minion's copy matches that of the fileserver.
+    #   2. File is remote (http(s), ftp, etc.) and the specified source_hash
+    #      matches the cached copy.
+    # Remote, non salt:// sources _will_ download if a copy of the file was
+    # not already present in the minion cache.
+    if _try_cache(local_copy, source_sum.get('hsum')):
+        # The _try_cache helper is obsolete in the develop branch. Once merged
+        # forward, remove the helper as well as this if statement, and dedent
+        # the below block.
+        try:
+            local_copy = __salt__['cp.cache_file'](
+                name,
+                saltenv=saltenv)
+                # Once this is merged into develop, uncomment the source_hash
+                # line below and add it to the list of arguments to
+                # cp.cache_file (note that this also means removing the
+                # close-parenthesis above and replacing it with a comma). The
+                # develop branch has modifications to the fileclient which will
+                # allow it to skip the download if the source_hash matches what
+                # is passed to cp.cache_file, so the helper is just a stopgap
+                # for the 2017.7 release cycle.
+                #source_hash=source_sum.get('hsum'))
+        except Exception as exc:
+            ret['comment'] = exc.__str__()
+            return ret
+
+        if not local_copy:
+            ret['comment'] = (
+                'Failed to cache {0}, check minion log for more '
+                'information'.format(name)
+            )
+            return ret
+
+    post_hash = __salt__['file.get_hash'](
+        local_copy,
+        source_sum.get('hash_type', __opts__['hash_type']))
+
+    if pre_hash != post_hash:
+        ret['changes']['hash'] = {'old': pre_hash, 'new': post_hash}
+
+    # Check the hash, if we're enforcing one. Note that this will be the first
+    # hash check if the file was not previously cached, and the 2nd hash check
+    # if it was cached and the cached copy did not match the hash specified
+    # via source_hash.
+    if not skip_verify and source_sum:
+        if post_hash == source_sum['hsum']:
+            ret['result'] = True
+            ret['comment'] = (
+                'File is already cached to {0} with hash {1}'.format(
+                    local_copy, post_hash
+                )
+            )
+        else:
+            ret['comment'] = (
+                'File is cached to {0}, but the hash ({1}) does not match '
+                'the specified hash ({2})'.format(
+                    local_copy, post_hash, source_sum['hsum']
+                )
+            )
+        return ret
+
+    # We're not enforcing a hash, and we already know that the file was
+    # successfully cached, so we know the state was successful.
+    ret['result'] = True
+    ret['comment'] = 'File is cached to {0}'.format(local_copy)
+    return ret
+
+
+def not_cached(name, saltenv='base'):
+    '''
+    .. versionadded:: 2017.7.3
+
+    Ensures that a file is not present in the minion's cache, deleting it if
+    found. This state is primarily invoked by other states to ensure that a
+    fresh copy of a source file is downloaded on the next run.
+
+    name
+        The URL of the file to be removed from cache. To remove a file from
+        cache in an environment other than ``base``, either use the ``saltenv``
+        argument or include the saltenv in the URL (e.g.
+        ``salt://path/to/file.conf?saltenv=dev``).
+
+        .. note::
+            A list of URLs is not supported; this must be a single URL. If a
+            local file is passed here, the state will take no action.
+
+    saltenv
+        Used to specify the environment from which to download a file from the
+        Salt fileserver (i.e. those with ``salt://`` URL).
+    '''
+    ret = {'changes': {},
+           'comment': '',
+           'name': name,
+           'result': False}
+
+    try:
+        parsed = _urlparse(name)
+    except Exception:
+        ret['comment'] = 'Only URLs or local file paths are valid input'
+        return ret
+    else:
+        if parsed.scheme in salt.utils.files.LOCAL_PROTOS:
+            full_path = os.path.realpath(os.path.expanduser(parsed.path))
+            ret['result'] = True
+            ret['comment'] = (
+                'File {0} is a local path, no action taken'.format(
+                    full_path
+                )
+            )
+            return ret
+
+    local_copy = __salt__['cp.is_cached'](name, saltenv=saltenv)
+
+    if local_copy:
+        try:
+            os.remove(local_copy)
+        except Exception as exc:
+            ret['comment'] = 'Failed to delete {0}: {1}'.format(
+                local_copy, exc.__str__()
+            )
+        else:
+            ret['result'] = True
+            ret['changes']['deleted'] = True
+            ret['comment'] = '{0} was deleted'.format(local_copy)
+    else:
+        ret['result'] = True
+        ret['comment'] = '{0} is not cached'.format(name)
+    return ret

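Putting the docstring's two snippets together: inside another state or execution module (where __states__ and __salt__ are injected by Salt's loader), the call is typically wrapped like this, mirroring what archive.extracted now does earlier in this diff (the helper name and variables are ours, for illustration):

    def _fetch_to_cache(source_match, source_hash, skip_verify, saltenv):
        # Hypothetical helper for a custom state module; __states__ and
        # __salt__ are injected by Salt's loader at runtime.
        try:
            result = __states__['file.cached'](source_match,
                                               source_hash=source_hash,
                                               skip_verify=skip_verify,
                                               saltenv=saltenv)
        except Exception as exc:
            return None, 'Failed to cache {0}: {1}'.format(source_match, exc)
        if not result['result']:
            return None, result['comment']
        # On success, resolve the real path of the file in the minion cache
        return __salt__['cp.is_cached'](source_match, saltenv=saltenv), None
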
@@ -23,10 +23,21 @@ from salt.ext import six
 
 log = logging.getLogger(__name__)
 
 LOCAL_PROTOS = ('', 'file')
 REMOTE_PROTOS = ('http', 'https', 'ftp', 'swift', 's3')
 VALID_PROTOS = ('salt', 'file') + REMOTE_PROTOS
 TEMPFILE_PREFIX = '__salt.tmp.'
 
+HASHES = {
+    'sha512': 128,
+    'sha384': 96,
+    'sha256': 64,
+    'sha224': 56,
+    'sha1': 40,
+    'md5': 32,
+}
+HASHES_REVMAP = dict([(y, x) for x, y in six.iteritems(HASHES)])
+
 
 def guess_archive_type(name):
     '''

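HASHES maps each supported algorithm to its hex-digest length and HASHES_REVMAP inverts it; that inversion is what lets _try_cache() in the new file.cached state infer the hash type from a bare checksum string. A quick self-contained check (digest computed with hashlib, not Salt):

    import hashlib

    HASHES = {'sha512': 128, 'sha384': 96, 'sha256': 64,
              'sha224': 56, 'sha1': 40, 'md5': 32}
    HASHES_REVMAP = dict((length, name) for name, length in HASHES.items())

    digest = hashlib.sha256(b'example').hexdigest()
    assert HASHES_REVMAP[len(digest)] == 'sha256'  # 64 hex chars -> sha256
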
@@ -306,3 +317,15 @@ def safe_filepath(file_path_name, dir_sep=None):
     if drive:
         path = dir_sep.join([drive, path])
     return path
+
+
+def remove(path):
+    '''
+    Runs os.remove(path) and suppresses the OSError if the file doesn't exist
+    '''
+    try:
+        os.remove(path)
+    except OSError as exc:
+        if exc.errno != errno.ENOENT:
+            raise

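remove() replaces the repeated `if os.path.isfile(...): os.remove(...)` pattern in file.managed and closes the race between the check and the unlink; only ENOENT is swallowed, so real failures such as EACCES still propagate. Expected behavior, sketched with a throwaway path:

    import os
    import tempfile

    import salt.utils.files

    path = os.path.join(tempfile.gettempdir(), 'salt-remove-demo.txt')
    salt.utils.files.remove(path)   # missing file: ENOENT is suppressed
    open(path, 'w').close()
    salt.utils.files.remove(path)   # existing file: unlinked
    assert not os.path.exists(path)
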
@@ -985,10 +985,37 @@ class CkMinions(object):
             auth_list.append(matcher)
         return auth_list
 
+    def fill_auth_list(self, auth_provider, name, groups, auth_list=None, permissive=None):
+        '''
+        Returns a list of authorisation matchers that a user is eligible for.
+        This list is a combination of the provided personal matchers plus the
+        matchers of any group the user is in.
+        '''
+        if auth_list is None:
+            auth_list = []
+        if permissive is None:
+            permissive = self.opts.get('permissive_acl')
+        name_matched = False
+        for match in auth_provider:
+            if match == '*' and not permissive:
+                continue
+            if match.endswith('%'):
+                if match.rstrip('%') in groups:
+                    auth_list.extend(auth_provider[match])
+            else:
+                if salt.utils.expr_match(match, name):
+                    name_matched = True
+                    auth_list.extend(auth_provider[match])
+        if not permissive and not name_matched and '*' in auth_provider:
+            auth_list.extend(auth_provider['*'])
+        return auth_list
+
     def wheel_check(self, auth_list, fun):
         '''
         Check special API permissions
         '''
+        if not auth_list:
+            return False
         comps = fun.split('.')
         if len(comps) != 2:
             return False

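fill_auth_list() is what LoadAuth.get_auth_list() now delegates to (first hunk of this diff): per-user matchers and group matchers (keys ending in '%') are merged, and '*' acts only as a fallback unless permissive_acl is set. A standalone re-implementation plus a worked example with a hypothetical provider config (the real method matches names via salt.utils.expr_match, which also understands regexes; plain fnmatch is used here):

    import fnmatch

    def fill_auth_list(auth_provider, name, groups, permissive=False):
        # Standalone re-implementation of the method above, for illustration.
        auth_list = []
        name_matched = False
        for match in auth_provider:
            if match == '*' and not permissive:
                continue
            if match.endswith('%'):
                if match.rstrip('%') in groups:
                    auth_list.extend(auth_provider[match])
            elif fnmatch.fnmatch(name, match):
                name_matched = True
                auth_list.extend(auth_provider[match])
        if not permissive and not name_matched and '*' in auth_provider:
            auth_list.extend(auth_provider['*'])
        return auth_list

    eauth = {'fred': ['test.ping'], 'admins%': ['.*'], '*': ['grains.items']}
    assert fill_auth_list(eauth, 'fred', []) == ['test.ping']      # '*' skipped
    assert fill_auth_list(eauth, 'alice', []) == ['grains.items']  # fallback only
    assert sorted(fill_auth_list(eauth, 'alice', ['admins'])) == ['.*', 'grains.items']
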
@@ -1020,6 +1047,8 @@ class CkMinions(object):
         '''
         Check special API permissions
         '''
+        if not auth_list:
+            return False
         comps = fun.split('.')
         if len(comps) != 2:
             return False

@@ -1051,6 +1080,8 @@ class CkMinions(object):
         '''
         Check special API permissions
         '''
+        if not auth_list:
+            return False
         if form != 'cloud':
             comps = fun.split('.')
             if len(comps) != 2:
