Merge pull request #46307 from rallytime/merge-2018.3
[2018.3] Merge forward from 2018.3.0rc1 to 2018.3
Commit 241611aca5
15 changed files with 384 additions and 175 deletions
@@ -1,5 +1,5 @@
Jinja2
msgpack-python>0.3
msgpack-python>0.3,!=0.5.5
PyYAML
MarkupSafe
requests>=1.0.0
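The tightened requirement keeps any msgpack-python newer than 0.3 while excluding the 0.5.5 release. As a minimal sketch of how a pip-style specifier like this is evaluated (using the third-party packaging library, which is an assumption here and not part of this change):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>0.3,!=0.5.5')
for version in ('0.3', '0.4.6', '0.5.5', '0.5.6'):
    print(version, spec.contains(version))
# 0.3 False, 0.4.6 True, 0.5.5 False, 0.5.6 True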
@@ -659,6 +659,8 @@ class SSH(object):
self.cache_job(jid, host, ret[host], fun)
if self.event:
    id_, data = next(six.iteritems(ret))
    if isinstance(data, six.text_type):
        data = {'return': data}
    if 'id' not in data:
        data['id'] = id_
    data['jid'] = jid  # make the jid in the payload the same as the jid in the tag
@@ -772,6 +774,8 @@ class SSH(object):
    self.opts)
if self.event:
    id_, data = next(six.iteritems(ret))
    if isinstance(data, six.text_type):
        data = {'return': data}
    if 'id' not in data:
        data['id'] = id_
    data['jid'] = jid  # make the jid in the payload the same as the jid in the tag
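Both hunks apply the same normalization before the master event is fired: a bare string return is wrapped in a dict, the minion id is filled in if it is missing, and the jid is copied into the payload. A standalone sketch of that logic (plain Python, illustrative names, not Salt's API):

def normalize_ssh_return(ret, jid):
    # ret is assumed to look like {'web01': 'True'} or {'web01': {...}}
    id_, data = next(iter(ret.items()))
    if isinstance(data, str):
        data = {'return': data}
    if 'id' not in data:
        data['id'] = id_
    data['jid'] = jid  # make the jid in the payload the same as the jid in the tag
    return data

print(normalize_ssh_return({'web01': 'True'}, '20180227140750302662'))
# {'return': 'True', 'id': 'web01', 'jid': '20180227140750302662'}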
@@ -2126,10 +2126,6 @@ def _read_conf_file(path):
            conf_opts['id'] = six.text_type(conf_opts['id'])
        else:
            conf_opts['id'] = sdecode(conf_opts['id'])
        for key, value in six.iteritems(conf_opts.copy()):
            if isinstance(value, six.text_type) and six.PY2:
                # We do not want unicode settings
                conf_opts[key] = value.encode('utf-8')
        return conf_opts
@@ -2222,7 +2218,6 @@ def include_config(include, orig_path, verbose, exit_on_config_errors=False):
    main config file.
    '''
    # Protect against empty option

    if not include:
        return {}
@@ -3581,7 +3576,7 @@ def get_id(opts, cache_minion_id=False):
    if opts.get('minion_id_caching', True):
        try:
            with salt.utils.files.fopen(id_cache) as idf:
                name = idf.readline().strip()
                name = salt.utils.stringutils.to_unicode(idf.readline().strip())
                bname = salt.utils.stringutils.to_bytes(name)
                if bname.startswith(codecs.BOM):  # Remove BOM if exists
                    name = salt.utils.stringutils.to_str(bname.replace(codecs.BOM, '', 1))
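get_id() now decodes the cached minion id and strips a leading byte order mark before using it. A standalone sketch of the same check (codecs.BOM is Python's native-order UTF-16 BOM constant, the same constant the code above tests against; the decode step stands in for Salt's to_unicode helper):

import codecs

def strip_bom(raw):
    bname = raw.strip()
    if bname.startswith(codecs.BOM):
        bname = bname.replace(codecs.BOM, b'', 1)
    return bname.decode('utf-8')

print(strip_bom(codecs.BOM + b'web01\n'))   # 'web01'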
@@ -3703,7 +3698,9 @@ def apply_minion_config(overrides=None,
            )
            opts['fileserver_backend'][idx] = new_val

    opts['__cli'] = os.path.basename(sys.argv[0])
    opts['__cli'] = salt.utils.stringutils.to_unicode(
        os.path.basename(sys.argv[0])
    )

    # No ID provided. Will getfqdn save us?
    using_ip_for_id = False
@@ -3837,10 +3834,10 @@ def master_config(path, env_var='SALT_MASTER_CONFIG', defaults=None, exit_on_con
                                       defaults['default_include'])
    include = overrides.get('include', [])

    overrides.update(include_config(default_include, path, verbose=False),
                     exit_on_config_errors=exit_on_config_errors)
    overrides.update(include_config(include, path, verbose=True),
                     exit_on_config_errors=exit_on_config_errors)
    overrides.update(include_config(default_include, path, verbose=False,
                                    exit_on_config_errors=exit_on_config_errors))
    overrides.update(include_config(include, path, verbose=True,
                                    exit_on_config_errors=exit_on_config_errors))
    opts = apply_master_config(overrides, defaults)
    _validate_ssh_minion_opts(opts)
    _validate_opts(opts)
@@ -3889,6 +3886,10 @@ def apply_master_config(overrides=None, defaults=None):
        )
        opts['saltenv'] = opts['environment']

    if six.PY2 and 'rest_cherrypy' in opts:
        # CherryPy is not unicode-compatible
        opts['rest_cherrypy'] = salt.utils.data.encode(opts['rest_cherrypy'])

    for idx, val in enumerate(opts['fileserver_backend']):
        if val in ('git', 'hg', 'svn', 'minion'):
            new_val = val + 'fs'
@@ -2174,7 +2174,7 @@ def _hw_data(osdata):
        if os.path.exists(contents_file):
            try:
                with salt.utils.files.fopen(contents_file, 'r') as ifile:
                    grains[key] = ifile.read()
                    grains[key] = ifile.read().strip()
                    if key == 'uuid':
                        grains['uuid'] = grains['uuid'].lower()
            except (IOError, OSError) as err:
@@ -129,7 +129,10 @@ def assign(name, value):
        tran_tab = name.translate(''.maketrans('./', '/.'))
    else:
        if isinstance(name, unicode):  # pylint: disable=incompatible-py3-code
            trans_args = ({ord(x): None for x in ''.join(['./', '/.'])},)
            trans_args = ({
                ord('/'): '.',
                ord('.'): '/'
            },)
        else:
            trans_args = string.maketrans('./', '/.')
        tran_tab = name.translate(*trans_args)
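This looks like the sysctl-style name translation: the old Unicode branch built a table that mapped both '.' and '/' to None, which deletes the characters, while the new table swaps them. A Python 3 sketch of the corrected mapping (illustrative only):

def swap_dots_slashes(name):
    # '.' becomes '/' and '/' becomes '.', e.g. when mapping a name to a /proc/sys path
    return name.translate({ord('/'): '.', ord('.'): '/'})

print(swap_dots_slashes('net.ipv4.ip_forward'))   # net/ipv4/ip_forward
print(swap_dots_slashes('net/ipv4/ip_forward'))   # net.ipv4.ip_forward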
salt/payload.py (140 changed lines)
@@ -17,6 +17,7 @@ import salt.log
import salt.crypt
import salt.transport.frame
import salt.utils.immutabletypes as immutabletypes
import salt.utils.stringutils
from salt.exceptions import SaltReqTimeoutError

# Import third party libs
@@ -128,15 +129,21 @@ class Serial(object):
        the contents cannot be converted.
        '''
        try:
            def ext_type_decoder(code, data):
                if code == 78:
                    data = salt.utils.stringutils.to_unicode(data)
                    return datetime.datetime.strptime(data, '%Y%m%dT%H:%M:%S.%f')
                return data

            gc.disable()  # performance optimization for msgpack
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'encoding' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                ret = msgpack.loads(msg, use_list=True, encoding=encoding)
                ret = msgpack.loads(msg, use_list=True, ext_hook=ext_type_decoder, encoding=encoding)
            else:
                ret = msgpack.loads(msg, use_list=True)
                ret = msgpack.loads(msg, use_list=True, ext_hook=ext_type_decoder)
            if six.PY3 and encoding is None and not raw:
                ret = salt.transport.frame.decode_embedded_strs(ret)
        except Exception as exc:
@@ -175,19 +182,40 @@ class Serial(object):
        Since this changes the wire protocol, this
        option should not be used outside of IPC.
        '''
        def ext_type_encoder(obj):
            if isinstance(obj, six.integer_types):
                # msgpack can't handle the very long Python longs for jids
                # Convert any very long longs to strings
                return six.text_type(obj)
            elif isinstance(obj, datetime.datetime):
                # msgpack doesn't support datetime.datetime datatype
                # So here we have converted datetime.datetime to custom datatype
                # This is msgpack Extended types numbered 78
                return msgpack.ExtType(78, salt.utils.stringutils.to_bytes(
                    obj.strftime('%Y%m%dT%H:%M:%S.%f')))
            # The same for immutable types
            elif isinstance(obj, immutabletypes.ImmutableDict):
                return dict(obj)
            elif isinstance(obj, immutabletypes.ImmutableList):
                return list(obj)
            elif isinstance(obj, (set, immutabletypes.ImmutableSet)):
                # msgpack can't handle set so translate it to tuple
                return tuple(obj)
            # Nothing known exceptions found. Let msgpack raise it's own.
            return obj

        try:
            if msgpack.version >= (0, 4, 0):
                # msgpack only supports 'use_bin_type' starting in 0.4.0.
                # Due to this, if we don't need it, don't pass it at all so
                # that under Python 2 we can still work with older versions
                # of msgpack.
                return msgpack.dumps(msg, use_bin_type=use_bin_type)
                return msgpack.dumps(msg, default=ext_type_encoder, use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(msg)
                return msgpack.dumps(msg, default=ext_type_encoder)
        except (OverflowError, msgpack.exceptions.PackValueError):
            # msgpack can't handle the very long Python longs for jids
            # Convert any very long longs to strings
            # We borrow the technique used by TypeError below
            # msgpack<=0.4.6 don't call ext encoder on very long integers raising the error instead.
            # Convert any very long longs to strings and call dumps again.
            def verylong_encoder(obj):
                if isinstance(obj, dict):
                    for key, value in six.iteritems(obj.copy()):
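The encoder and the decoder added above agree on ExtType code 78 carrying a datetime serialized as %Y%m%dT%H:%M:%S.%f. A standalone round-trip sketch with a recent msgpack release (the default= and ext_hook= hooks are msgpack's public API; the rest is illustrative, not Salt's code):

import datetime
import msgpack

def dt_encoder(obj):
    if isinstance(obj, datetime.datetime):
        return msgpack.ExtType(78, obj.strftime('%Y%m%dT%H:%M:%S.%f').encode('utf-8'))
    return obj

def dt_decoder(code, data):
    if code == 78:
        return datetime.datetime.strptime(data.decode('utf-8'), '%Y%m%dT%H:%M:%S.%f')
    return msgpack.ExtType(code, data)

packed = msgpack.packb({'stamp': datetime.datetime(2018, 2, 27, 14, 7, 50)},
                       default=dt_encoder, use_bin_type=True)
print(msgpack.unpackb(packed, ext_hook=dt_decoder, raw=False))
# {'stamp': datetime.datetime(2018, 2, 27, 14, 7, 50)}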
@@ -198,102 +226,18 @@ class Serial(object):
                for idx, entry in enumerate(obj):
                    obj[idx] = verylong_encoder(entry)
                return obj
            # This is a spurious lint failure as we are gating this check
            # behind a check for six.PY2.
            if six.PY2 and isinstance(obj, long) and long > pow(2, 64):  # pylint: disable=incompatible-py3-code
                return six.text_type(obj)
            elif six.PY3 and isinstance(obj, int) and int > pow(2, 64):
            # A value of an Integer object is limited from -(2^63) upto (2^64)-1 by MessagePack
            # spec. Here we care only of JIDs that are positive integers.
            if isinstance(obj, six.integer_types) and obj >= pow(2, 64):
                return six.text_type(obj)
            else:
                return obj

        msg = verylong_encoder(msg)
        if msgpack.version >= (0, 4, 0):
            return msgpack.dumps(verylong_encoder(msg), use_bin_type=use_bin_type)
            return msgpack.dumps(msg, default=ext_type_encoder, use_bin_type=use_bin_type)
        else:
            return msgpack.dumps(verylong_encoder(msg))
    except TypeError as e:
        # msgpack doesn't support datetime.datetime datatype
        # So here we have converted datetime.datetime to custom datatype
        # This is msgpack Extended types numbered 78
        def default(obj):
            return msgpack.ExtType(78, obj)

        def dt_encode(obj):
            datetime_str = obj.strftime("%Y%m%dT%H:%M:%S.%f")
            if msgpack.version >= (0, 4, 0):
                return msgpack.packb(datetime_str, default=default, use_bin_type=use_bin_type)
            else:
                return msgpack.packb(datetime_str, default=default)

        def datetime_encoder(obj):
            if isinstance(obj, dict):
                for key, value in six.iteritems(obj.copy()):
                    encodedkey = datetime_encoder(key)
                    if key != encodedkey:
                        del obj[key]
                        key = encodedkey
                    obj[key] = datetime_encoder(value)
                return dict(obj)
            elif isinstance(obj, (list, tuple)):
                obj = list(obj)
                for idx, entry in enumerate(obj):
                    obj[idx] = datetime_encoder(entry)
                return obj
            if isinstance(obj, datetime.datetime):
                return dt_encode(obj)
            else:
                return obj

        def immutable_encoder(obj):
            log.debug('IMMUTABLE OBJ: %s', obj)
            if isinstance(obj, immutabletypes.ImmutableDict):
                return dict(obj)
            if isinstance(obj, immutabletypes.ImmutableList):
                return list(obj)
            if isinstance(obj, immutabletypes.ImmutableSet):
                return set(obj)

        if "datetime.datetime" in six.text_type(e):
            if msgpack.version >= (0, 4, 0):
                return msgpack.dumps(datetime_encoder(msg), use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(datetime_encoder(msg))
        elif "Immutable" in six.text_type(e):
            if msgpack.version >= (0, 4, 0):
                return msgpack.dumps(msg, default=immutable_encoder, use_bin_type=use_bin_type)
            else:
                return msgpack.dumps(msg, default=immutable_encoder)

        if msgpack.version >= (0, 2, 0):
            # Should support OrderedDict serialization, so, let's
            # raise the exception
            raise

        # msgpack is < 0.2.0, let's make its life easier
        # Since OrderedDict is identified as a dictionary, we can't
        # make use of msgpack custom types, we will need to convert by
        # hand.
        # This means iterating through all elements of a dictionary or
        # list/tuple
        def odict_encoder(obj):
            if isinstance(obj, dict):
                for key, value in six.iteritems(obj.copy()):
                    obj[key] = odict_encoder(value)
                return dict(obj)
            elif isinstance(obj, (list, tuple)):
                obj = list(obj)
                for idx, entry in enumerate(obj):
                    obj[idx] = odict_encoder(entry)
                return obj
            return obj
        if msgpack.version >= (0, 4, 0):
            return msgpack.dumps(odict_encoder(msg), use_bin_type=use_bin_type)
        else:
            return msgpack.dumps(odict_encoder(msg))
    except (SystemError, TypeError) as exc:  # pylint: disable=W0705
        log.critical(
            'Unable to serialize message! Consider upgrading msgpack. '
            'Message which failed was %s, with exception %s', msg, exc
        )
        return msgpack.dumps(msg, default=ext_type_encoder)

    def dump(self, msg, fn_):
        '''
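The verylong_encoder exists because the MessagePack integer type stops at (2^64)-1, so a 20-digit Salt jid overflows the packer. A minimal sketch of the failure and the string fallback (assumes a reasonably recent msgpack; older releases raise PackValueError, which is a ValueError subclass, hence the double catch):

import msgpack

jid = 20180227140750302662          # >= 2**64, too large for a msgpack integer
try:
    msgpack.packb({'jid': jid})
except (OverflowError, ValueError):
    packed = msgpack.packb({'jid': str(jid)})
    print(msgpack.unpackb(packed, raw=False))   # {'jid': '20180227140750302662'}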
@@ -168,7 +168,11 @@ def generate_minion_id():

    :return:
    '''
    return _generate_minion_id().first() or 'localhost'
    try:
        ret = salt.utils.stringutils.to_unicode(_generate_minion_id().first())
    except TypeError:
        ret = None
    return ret or 'localhost'


def get_socket(addr, type=socket.SOCK_STREAM, proto=0):
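The reworked generate_minion_id() keeps the old fallback: when no candidate exists, to_unicode(None) raises TypeError and the function still returns 'localhost'. A rough stdlib-only sketch of that shape (all names here are illustrative):

def pick_minion_id(candidates):
    def to_unicode(value):
        if isinstance(value, bytes):
            return value.decode('utf-8')
        if isinstance(value, str):
            return value
        raise TypeError('expected str or bytes, got {0}'.format(type(value)))

    try:
        ret = to_unicode(next(iter(candidates), None))
    except TypeError:
        ret = None
    return ret or 'localhost'

print(pick_minion_id([]))            # localhost
print(pick_minion_id([b'web01']))    # web01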
@@ -134,7 +134,8 @@ class Schedule(object):

    def _get_schedule(self,
                      include_opts=True,
                      include_pillar=True):
                      include_pillar=True,
                      remove_hidden=False):
        '''
        Return the schedule data structure
        '''
@@ -150,6 +151,12 @@ class Schedule(object):
                raise ValueError('Schedule must be of type dict.')
            schedule.update(opts_schedule)

        if remove_hidden:
            _schedule = copy.deepcopy(schedule)
            for job in _schedule:
                for item in _schedule[job]:
                    if item.startswith('_'):
                        del schedule[job][item]
        return schedule

    def _check_max_running(self, func, data, opts):
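remove_hidden drops the scheduler's internal bookkeeping keys (anything starting with an underscore) before the schedule is handed out. A standalone sketch of the same pruning; iterating over a deep copy lets the real dict be modified safely while it is walked:

import copy

def prune_hidden(schedule):
    snapshot = copy.deepcopy(schedule)
    for job in snapshot:
        for item in snapshot[job]:
            if item.startswith('_'):
                del schedule[job][item]
    return schedule

jobs = {'highstate': {'function': 'state.apply', 'seconds': 3600,
                      '_next_fire_time': None, '_splay': None}}
print(prune_hidden(jobs))   # the '_*' keys are gone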
@@ -210,7 +217,8 @@ class Schedule(object):

        schedule_conf = os.path.join(minion_d_dir, '_schedule.conf')
        log.debug('Persisting schedule')
        schedule_data = self._get_schedule(include_pillar=False)
        schedule_data = self._get_schedule(include_pillar=False,
                                           remove_hidden=True)
        try:
            with salt.utils.files.fopen(schedule_conf, 'wb+') as fp_:
                fp_.write(
@@ -789,12 +797,16 @@ class Schedule(object):
        '''
        Evaluate and execute the schedule

        :param int now: Override current time with a Unix timestamp``
        :param datetime now: Override current time with a datetime object instance``

        '''

        log.trace('==== evaluating schedule now %s =====', now)

        loop_interval = self.opts['loop_interval']
        if not isinstance(loop_interval, datetime.timedelta):
            loop_interval = datetime.timedelta(seconds=loop_interval)

        def _splay(splaytime):
            '''
            Calculate splaytime
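Normalizing loop_interval to a timedelta up front is what lets every later comparison in this file drop the repeated datetime.timedelta(seconds=self.opts['loop_interval']) construction. A small sketch of that normalization:

import datetime

def normalize_interval(value):
    if not isinstance(value, datetime.timedelta):
        value = datetime.timedelta(seconds=value)
    return value

now = datetime.datetime.now()
loop_interval = normalize_interval(60)
print(now - loop_interval)   # one minute ago, whatever type the option had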
@@ -904,20 +916,20 @@ class Schedule(object):
if 'run_explicit' in data:
    _run_explicit = []
    for _run_time in data['run_explicit']:
        _run_explicit.append(datetime.datetime.strptime(_run_time['time'],
                                                        _run_time['time_fmt']))

    if isinstance(_run_explicit, six.integer_types):
        _run_explicit = [_run_explicit]
        if isinstance(_run_time, datetime.datetime):
            _run_explicit.append(_run_time)
        else:
            _run_explicit.append(datetime.datetime.strptime(_run_time['time'],
                                                            _run_time['time_fmt']))

    # Copy the list so we can loop through it
    for i in copy.deepcopy(_run_explicit):
        if len(_run_explicit) > 1:
            if i < now - datetime.timedelta(seconds=self.opts['loop_interval']):
            if i < now - loop_interval:
                _run_explicit.remove(i)

    if _run_explicit:
        if _run_explicit[0] <= now < _run_explicit[0] + datetime.timedelta(seconds=self.opts['loop_interval']):
        if _run_explicit[0] <= now < _run_explicit[0] + loop_interval:
            run = True
            data['_next_fire_time'] = _run_explicit[0]
@@ -940,24 +952,25 @@ class Schedule(object):

elif 'once' in data:
    if data['_next_fire_time']:
        if data['_next_fire_time'] < now - datetime.timedelta(seconds=self.opts['loop_interval']) or \
        if data['_next_fire_time'] < now - loop_interval or \
                data['_next_fire_time'] > now and \
                not data['_splay']:
            continue

    if not data['_next_fire_time'] and \
            not data['_splay']:
        once_fmt = data.get('once_fmt', '%Y-%m-%dT%H:%M:%S')
        try:
            once = datetime.datetime.strptime(data['once'],
                                              once_fmt)
        except (TypeError, ValueError):
            log.error('Date string could not be parsed: %s, %s',
                      data['once'], once_fmt)
            continue
    if not data['_next_fire_time'] and not data['_splay']:
        once = data['once']
        if not isinstance(once, datetime.datetime):
            once_fmt = data.get('once_fmt', '%Y-%m-%dT%H:%M:%S')
            try:
                once = datetime.datetime.strptime(data['once'],
                                                  once_fmt)
            except (TypeError, ValueError):
                log.error('Date string could not be parsed: %s, %s',
                          data['once'], once_fmt)
                continue
        # If _next_fire_time is less than now or greater
        # than now, continue.
        if once < now - datetime.timedelta(seconds=self.opts['loop_interval']):
        if once < now - loop_interval:
            continue
        else:
            data['_next_fire_time'] = once
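The reworked 'once' handling accepts either a ready-made datetime or a string parsed with the job's once_fmt. A sketch of just that branch (illustrative, outside of Salt):

import datetime

def parse_once(value, once_fmt='%Y-%m-%dT%H:%M:%S'):
    if isinstance(value, datetime.datetime):
        return value
    return datetime.datetime.strptime(value, once_fmt)

print(parse_once('2018-02-27T14:07:50'))
print(parse_once(datetime.datetime(2018, 2, 27, 14, 7, 50)))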
@@ -1017,7 +1030,7 @@ class Schedule(object):
# Copy the list so we can loop through it
for i in copy.deepcopy(_when):
    if len(_when) > 1:
        if i < now - datetime.timedelta(seconds=self.opts['loop_interval']):
        if i < now - loop_interval:
            # Remove all missed schedules except the latest one.
            # We need it to detect if it was triggered previously.
            _when.remove(i)
@@ -1029,7 +1042,7 @@ class Schedule(object):

if '_run' not in data:
    # Prevent run of jobs from the past
    data['_run'] = bool(when >= now - datetime.timedelta(seconds=self.opts['loop_interval']))
    data['_run'] = bool(when >= now - loop_interval)

if not data['_next_fire_time']:
    data['_next_fire_time'] = when
@@ -1071,13 +1084,15 @@ class Schedule(object):
        log.error('Invalid date string. Ignoring')
        continue
else:
    try:
        when = dateutil_parser.parse(data['when'])
    except ValueError:
        log.error('Invalid date string. Ignoring')
        continue
    when = data['when']
    if not isinstance(when, datetime.datetime):
        try:
            when = dateutil_parser.parse(when)
        except ValueError:
            log.error('Invalid date string. Ignoring')
            continue

if when < now - datetime.timedelta(seconds=self.opts['loop_interval']) and \
if when < now - loop_interval and \
        not data.get('_run', False) and \
        not run and \
        not data['_splay']:
@@ -1119,11 +1134,10 @@ class Schedule(object):
    interval = (now - data['_next_fire_time']).total_seconds()
    if interval >= 60 and interval < self.loop_interval:
        self.loop_interval = interval

else:
    continue

seconds = (data['_next_fire_time'] - now).total_seconds()
seconds = int((data['_next_fire_time'] - now).total_seconds())

if 'splay' in data:
    # Got "splay" configured, make decision to run a job based on that
@@ -1151,7 +1165,7 @@ class Schedule(object):
    if seconds <= 0:
        run = True
elif 'when' in data and data['_run']:
    if data['_next_fire_time'] <= now <= (data['_next_fire_time'] + datetime.timedelta(seconds=self.opts['loop_interval'])):
    if data['_next_fire_time'] <= now <= (data['_next_fire_time'] + loop_interval):
        data['_run'] = False
        run = True
elif 'cron' in data:
@@ -1161,7 +1175,7 @@ class Schedule(object):
        data['_next_fire_time'] = None
        run = True
elif 'once' in data:
    if data['_next_fire_time'] <= now <= (data['_next_fire_time'] + datetime.timedelta(seconds=self.opts['loop_interval'])):
    if data['_next_fire_time'] <= now <= (data['_next_fire_time'] + loop_interval):
        run = True
elif seconds == 0:
    run = True
@@ -1257,7 +1271,7 @@ class Schedule(object):
    data['run_explicit'] = []
# Add a run_explicit for immediately after the
# skip_during_range ends
_run_immediate = (end + datetime.timedelta(seconds=self.opts['loop_interval'])).strftime('%Y-%m-%dT%H:%M:%S')
_run_immediate = (end + loop_interval).strftime('%Y-%m-%dT%H:%M:%S')
if _run_immediate not in data['run_explicit']:
    data['run_explicit'].append({'time': _run_immediate,
                                 'time_fmt': '%Y-%m-%dT%H:%M:%S'})
@@ -1292,18 +1306,19 @@ class Schedule(object):
if 'skip_explicit' in data:
    _skip_explicit = []
    for _skip_time in data['skip_explicit']:
        _skip_explicit.append(datetime.datetime.strptime(_skip_time['time'],
                                                         _skip_time['time_fmt']))
    if isinstance(_skip_explicit, six.string_types):
        _skip_explicit = [_skip_explicit]
        if isinstance(_skip_time, datetime.datetime):
            _skip_explicit.append(_skip_time)
        else:
            _skip_explicit.append(datetime.datetime.strptime(_skip_time['time'],
                                                             _skip_time['time_fmt']))

    # Copy the list so we can loop through it
    for i in copy.deepcopy(_skip_explicit):
        if i < now - datetime.timedelta(seconds=self.opts['loop_interval']):
        if i < now - loop_interval:
            _skip_explicit.remove(i)

    if _skip_explicit:
        if _skip_explicit[0] <= now <= (_skip_explicit[0] + datetime.timedelta(seconds=self.opts['loop_interval'])):
        if _skip_explicit[0] <= now <= (_skip_explicit[0] + loop_interval):
            if self.skip_function:
                run = True
                func = self.skip_function
@@ -193,10 +193,10 @@ class SSDPFactory(SSDPBase):

            self.log.debug('Received "%s" from %s:%s', message, *addr)
            self._sendto(
                str('{0}:@:{1}').format(  # future lint: disable=blacklisted-function
                salt.utils.stringutils.to_bytes(str('{0}:@:{1}').format(  # future lint: disable=blacklisted-function
                    self.signature,
                    salt.utils.json.dumps(self.answer, _json_module=_json)
                ),
                )),
                addr
            )
        else:
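The discovery reply is now converted to bytes before being handed to the UDP transport, since socket send calls reject text payloads on Python 3. A plain-stdlib sketch of the same idea (the signature, answer, and port below are made-up examples, not Salt's defaults):

import json
import socket

signature = '__example_signature'
answer = {'master': '10.0.0.1'}
payload = '{0}:@:{1}'.format(signature, json.dumps(answer)).encode('utf-8')

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(payload, ('127.0.0.1', 4520))   # sendto() requires bytes, not str
sock.close()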
@@ -16,6 +16,7 @@ import sys
# Import Salt libs
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
from salt.exceptions import CommandExecutionError
from salt.utils.decorators.jinja import jinja_filter
@@ -55,12 +56,13 @@ def get_user():
    Get the current user
    '''
    if HAS_PWD:
        return pwd.getpwuid(os.geteuid()).pw_name
        ret = pwd.getpwuid(os.geteuid()).pw_name
    elif HAS_WIN_FUNCTIONS and salt.utils.win_functions.HAS_WIN32:
        return salt.utils.win_functions.get_current_user()
        ret = salt.utils.win_functions.get_current_user()
    else:
        raise CommandExecutionError(
            'Required external library (pwd or win32api) not installed')
    return salt.utils.stringutils.to_unicode(ret)


@jinja_filter('get_uid')
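get_user() now funnels both the pwd branch and the Windows branch through a single return, so the username is normalized to unicode exactly once. A rough stdlib-only sketch of that single-exit shape (getpass stands in for the platform-specific lookups):

import getpass

def get_user():
    ret = getpass.getuser()          # stand-in for the pwd / win32 branches
    if isinstance(ret, bytes):       # normalize once, on the way out
        ret = ret.decode('utf-8')
    return ret

print(get_user())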
@@ -26,10 +26,15 @@ class SSHMasterTestCase(ModuleCase):
    def test_service(self):
        service = 'cron'
        os_family = self.run_function('grains.get', ['os_family'], minion_tgt='localhost')
        os_release = self.run_function('grains.get', ['osrelease'], minion_tgt='localhost')
        if os_family == 'RedHat':
            service = 'crond'
        elif os_family == 'Arch':
            service = 'sshd'
        elif os_family == 'MacOS':
            service = 'org.ntp.ntpd'
            if int(os_release.split('.')[1]) >= 13:
                service = 'com.apple.AirPlayXPCHelper'
        ret = self.run_function('service.get_all', minion_tgt='localhost')
        self.assertIn(service, ret)
        self.run_function('service.stop', [service], minion_tgt='localhost')
@@ -40,8 +45,12 @@ class SSHMasterTestCase(ModuleCase):
        self.assertTrue(ret)

    def test_grains_items(self):
        os_family = self.run_function('grains.get', ['os_family'], minion_tgt='localhost')
        ret = self.run_function('grains.items', minion_tgt='localhost')
        self.assertEqual(ret['kernel'], 'Linux')
        if os_family == 'MacOS':
            self.assertEqual(ret['kernel'], 'Darwin')
        else:
            self.assertEqual(ret['kernel'], 'Linux')

    def test_state_apply(self):
        ret = self.run_function('state.apply', ['core'], minion_tgt='localhost')
@@ -27,6 +27,7 @@ import salt.utils.files
import salt.utils.network
import salt.utils.platform
import salt.utils.yaml
from salt.ext import six
from salt.syspaths import CONFIG_DIR
from salt import config as sconfig
from salt.exceptions import (
@@ -604,6 +605,91 @@ class ConfigTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
        self.assertEqual(syndic_opts['_master_conf_file'], minion_conf_path)
        self.assertEqual(syndic_opts['_minion_conf_file'], syndic_conf_path)

    def _get_tally(self, conf_func):
        '''
        This ensures that any strings which are loaded are unicode strings
        '''
        tally = {}

        def _count_strings(config):
            if isinstance(config, dict):
                for key, val in six.iteritems(config):
                    log.debug('counting strings in dict key: %s', key)
                    log.debug('counting strings in dict val: %s', val)
                    _count_strings(key)
                    _count_strings(val)
            elif isinstance(config, list):
                log.debug('counting strings in list: %s', config)
                for item in config:
                    _count_strings(item)
            else:
                if isinstance(config, six.string_types):
                    if isinstance(config, six.text_type):
                        tally['unicode'] = tally.get('unicode', 0) + 1
                    else:
                        # We will never reach this on PY3
                        tally.setdefault('non_unicode', []).append(config)

        fpath = salt.utils.files.mkstemp(dir=TMP)
        try:
            with salt.utils.files.fopen(fpath, 'w') as wfh:
                wfh.write(textwrap.dedent('''
                    foo: bar
                    mylist:
                      - somestring
                      - 9
                      - 123.456
                      - True
                      - nested:
                        - key: val
                        - nestedlist:
                          - foo
                          - bar
                          - baz
                    mydict:
                      - somestring: 9
                      - 123.456: 789
                      - True: False
                      - nested:
                        - key: val
                        - nestedlist:
                          - foo
                          - bar
                          - baz'''))
                if conf_func is sconfig.master_config:
                    wfh.write('\n\n')
                    wfh.write(textwrap.dedent('''
                        rest_cherrypy:
                          port: 8000
                          disable_ssl: True
                          app_path: /beacon_demo
                          app: /srv/web/html/index.html
                          static: /srv/web/static'''))
            config = conf_func(fpath)
            _count_strings(config)
            return tally
        finally:
            if os.path.isfile(fpath):
                os.unlink(fpath)

    def test_conf_file_strings_are_unicode_for_master(self):
        '''
        This ensures that any strings which are loaded are unicode strings
        '''
        tally = self._get_tally(sconfig.master_config)
        non_unicode = tally.get('non_unicode', [])
        self.assertEqual(len(non_unicode), 8 if six.PY2 else 0, non_unicode)
        self.assertTrue(tally['unicode'] > 0)

    def test_conf_file_strings_are_unicode_for_minion(self):
        '''
        This ensures that any strings which are loaded are unicode strings
        '''
        tally = self._get_tally(sconfig.minion_config)
        non_unicode = tally.get('non_unicode', [])
        self.assertEqual(len(non_unicode), 0, non_unicode)
        self.assertTrue(tally['unicode'] > 0)

# <---- Salt Cloud Configuration Tests ---------------------------------------------

# cloud_config tests
tests/unit/ssh/test_ssh.py (new file, 47 added lines)
@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Daniel Wallace <dwallace@saltstack.com`
'''

# Import python libs
from __future__ import absolute_import, unicode_literals
import os

# Import Salt Testing libs
from tests.support.unit import skipIf
from tests.support.case import ShellCase
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, MagicMock

# Import Salt libs
import salt.config
from salt.client import ssh


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SSHPasswordTests(ShellCase):
    def test_password_failure(self):
        '''
        Check password failures when trying to deploy keys
        '''
        opts = salt.config.client_config(self.get_config_file_path('master'))
        opts['list_hosts'] = False
        opts['argv'] = ['test.ping']
        opts['selected_target_option'] = 'glob'
        opts['tgt'] = 'localhost'
        opts['arg'] = []
        roster = os.path.join(self.get_config_dir(), 'roster')
        handle_ssh_ret = [
            {'localhost': {'retcode': 255, 'stderr': u'Permission denied (publickey).\r\n', 'stdout': ''}},
        ]
        expected = {'localhost': 'Permission denied (publickey)'}
        display_output = MagicMock()
        with patch('salt.roster.get_roster_file', MagicMock(return_value=roster)), \
                patch('salt.client.ssh.SSH.handle_ssh', MagicMock(return_value=handle_ssh_ret)), \
                patch('salt.client.ssh.SSH.key_deploy', MagicMock(return_value=expected)), \
                patch('salt.output.display_output', display_output):
            client = ssh.SSH(opts)
            ret = next(client.run_iter())
            with self.assertRaises(SystemExit):
                client.run()
        display_output.assert_called_once_with(expected, 'nested', opts)
        self.assertIs(ret, handle_ssh_ret[0])
@@ -7,24 +7,24 @@
    ~~~~~~~~~~~~~~~~~~~~~~~
'''

# Import Salt libs
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import time
import errno
import threading
import datetime

# Import Salt Testing libs
from tests.support.unit import skipIf, TestCase
from tests.support.helpers import MockWraps
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch
from tests.support.mock import NO_MOCK, NO_MOCK_REASON

# Import salt libs
import salt.payload
# Import Salt libs
from salt.utils import immutabletypes
from salt.utils.odict import OrderedDict
import salt.exceptions
import salt.payload

# Import 3rd-party libs
import msgpack
import zmq
from salt.ext import six
@@ -49,15 +49,109 @@ class PayloadTestCase(TestCase):
            self.assertNoOrderedDict(chunk)

    def test_list_nested_odicts(self):
        with patch('msgpack.version', (0, 1, 13)):
            msgpack.dumps = MockWraps(
                msgpack.dumps, 1, TypeError('ODict TypeError Forced')
            )
            payload = salt.payload.Serial('msgpack')
            idata = {'pillar': [OrderedDict(environment='dev')]}
            odata = payload.loads(payload.dumps(idata.copy()))
            self.assertNoOrderedDict(odata)
            self.assertEqual(idata, odata)
        payload = salt.payload.Serial('msgpack')
        idata = {'pillar': [OrderedDict(environment='dev')]}
        odata = payload.loads(payload.dumps(idata.copy()))
        self.assertNoOrderedDict(odata)
        self.assertEqual(idata, odata)

    def test_datetime_dump_load(self):
        '''
        Check the custom datetime handler can understand itself
        '''
        payload = salt.payload.Serial('msgpack')
        dtvalue = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
        idata = {dtvalue: dtvalue}
        sdata = payload.dumps(idata.copy())
        odata = payload.loads(sdata)
        self.assertEqual(
            sdata,
            b'\x81\xc7\x18N20010203T04:05:06.000007\xc7\x18N20010203T04:05:06.000007')
        self.assertEqual(idata, odata)

    def test_verylong_dump_load(self):
        '''
        Test verylong encoder/decoder
        '''
        payload = salt.payload.Serial('msgpack')
        idata = {'jid': 20180227140750302662}
        sdata = payload.dumps(idata.copy())
        odata = payload.loads(sdata)
        idata['jid'] = '{0}'.format(idata['jid'])
        self.assertEqual(idata, odata)

    def test_immutable_dict_dump_load(self):
        '''
        Test immutable dict encoder/decoder
        '''
        payload = salt.payload.Serial('msgpack')
        idata = {'dict': {'key': 'value'}}
        sdata = payload.dumps({'dict': immutabletypes.ImmutableDict(idata['dict'])})
        odata = payload.loads(sdata)
        self.assertEqual(idata, odata)

    def test_immutable_list_dump_load(self):
        '''
        Test immutable list encoder/decoder
        '''
        payload = salt.payload.Serial('msgpack')
        idata = {'list': [1, 2, 3]}
        sdata = payload.dumps({'list': immutabletypes.ImmutableList(idata['list'])})
        odata = payload.loads(sdata)
        self.assertEqual(idata, odata)

    def test_immutable_set_dump_load(self):
        '''
        Test immutable set encoder/decoder
        '''
        payload = salt.payload.Serial('msgpack')
        idata = {'set': ['red', 'green', 'blue']}
        sdata = payload.dumps({'set': immutabletypes.ImmutableSet(idata['set'])})
        odata = payload.loads(sdata)
        self.assertEqual(idata, odata)

    def test_odict_dump_load(self):
        '''
        Test odict just works. It wasn't until msgpack 0.2.0
        '''
        payload = salt.payload.Serial('msgpack')
        data = OrderedDict()
        data['a'] = 'b'
        data['y'] = 'z'
        data['j'] = 'k'
        data['w'] = 'x'
        sdata = payload.dumps({'set': data})
        odata = payload.loads(sdata)
        self.assertEqual({'set': dict(data)}, odata)

    def test_mixed_dump_load(self):
        '''
        Test we can handle all exceptions at once
        '''
        payload = salt.payload.Serial('msgpack')
        dtvalue = datetime.datetime(2001, 2, 3, 4, 5, 6, 7)
        od = OrderedDict()
        od['a'] = 'b'
        od['y'] = 'z'
        od['j'] = 'k'
        od['w'] = 'x'
        idata = {dtvalue: dtvalue,  # datetime
                 'jid': 20180227140750302662,  # long int
                 'dict': immutabletypes.ImmutableDict({'key': 'value'}),  # immutable dict
                 'list': immutabletypes.ImmutableList([1, 2, 3]),  # immutable list
                 'set': immutabletypes.ImmutableSet(('red', 'green', 'blue')),  # immutable set
                 'odict': od,  # odict
                 }
        edata = {dtvalue: dtvalue,  # datetime, == input
                 'jid': '20180227140750302662',  # string repr of long int
                 'dict': {'key': 'value'},  # builtin dict
                 'list': [1, 2, 3],  # builtin list
                 'set': ['red', 'green', 'blue'],  # builtin set
                 'odict': dict(od),  # builtin dict
                 }
        sdata = payload.dumps(idata)
        odata = payload.loads(sdata)
        self.assertEqual(edata, odata)


class SREQTestCase(TestCase):
@@ -305,7 +305,7 @@ class SSDPFactoryTestCase(TestCase):
        assert factory.log.debug.called
        assert factory.disable_hidden
        assert factory._sendto.called
        assert factory._sendto.call_args[0][0] == "{}:@:{{}}".format(signature)
        assert factory._sendto.call_args[0][0] == salt.utils.stringutils.to_bytes("{}:@:{{}}".format(signature))
        assert 'Received "%s" from %s:%s' in factory.log.debug.call_args[0][0]