Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Commit 272044c688: Roll back use of explicit unicode literals
Parent: 374d1ef4f8
66 changed files with 6380 additions and 6362 deletions
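For orientation, the pattern being rolled back throughout this diff is the explicit u'' string prefix (together with the accompanying "# future lint: disable=non-unicode-string" pragmas) that an earlier sweep had added. A minimal before/after sketch of the change, using lines taken from the hunks below:

    # Before this commit: explicit unicode literals everywhere
    cmd[u'token'] = token
    raise EauthAuthenticationError(u'No authentication credentials given')

    # After this commit: plain native string literals
    cmd['token'] = token
    raise EauthAuthenticationError('No authentication credentials given')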
@@ -7,7 +7,6 @@ Salt package
from __future__ import absolute_import

import warnings

-# future lint: disable=non-unicode-string
# All salt related deprecation warnings should be shown once each!
warnings.filterwarnings(
'once', # Show once
@@ -15,19 +14,18 @@ warnings.filterwarnings(
DeprecationWarning, # This filter is for DeprecationWarnings
r'^(salt|salt\.(.*))$' # Match module(s) 'salt' and 'salt.<whatever>'
)
-# future lint: enable=non-unicode-string

# While we are supporting Python2.6, hide nested with-statements warnings
warnings.filterwarnings(
-u'ignore',
-u'With-statements now directly support multiple context managers',
+'ignore',
+'With-statements now directly support multiple context managers',
DeprecationWarning
)

# Filter the backports package UserWarning about being re-imported
warnings.filterwarnings(
-u'ignore',
-u'^Module backports was already imported from (.*), but (.*) is being added to sys.path$',
+'ignore',
+'^Module backports was already imported from (.*), but (.*) is being added to sys.path$',
UserWarning
)

@@ -39,7 +37,7 @@ def __define_global_system_encoding_variable__():
# and reset to None
encoding = None

-if not sys.platform.startswith(u'win') and sys.stdin is not None:
+if not sys.platform.startswith('win') and sys.stdin is not None:
# On linux we can rely on sys.stdin for the encoding since it
# most commonly matches the filesystem encoding. This however
# does not apply to windows
@@ -65,16 +63,16 @@ def __define_global_system_encoding_variable__():
# the way back to ascii
encoding = sys.getdefaultencoding()
if not encoding:
-if sys.platform.startswith(u'darwin'):
+if sys.platform.startswith('darwin'):
# Mac OS X uses UTF-8
-encoding = u'utf-8'
-elif sys.platform.startswith(u'win'):
+encoding = 'utf-8'
+elif sys.platform.startswith('win'):
# Windows uses a configurable encoding; on Windows, Python uses the name "mbcs"
# to refer to whatever the currently configured encoding is.
-encoding = u'mbcs'
+encoding = 'mbcs'
else:
# On linux default to ascii as a last resort
-encoding = u'ascii'
+encoding = 'ascii'

# We can't use six.moves.builtins because these builtins get deleted sooner
# than expected. See:
@@ -85,7 +83,7 @@ def __define_global_system_encoding_variable__():
import builtins # pylint: disable=import-error

# Define the detected encoding as a built-in variable for ease of use
-setattr(builtins, u'__salt_system_encoding__', encoding)
+setattr(builtins, '__salt_system_encoding__', encoding)

# This is now garbage collectable
del sys
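The hunks above only drop the u'' prefixes; the behaviour of __define_global_system_encoding_variable__ is unchanged. As a hedged, simplified sketch of what that function ends up doing (the real detection chain via sys.stdin and locale is shown in the hunks, this is not the full logic):

    import builtins
    import sys

    # detected from sys.stdin / locale, falling back to 'ascii' as above
    encoding = sys.getdefaultencoding() or 'ascii'
    setattr(builtins, '__salt_system_encoding__', encoding)

    # any module imported afterwards can read the builtin directly
    print(__salt_system_encoding__)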
@@ -46,7 +46,7 @@ else:


if HAS_XML:
-if not hasattr(ElementTree, u'ParseError'):
+if not hasattr(ElementTree, 'ParseError'):
class ParseError(Exception):
'''
older versions of ElementTree do not have ParseError
@@ -56,7 +56,7 @@ if HAS_XML:
ElementTree.ParseError = ParseError


-def text_(s, encoding=u'latin-1', errors=u'strict'):
+def text_(s, encoding='latin-1', errors='strict'):
'''
If ``s`` is an instance of ``binary_type``, return
``s.decode(encoding, errors)``, otherwise return ``s``
@@ -66,7 +66,7 @@ def text_(s, encoding=u'latin-1', errors=u'strict'):
return s


-def bytes_(s, encoding=u'latin-1', errors=u'strict'):
+def bytes_(s, encoding='latin-1', errors='strict'):
'''
If ``s`` is an instance of ``text_type``, return
``s.encode(encoding, errors)``, otherwise return ``s``
@@ -79,25 +79,25 @@ def bytes_(s, encoding=u'latin-1', errors=u'strict'):
if PY3:
def ascii_native_(s):
if isinstance(s, text_type):
-s = s.encode(u'ascii')
-return str(s, u'ascii', u'strict')
+s = s.encode('ascii')
+return str(s, 'ascii', 'strict')
else:
def ascii_native_(s):
if isinstance(s, text_type):
-s = s.encode(u'ascii')
+s = s.encode('ascii')
return str(s)

ascii_native_.__doc__ = '''
Python 3: If ``s`` is an instance of ``text_type``, return
-``s.encode(u'ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
+``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``

Python 2: If ``s`` is an instance of ``text_type``, return
-``s.encode(u'ascii')``, otherwise return ``str(s)``
-''' # future lint: disable=non-unicode-string
+``s.encode('ascii')``, otherwise return ``str(s)``
+'''


if PY3:
-def native_(s, encoding=u'latin-1', errors=u'strict'):
+def native_(s, encoding='latin-1', errors='strict'):
'''
If ``s`` is an instance of ``text_type``, return
``s``, otherwise return ``str(s, encoding, errors)``
@@ -106,7 +106,7 @@ if PY3:
return s
return str(s, encoding, errors)
else:
-def native_(s, encoding=u'latin-1', errors=u'strict'):
+def native_(s, encoding='latin-1', errors='strict'):
'''
If ``s`` is an instance of ``text_type``, return
``s.encode(encoding, errors)``, otherwise return ``str(s)``
@@ -121,7 +121,7 @@ return ``str(s, encoding, errors)``

Python 2: If ``s`` is an instance of ``text_type``, return
``s.encode(encoding, errors)``, otherwise return ``str(s)``
-''' # future lint: disable=non-unicode-string
+'''


def string_io(data=None): # cStringIO can't handle unicode
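For reference, a hedged usage sketch of the text_/bytes_/native_ helpers touched above; behaviour is taken from their docstrings, and utf-8 is passed explicitly here although the default encoding remains 'latin-1':

    text_(b'caf\xc3\xa9', encoding='utf-8')   # bytes in  -> 'café' (decoded)
    text_('already text')                     # text in   -> returned unchanged
    bytes_('café', encoding='utf-8')          # text in   -> b'caf\xc3\xa9' (encoded)
    native_('café')                           # Python 3: text is returned as-is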
(File diff suppressed because it is too large.)
@@ -38,7 +38,7 @@ def tokenify(cmd, token=None):
Otherwise return cmd
'''
if token is not None:
-cmd[u'token'] = token
+cmd['token'] = token
return cmd


@@ -51,19 +51,19 @@ class APIClient(object):
if not opts:
opts = salt.config.client_config(
os.environ.get(
-u'SALT_MASTER_CONFIG',
-os.path.join(syspaths.CONFIG_DIR, u'master')
+'SALT_MASTER_CONFIG',
+os.path.join(syspaths.CONFIG_DIR, 'master')
)
)
self.opts = opts
-self.localClient = salt.client.get_local_client(self.opts[u'conf_file'])
+self.localClient = salt.client.get_local_client(self.opts['conf_file'])
self.runnerClient = salt.runner.RunnerClient(self.opts)
self.wheelClient = salt.wheel.Wheel(self.opts)
self.resolver = salt.auth.Resolver(self.opts)
self.event = salt.utils.event.get_event(
-u'master',
-self.opts[u'sock_dir'],
-self.opts[u'transport'],
+'master',
+self.opts['sock_dir'],
+self.opts['transport'],
opts=self.opts,
listen=listen)

@@ -119,20 +119,20 @@ class APIClient(object):

'''
cmd = dict(cmd) # make copy
-client = u'minion' # default to local minion client
-mode = cmd.get(u'mode', u'async') # default to 'async'
+client = 'minion' # default to local minion client
+mode = cmd.get('mode', 'async') # default to 'async'

# check for wheel or runner prefix to fun name to use wheel or runner client
-funparts = cmd.get(u'fun', u'').split(u'.')
-if len(funparts) > 2 and funparts[0] in [u'wheel', u'runner']: # master
+funparts = cmd.get('fun', '').split('.')
+if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']: # master
client = funparts[0]
-cmd[u'fun'] = u'.'.join(funparts[1:]) # strip prefix
+cmd['fun'] = '.'.join(funparts[1:]) # strip prefix

-if not (u'token' in cmd or
-(u'eauth' in cmd and u'password' in cmd and u'username' in cmd)):
-raise EauthAuthenticationError(u'No authentication credentials given')
+if not ('token' in cmd or
+('eauth' in cmd and 'password' in cmd and 'username' in cmd)):
+raise EauthAuthenticationError('No authentication credentials given')

-executor = getattr(self, u'{0}_{1}'.format(client, mode))
+executor = getattr(self, '{0}_{1}'.format(client, mode))
result = executor(**cmd)
return result

@@ -205,9 +205,9 @@ class APIClient(object):

Adds client per the command.
'''
-cmd[u'client'] = u'minion'
-if len(cmd[u'module'].split(u'.')) > 2 and cmd[u'module'].split(u'.')[0] in [u'runner', u'wheel']:
-cmd[u'client'] = u'master'
+cmd['client'] = 'minion'
+if len(cmd['module'].split('.')) > 2 and cmd['module'].split('.')[0] in ['runner', 'wheel']:
+cmd['client'] = 'master'
return self._signature(cmd)

def _signature(self, cmd):
@@ -217,20 +217,20 @@ class APIClient(object):
'''
result = {}

-client = cmd.get(u'client', u'minion')
-if client == u'minion':
-cmd[u'fun'] = u'sys.argspec'
-cmd[u'kwarg'] = dict(module=cmd[u'module'])
+client = cmd.get('client', 'minion')
+if client == 'minion':
+cmd['fun'] = 'sys.argspec'
+cmd['kwarg'] = dict(module=cmd['module'])
result = self.run(cmd)
-elif client == u'master':
-parts = cmd[u'module'].split(u'.')
+elif client == 'master':
+parts = cmd['module'].split('.')
client = parts[0]
-module = u'.'.join(parts[1:]) # strip prefix
-if client == u'wheel':
+module = '.'.join(parts[1:]) # strip prefix
+if client == 'wheel':
functions = self.wheelClient.functions
-elif client == u'runner':
+elif client == 'runner':
functions = self.runnerClient.functions
-result = {u'master': salt.utils.args.argspec_report(functions, module)}
+result = {'master': salt.utils.args.argspec_report(functions, module)}
return result

def create_token(self, creds):
@@ -275,20 +275,20 @@ class APIClient(object):
tokenage = self.resolver.mk_token(creds)
except Exception as ex:
raise EauthAuthenticationError(
-u"Authentication failed with {0}.".format(repr(ex)))
+"Authentication failed with {0}.".format(repr(ex)))

-if u'token' not in tokenage:
-raise EauthAuthenticationError(u"Authentication failed with provided credentials.")
+if 'token' not in tokenage:
+raise EauthAuthenticationError("Authentication failed with provided credentials.")

# Grab eauth config for the current backend for the current user
-tokenage_eauth = self.opts[u'external_auth'][tokenage[u'eauth']]
-if tokenage[u'name'] in tokenage_eauth:
-tokenage[u'perms'] = tokenage_eauth[tokenage[u'name']]
+tokenage_eauth = self.opts['external_auth'][tokenage['eauth']]
+if tokenage['name'] in tokenage_eauth:
+tokenage['perms'] = tokenage_eauth[tokenage['name']]
else:
-tokenage[u'perms'] = tokenage_eauth[u'*']
+tokenage['perms'] = tokenage_eauth['*']

-tokenage[u'user'] = tokenage[u'name']
-tokenage[u'username'] = tokenage[u'name']
+tokenage['user'] = tokenage['name']
+tokenage['username'] = tokenage['name']

return tokenage

@@ -301,11 +301,11 @@ class APIClient(object):
result = self.resolver.get_token(token)
except Exception as ex:
raise EauthAuthenticationError(
-u"Token validation failed with {0}.".format(repr(ex)))
+"Token validation failed with {0}.".format(repr(ex)))

return result

-def get_event(self, wait=0.25, tag=u'', full=False):
+def get_event(self, wait=0.25, tag='', full=False):
'''
Get a single salt event.
If no events are available, then block for up to ``wait`` seconds.
@@ -323,4 +323,4 @@ class APIClient(object):
Need to convert this to a master call with appropriate authentication

'''
-return self.event.fire_event(data, salt.utils.event.tagify(tag, u'wui'))
+return self.event.fire_event(data, salt.utils.event.tagify(tag, 'wui'))
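A hedged example of the kind of command dictionary APIClient.run() expects, based only on the checks visible above (either a 'token' or the eauth/username/password trio must be present, and a 'runner.' or 'wheel.' prefix on 'fun' re-routes the call to the master-side client). The function name and credentials are illustrative, not taken from this diff:

    cmd = {
        'mode': 'async',                   # default comes from cmd.get('mode', 'async')
        'fun': 'runner.jobs.list_jobs',    # 'runner.' prefix -> runner client, prefix stripped
        'eauth': 'pam',
        'username': 'saltdev',
        'password': 'saltdev',
    }
    client = APIClient(opts)
    result = client.run(cmd)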
@@ -38,18 +38,18 @@ import tornado.stack_context
log = logging.getLogger(__name__)

CLIENT_INTERNAL_KEYWORDS = frozenset([
-u'client',
-u'cmd',
-u'eauth',
-u'fun',
-u'kwarg',
-u'match',
-u'token',
-u'__jid__',
-u'__tag__',
-u'__user__',
-u'username',
-u'password'
+'client',
+'cmd',
+'eauth',
+'fun',
+'kwarg',
+'match',
+'token',
+'__jid__',
+'__tag__',
+'__user__',
+'username',
+'password'
])


@@ -81,9 +81,9 @@ class ClientFuncsDict(collections.MutableMapping):
raise KeyError

def wrapper(*args, **kwargs):
-low = {u'fun': key,
-u'args': args,
-u'kwargs': kwargs,
+low = {'fun': key,
+'args': args,
+'kwargs': kwargs,
}
pub_data = {}
# Copy kwargs keys so we can iterate over and pop the pub data
@@ -91,18 +91,18 @@ class ClientFuncsDict(collections.MutableMapping):

# pull out pub_data if you have it
for kwargs_key in kwargs_keys:
-if kwargs_key.startswith(u'__pub_'):
+if kwargs_key.startswith('__pub_'):
pub_data[kwargs_key] = kwargs.pop(kwargs_key)

-async_pub = self.client._gen_async_pub(pub_data.get(u'__pub_jid'))
+async_pub = self.client._gen_async_pub(pub_data.get('__pub_jid'))

user = salt.utils.user.get_specific_user()
return self.client._proc_function(
key,
low,
user,
-async_pub[u'tag'], # TODO: fix
-async_pub[u'jid'], # TODO: fix
+async_pub['tag'], # TODO: fix
+async_pub['jid'], # TODO: fix
False, # Don't daemonize
)
return wrapper
@@ -133,14 +133,14 @@ class SyncClientMixin(object):
Execute a function through the master network interface.
'''
load = kwargs
-load[u'cmd'] = self.client
+load['cmd'] = self.client
channel = salt.transport.Channel.factory(self.opts,
-crypt=u'clear',
-usage=u'master_call')
+crypt='clear',
+usage='master_call')
ret = channel.send(load)
if isinstance(ret, collections.Mapping):
-if u'error' in ret:
-salt.utils.error.raise_error(**ret[u'error'])
+if 'error' in ret:
+salt.utils.error.raise_error(**ret['error'])
return ret

def cmd_sync(self, low, timeout=None, full_return=False):
@@ -159,19 +159,19 @@ class SyncClientMixin(object):
'eauth': 'pam',
})
'''
-event = salt.utils.event.get_master_event(self.opts, self.opts[u'sock_dir'], listen=True)
+event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=True)
job = self.master_call(**low)
-ret_tag = salt.utils.event.tagify(u'ret', base=job[u'tag'])
+ret_tag = salt.utils.event.tagify('ret', base=job['tag'])

if timeout is None:
-timeout = self.opts.get(u'rest_timeout', 300)
+timeout = self.opts.get('rest_timeout', 300)
ret = event.get_event(tag=ret_tag, full=True, wait=timeout, auto_reconnect=True)
if ret is None:
raise salt.exceptions.SaltClientTimeout(
-u"RunnerClient job '{0}' timed out".format(job[u'jid']),
-jid=job[u'jid'])
+"RunnerClient job '{0}' timed out".format(job['jid']),
+jid=job['jid'])

-return ret if full_return else ret[u'data'][u'return']
+return ret if full_return else ret['data']['return']

def cmd(self, fun, arg=None, pub_data=None, kwarg=None, print_event=True, full_return=False):
'''
@@ -206,40 +206,40 @@ class SyncClientMixin(object):
arg = tuple()
if not isinstance(arg, list) and not isinstance(arg, tuple):
raise salt.exceptions.SaltInvocationError(
-u'arg must be formatted as a list/tuple'
+'arg must be formatted as a list/tuple'
)
if pub_data is None:
pub_data = {}
if not isinstance(pub_data, dict):
raise salt.exceptions.SaltInvocationError(
-u'pub_data must be formatted as a dictionary'
+'pub_data must be formatted as a dictionary'
)
if kwarg is None:
kwarg = {}
if not isinstance(kwarg, dict):
raise salt.exceptions.SaltInvocationError(
-u'kwarg must be formatted as a dictionary'
+'kwarg must be formatted as a dictionary'
)
arglist = salt.utils.args.parse_input(
arg,
-no_parse=self.opts.get(u'no_parse', []))
+no_parse=self.opts.get('no_parse', []))

# if you were passed kwarg, add it to arglist
if kwarg:
-kwarg[u'__kwarg__'] = True
+kwarg['__kwarg__'] = True
arglist.append(kwarg)

args, kwargs = salt.minion.load_args_and_kwargs(
self.functions[fun], arglist, pub_data
)
-low = {u'fun': fun,
-u'arg': args,
-u'kwarg': kwargs}
+low = {'fun': fun,
+'arg': args,
+'kwarg': kwargs}
return self.low(fun, low, print_event=print_event, full_return=full_return)

@property
def mminion(self):
-if not hasattr(self, u'_mminion'):
+if not hasattr(self, '_mminion'):
self._mminion = salt.minion.MasterMinion(self.opts, states=False, rend=False)
return self._mminion

@@ -248,15 +248,15 @@ class SyncClientMixin(object):
Check for deprecated usage and allow until Salt Oxygen.
'''
msg = []
-if u'args' in low:
-msg.append(u'call with arg instead')
-low[u'arg'] = low.pop(u'args')
-if u'kwargs' in low:
-msg.append(u'call with kwarg instead')
-low[u'kwarg'] = low.pop(u'kwargs')
+if 'args' in low:
+msg.append('call with arg instead')
+low['arg'] = low.pop('args')
+if 'kwargs' in low:
+msg.append('call with kwarg instead')
+low['kwarg'] = low.pop('kwargs')

if msg:
-salt.utils.versions.warn_until(u'Oxygen', u' '.join(msg))
+salt.utils.versions.warn_until('Oxygen', ' '.join(msg))

return self._low(fun, low, print_event=print_event, full_return=full_return)

@@ -270,13 +270,13 @@ class SyncClientMixin(object):
class_name = self.__class__.__name__.lower()
except AttributeError:
log.warning(
-u'Unable to determine class name',
+'Unable to determine class name',
exc_info_on_loglevel=logging.DEBUG
)
return True

try:
-return self.opts[u'{0}_returns'.format(class_name)]
+return self.opts['{0}_returns'.format(class_name)]
except KeyError:
# No such option, assume this isn't one we care about gating and
# just return True.
@@ -299,24 +299,24 @@ class SyncClientMixin(object):
# this is not to clutter the output with the module loading
# if we have a high debug level.
self.mminion # pylint: disable=W0104
-jid = low.get(u'__jid__', salt.utils.jid.gen_jid(self.opts))
-tag = low.get(u'__tag__', salt.utils.event.tagify(jid, prefix=self.tag_prefix))
+jid = low.get('__jid__', salt.utils.jid.gen_jid(self.opts))
+tag = low.get('__tag__', salt.utils.event.tagify(jid, prefix=self.tag_prefix))

-data = {u'fun': u'{0}.{1}'.format(self.client, fun),
-u'jid': jid,
-u'user': low.get(u'__user__', u'UNKNOWN'),
+data = {'fun': '{0}.{1}'.format(self.client, fun),
+'jid': jid,
+'user': low.get('__user__', 'UNKNOWN'),
}

event = salt.utils.event.get_event(
-u'master',
-self.opts[u'sock_dir'],
-self.opts[u'transport'],
+'master',
+self.opts['sock_dir'],
+self.opts['transport'],
opts=self.opts,
listen=False)

if print_event:
print_func = self.print_async_event \
-if hasattr(self, u'print_async_event') \
+if hasattr(self, 'print_async_event') \
else None
else:
# Suppress printing of return event (this keeps us from printing
@@ -331,12 +331,12 @@ class SyncClientMixin(object):

# TODO: document these, and test that they exist
# TODO: Other things to inject??
-func_globals = {u'__jid__': jid,
-u'__user__': data[u'user'],
-u'__tag__': tag,
+func_globals = {'__jid__': jid,
+'__user__': data['user'],
+'__tag__': tag,
# weak ref to avoid the Exception in interpreter
# teardown of event
-u'__jid_event__': weakref.proxy(namespaced_event),
+'__jid_event__': weakref.proxy(namespaced_event),
}

try:
@@ -348,9 +348,9 @@ class SyncClientMixin(object):
completed_funcs = []

for mod_name in six.iterkeys(self_functions):
-if u'.' not in mod_name:
+if '.' not in mod_name:
continue
-mod, _ = mod_name.split(u'.', 1)
+mod, _ = mod_name.split('.', 1)
if mod in completed_funcs:
continue
completed_funcs.append(mod)
@@ -366,81 +366,81 @@ class SyncClientMixin(object):
# we make the transition we will load "kwargs" using format_call if
# there are no kwargs in the low object passed in.

-if u'arg' in low and u'kwarg' in low:
-args = low[u'arg']
-kwargs = low[u'kwarg']
+if 'arg' in low and 'kwarg' in low:
+args = low['arg']
+kwargs = low['kwarg']
else:
f_call = salt.utils.args.format_call(
self.functions[fun],
low,
expected_extra_kws=CLIENT_INTERNAL_KEYWORDS
)
-args = f_call.get(u'args', ())
-kwargs = f_call.get(u'kwargs', {})
+args = f_call.get('args', ())
+kwargs = f_call.get('kwargs', {})

# Update the event data with loaded args and kwargs
-data[u'fun_args'] = list(args) + ([kwargs] if kwargs else [])
-func_globals[u'__jid_event__'].fire_event(data, u'new')
+data['fun_args'] = list(args) + ([kwargs] if kwargs else [])
+func_globals['__jid_event__'].fire_event(data, 'new')

# Initialize a context for executing the method.
with tornado.stack_context.StackContext(self.functions.context_dict.clone):
-data[u'return'] = self.functions[fun](*args, **kwargs)
+data['return'] = self.functions[fun](*args, **kwargs)
try:
-data[u'success'] = self.context.get(u'retcode', 0) == 0
+data['success'] = self.context.get('retcode', 0) == 0
except AttributeError:
# Assume a True result if no context attribute
-data[u'success'] = True
-if isinstance(data[u'return'], dict) and u'data' in data[u'return']:
+data['success'] = True
+if isinstance(data['return'], dict) and 'data' in data['return']:
# some functions can return boolean values
-data[u'success'] = salt.utils.state.check_result(data[u'return'][u'data'])
+data['success'] = salt.utils.state.check_result(data['return']['data'])
except (Exception, SystemExit) as ex:
if isinstance(ex, salt.exceptions.NotImplemented):
-data[u'return'] = str(ex)
+data['return'] = str(ex)
else:
-data[u'return'] = u'Exception occurred in {0} {1}: {2}'.format(
+data['return'] = 'Exception occurred in {0} {1}: {2}'.format(
self.client,
fun,
traceback.format_exc(),
)
-data[u'success'] = False
+data['success'] = False

if self.store_job:
try:
salt.utils.job.store_job(
self.opts,
{
-u'id': self.opts[u'id'],
-u'tgt': self.opts[u'id'],
-u'jid': data[u'jid'],
-u'return': data,
+'id': self.opts['id'],
+'tgt': self.opts['id'],
+'jid': data['jid'],
+'return': data,
},
event=None,
mminion=self.mminion,
)
except salt.exceptions.SaltCacheError:
-log.error(u'Could not store job cache info. '
-u'Job details for this run may be unavailable.')
+log.error('Could not store job cache info. '
+'Job details for this run may be unavailable.')

# Outputters _can_ mutate data so write to the job cache first!
-namespaced_event.fire_event(data, u'ret')
+namespaced_event.fire_event(data, 'ret')

# if we fired an event, make sure to delete the event object.
# This will ensure that we call destroy, which will do the 0MQ linger
-log.info(u'Runner completed: %s', data[u'jid'])
+log.info('Runner completed: %s', data['jid'])
del event
del namespaced_event
-return data if full_return else data[u'return']
+return data if full_return else data['return']

def get_docs(self, arg=None):
'''
Return a dictionary of functions and the inline documentation for each
'''
if arg:
-if u'*' in arg:
+if '*' in arg:
target_mod = arg
_use_fnmatch = True
else:
-target_mod = arg + u'.' if not arg.endswith(u'.') else arg
+target_mod = arg + '.' if not arg.endswith('.') else arg
_use_fnmatch = False
if _use_fnmatch:
docs = [(fun, self.functions[fun].__doc__)
@@ -478,9 +478,9 @@ class AsyncClientMixin(object):
salt.log.setup.setup_multiprocessing_logging()

# pack a few things into low
-low[u'__jid__'] = jid
-low[u'__user__'] = user
-low[u'__tag__'] = tag
+low['__jid__'] = jid
+low['__user__'] = user
+low['__tag__'] = tag

return self.low(fun, low, full_return=False)

@@ -508,9 +508,9 @@ class AsyncClientMixin(object):
if jid is None:
jid = salt.utils.jid.gen_jid(self.opts)
tag = salt.utils.event.tagify(jid, prefix=self.tag_prefix)
-return {u'tag': tag, u'jid': jid}
+return {'tag': tag, 'jid': jid}

-def async(self, fun, low, user=u'UNKNOWN', pub=None):
+def async(self, fun, low, user='UNKNOWN', pub=None):
'''
Execute the function in a multiprocess and return the event tag to use
to watch for the return
@@ -519,7 +519,7 @@ class AsyncClientMixin(object):

proc = salt.utils.process.SignalHandlingMultiprocessingProcess(
target=self._proc_function,
-args=(fun, low, user, async_pub[u'tag'], async_pub[u'jid']))
+args=(fun, low, user, async_pub['tag'], async_pub['jid']))
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
@@ -535,29 +535,29 @@ class AsyncClientMixin(object):
return

# if we are "quiet", don't print
-if self.opts.get(u'quiet', False):
+if self.opts.get('quiet', False):
return

# some suffixes we don't want to print
-if suffix in (u'new',):
+if suffix in ('new',):
return

try:
-outputter = self.opts.get(u'output', event.get(u'outputter', None) or event.get(u'return').get(u'outputter'))
+outputter = self.opts.get('output', event.get('outputter', None) or event.get('return').get('outputter'))
except AttributeError:
outputter = None

# if this is a ret, we have our own set of rules
-if suffix == u'ret':
+if suffix == 'ret':
# Check if outputter was passed in the return data. If this is the case,
# then the return data will be a dict two keys: 'data' and 'outputter'
-if isinstance(event.get(u'return'), dict) \
-and set(event[u'return']) == set((u'data', u'outputter')):
-event_data = event[u'return'][u'data']
-outputter = event[u'return'][u'outputter']
+if isinstance(event.get('return'), dict) \
+and set(event['return']) == set(('data', 'outputter')):
+event_data = event['return']['data']
+outputter = event['return']['outputter']
else:
-event_data = event[u'return']
+event_data = event['return']
else:
-event_data = {u'suffix': suffix, u'event': event}
+event_data = {'suffix': suffix, 'event': event}

salt.output.display_output(event_data, outputter, self.opts)
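The docstring excerpted in the cmd_sync hunk above (the lines ending with 'eauth': 'pam') corresponds to a call of roughly this shape; the exact runner function and credentials here are illustrative, not taken from this diff:

    runner = salt.runner.RunnerClient(opts)
    ret = runner.cmd_sync({
        'fun': 'jobs.list_jobs',
        'username': 'saltdev',
        'password': 'saltdev',
        'eauth': 'pam',
    }, timeout=60)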
@@ -20,7 +20,7 @@ class NetapiClient(object):
'''
def __init__(self, opts):
self.opts = opts
-self.process_manager = salt.utils.process.ProcessManager(name=u'NetAPIProcessManager')
+self.process_manager = salt.utils.process.ProcessManager(name='NetAPIProcessManager')
self.netapi = salt.loader.netapi(self.opts)

def run(self):
@@ -28,11 +28,11 @@ class NetapiClient(object):
Load and start all available api modules
'''
if not len(self.netapi):
-log.error(u"Did not find any netapi configurations, nothing to start")
+log.error("Did not find any netapi configurations, nothing to start")

for fun in self.netapi:
-if fun.endswith(u'.start'):
-log.info(u'Starting %s netapi module', fun)
+if fun.endswith('.start'):
+log.info('Starting %s netapi module', fun)
self.process_manager.add_process(self.netapi[fun])

# Install the SIGINT/SIGTERM handlers if not done so far
@@ -32,7 +32,7 @@ class LocalClient(salt.client.LocalClient):
The RAET LocalClient
'''
def __init__(self,
-c_path=os.path.join(syspaths.CONFIG_DIR, u'master'),
+c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None):

salt.client.LocalClient.__init__(self, c_path, mopts)
@@ -41,22 +41,22 @@ class LocalClient(salt.client.LocalClient):
tgt,
fun,
arg=(),
-tgt_type=u'glob',
-ret=u'',
-jid=u'',
+tgt_type='glob',
+ret='',
+jid='',
timeout=5,
**kwargs):
'''
Publish the command!
'''
-if u'expr_form' in kwargs:
+if 'expr_form' in kwargs:
salt.utils.versions.warn_until(
-u'Fluorine',
-u'The target type should be passed using the \'tgt_type\' '
-u'argument instead of \'expr_form\'. Support for using '
-u'\'expr_form\' will be removed in Salt Fluorine.'
+'Fluorine',
+'The target type should be passed using the \'tgt_type\' '
+'argument instead of \'expr_form\'. Support for using '
+'\'expr_form\' will be removed in Salt Fluorine.'
)
-tgt_type = kwargs.pop(u'expr_form')
+tgt_type = kwargs.pop('expr_form')

payload_kwargs = self._prep_pub(
tgt,
@@ -68,21 +68,21 @@ class LocalClient(salt.client.LocalClient):
timeout=timeout,
**kwargs)

-kind = self.opts[u'__role']
+kind = self.opts['__role']
if kind not in kinds.APPL_KINDS:
-emsg = (u"Invalid application kind = '{0}' for Raet LocalClient.".format(kind))
-log.error(emsg + u"\n")
+emsg = ("Invalid application kind = '{0}' for Raet LocalClient.".format(kind))
+log.error(emsg + "\n")
raise ValueError(emsg)
if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.master],
kinds.APPL_KIND_NAMES[kinds.applKinds.syndic]]:
-lanename = u'master'
+lanename = 'master'
else:
-emsg = (u"Unsupported application kind '{0}' for Raet LocalClient.".format(kind))
-log.error(emsg + u'\n')
+emsg = ("Unsupported application kind '{0}' for Raet LocalClient.".format(kind))
+log.error(emsg + '\n')
raise ValueError(emsg)

-sockdirpath = self.opts[u'sock_dir']
-name = u'client' + nacling.uuid(size=18)
+sockdirpath = self.opts['sock_dir']
+name = 'client' + nacling.uuid(size=18)
stack = LaneStack(
name=name,
lanename=lanename,
@@ -91,12 +91,12 @@ class LocalClient(salt.client.LocalClient):
manor_yard = RemoteYard(
stack=stack,
lanename=lanename,
-name=u'manor',
+name='manor',
dirpath=sockdirpath)
stack.addRemote(manor_yard)
-route = {u'dst': (None, manor_yard.name, u'local_cmd'),
-u'src': (None, stack.local.name, None)}
-msg = {u'route': route, u'load': payload_kwargs}
+route = {'dst': (None, manor_yard.name, 'local_cmd'),
+'src': (None, stack.local.name, None)}
+msg = {'route': route, 'load': payload_kwargs}
stack.transmit(msg)
stack.serviceAll()
while True:
@@ -104,9 +104,9 @@ class LocalClient(salt.client.LocalClient):
stack.serviceAll()
while stack.rxMsgs:
msg, sender = stack.rxMsgs.popleft()
-ret = msg.get(u'return', {})
-if u'ret' in ret:
+ret = msg.get('return', {})
+if 'ret' in ret:
stack.server.close()
-return ret[u'ret']
+return ret['ret']
stack.server.close()
return ret
(File diff suppressed because it is too large.)
@@ -23,7 +23,7 @@ class SSHClient(object):
.. versionadded:: 2015.5.0
'''
def __init__(self,
-c_path=os.path.join(syspaths.CONFIG_DIR, u'master'),
+c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
mopts=None,
disable_custom_roster=False):
if mopts:
@@ -31,14 +31,14 @@ class SSHClient(object):
else:
if os.path.isdir(c_path):
log.warning(
-u'%s expects a file path not a directory path(%s) to '
-u'its \'c_path\' keyword argument',
+'%s expects a file path not a directory path(%s) to '
+'its \'c_path\' keyword argument',
self.__class__.__name__, c_path
)
self.opts = salt.config.client_config(c_path)

# Salt API should never offer a custom roster!
-self.opts[u'__disable_custom_roster'] = disable_custom_roster
+self.opts['__disable_custom_roster'] = disable_custom_roster

def _prep_ssh(
self,
@@ -46,30 +46,30 @@ class SSHClient(object):
fun,
arg=(),
timeout=None,
-tgt_type=u'glob',
+tgt_type='glob',
kwarg=None,
**kwargs):
'''
Prepare the arguments
'''
-if u'expr_form' in kwargs:
+if 'expr_form' in kwargs:
salt.utils.versions.warn_until(
-u'Fluorine',
-u'The target type should be passed using the \'tgt_type\' '
-u'argument instead of \'expr_form\'. Support for using '
-u'\'expr_form\' will be removed in Salt Fluorine.'
+'Fluorine',
+'The target type should be passed using the \'tgt_type\' '
+'argument instead of \'expr_form\'. Support for using '
+'\'expr_form\' will be removed in Salt Fluorine.'
)
-tgt_type = kwargs.pop(u'expr_form')
+tgt_type = kwargs.pop('expr_form')

opts = copy.deepcopy(self.opts)
opts.update(kwargs)
if timeout:
-opts[u'timeout'] = timeout
+opts['timeout'] = timeout
arg = salt.utils.args.condition_input(arg, kwarg)
-opts[u'argv'] = [fun] + arg
-opts[u'selected_target_option'] = tgt_type
-opts[u'tgt'] = tgt
-opts[u'arg'] = arg
+opts['argv'] = [fun] + arg
+opts['selected_target_option'] = tgt_type
+opts['tgt'] = tgt
+opts['arg'] = arg
return salt.client.ssh.SSH(opts)

def cmd_iter(
@@ -78,8 +78,8 @@ class SSHClient(object):
fun,
arg=(),
timeout=None,
-tgt_type=u'glob',
-ret=u'',
+tgt_type='glob',
+ret='',
kwarg=None,
**kwargs):
'''
@@ -88,14 +88,14 @@ class SSHClient(object):

.. versionadded:: 2015.5.0
'''
-if u'expr_form' in kwargs:
+if 'expr_form' in kwargs:
salt.utils.versions.warn_until(
-u'Fluorine',
-u'The target type should be passed using the \'tgt_type\' '
-u'argument instead of \'expr_form\'. Support for using '
-u'\'expr_form\' will be removed in Salt Fluorine.'
+'Fluorine',
+'The target type should be passed using the \'tgt_type\' '
+'argument instead of \'expr_form\'. Support for using '
+'\'expr_form\' will be removed in Salt Fluorine.'
)
-tgt_type = kwargs.pop(u'expr_form')
+tgt_type = kwargs.pop('expr_form')

ssh = self._prep_ssh(
tgt,
@@ -105,7 +105,7 @@ class SSHClient(object):
tgt_type,
kwarg,
**kwargs)
-for ret in ssh.run_iter(jid=kwargs.get(u'jid', None)):
+for ret in ssh.run_iter(jid=kwargs.get('jid', None)):
yield ret

def cmd(self,
@@ -113,7 +113,7 @@ class SSHClient(object):
fun,
arg=(),
timeout=None,
-tgt_type=u'glob',
+tgt_type='glob',
kwarg=None,
**kwargs):
'''
@@ -122,14 +122,14 @@ class SSHClient(object):

.. versionadded:: 2015.5.0
'''
-if u'expr_form' in kwargs:
+if 'expr_form' in kwargs:
salt.utils.versions.warn_until(
-u'Fluorine',
-u'The target type should be passed using the \'tgt_type\' '
-u'argument instead of \'expr_form\'. Support for using '
-u'\'expr_form\' will be removed in Salt Fluorine.'
+'Fluorine',
+'The target type should be passed using the \'tgt_type\' '
+'argument instead of \'expr_form\'. Support for using '
+'\'expr_form\' will be removed in Salt Fluorine.'
)
-tgt_type = kwargs.pop(u'expr_form')
+tgt_type = kwargs.pop('expr_form')

ssh = self._prep_ssh(
tgt,
@@ -140,7 +140,7 @@ class SSHClient(object):
kwarg,
**kwargs)
final = {}
-for ret in ssh.run_iter(jid=kwargs.get(u'jid', None)):
+for ret in ssh.run_iter(jid=kwargs.get('jid', None)):
final.update(ret)
return final

@@ -166,16 +166,16 @@ class SSHClient(object):

kwargs = copy.deepcopy(low)

-for ignore in [u'tgt', u'fun', u'arg', u'timeout', u'tgt_type', u'kwarg']:
+for ignore in ['tgt', 'fun', 'arg', 'timeout', 'tgt_type', 'kwarg']:
if ignore in kwargs:
del kwargs[ignore]

-return self.cmd(low[u'tgt'],
-low[u'fun'],
-low.get(u'arg', []),
-low.get(u'timeout'),
-low.get(u'tgt_type'),
-low.get(u'kwarg'),
+return self.cmd(low['tgt'],
+low['fun'],
+low.get('arg', []),
+low.get('timeout'),
+low.get('tgt_type'),
+low.get('kwarg'),
**kwargs)

def cmd_async(self, low, timeout=None):
@@ -204,8 +204,8 @@ class SSHClient(object):
fun,
arg=(),
timeout=None,
-tgt_type=u'glob',
-ret=u'',
+tgt_type='glob',
+ret='',
kwarg=None,
sub=3,
**kwargs):
@@ -226,24 +226,24 @@ class SSHClient(object):

.. versionadded:: 2017.7.0
'''
-if u'expr_form' in kwargs:
+if 'expr_form' in kwargs:
salt.utils.versions.warn_until(
-u'Fluorine',
-u'The target type should be passed using the \'tgt_type\' '
-u'argument instead of \'expr_form\'. Support for using '
-u'\'expr_form\' will be removed in Salt Fluorine.'
+'Fluorine',
+'The target type should be passed using the \'tgt_type\' '
+'argument instead of \'expr_form\'. Support for using '
+'\'expr_form\' will be removed in Salt Fluorine.'
)
-tgt_type = kwargs.pop(u'expr_form')
+tgt_type = kwargs.pop('expr_form')
minion_ret = self.cmd(tgt,
-u'sys.list_functions',
+'sys.list_functions',
tgt_type=tgt_type,
**kwargs)
minions = list(minion_ret)
random.shuffle(minions)
f_tgt = []
for minion in minions:
-if fun in minion_ret[minion][u'return']:
+if fun in minion_ret[minion]['return']:
f_tgt.append(minion)
if len(f_tgt) >= sub:
break
-return self.cmd_iter(f_tgt, fun, arg, timeout, tgt_type=u'list', ret=ret, kwarg=kwarg, **kwargs)
+return self.cmd_iter(f_tgt, fun, arg, timeout, tgt_type='list', ret=ret, kwarg=kwarg, **kwargs)
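Several hunks above repeat the same deprecation shim: passing expr_form still works but triggers a warn_until('Fluorine', ...) and is rewritten into tgt_type. A hedged illustration of the two call styles (target and function names are made up):

    client = SSHClient()

    # Preferred form after this change:
    client.cmd('web*', 'test.ping', tgt_type='glob')

    # Still accepted until Salt Fluorine, but emits the warning shown above:
    client.cmd('web*', 'test.ping', expr_form='glob')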
@@ -20,12 +20,12 @@ import salt.utils.vt

log = logging.getLogger(__name__)

-SSH_PASSWORD_PROMPT_RE = re.compile(r'(?:.*)[Pp]assword(?: for .*)?:', re.M) # future lint: disable=non-unicode-string
-KEY_VALID_RE = re.compile(r'.*\(yes\/no\).*') # future lint: disable=non-unicode-string
+SSH_PASSWORD_PROMPT_RE = re.compile(r'(?:.*)[Pp]assword(?: for .*)?:', re.M)
+KEY_VALID_RE = re.compile(r'.*\(yes\/no\).*')

# Keep these in sync with ./__init__.py
-RSTR = u'_edbc7885e4f9aac9b83b35999b68d015148caf467b78fa39c05f669c0ff89878'
-RSTR_RE = re.compile(r'(?:^|\r?\n)' + RSTR + r'(?:\r?\n|$)') # future lint: disable=non-unicode-string
+RSTR = '_edbc7885e4f9aac9b83b35999b68d015148caf467b78fa39c05f669c0ff89878'
+RSTR_RE = re.compile(r'(?:^|\r?\n)' + RSTR + r'(?:\r?\n|$)')


class NoPasswdError(Exception):
@@ -40,7 +40,7 @@ def gen_key(path):
'''
Generate a key for use with salt-ssh
'''
-cmd = u'ssh-keygen -P "" -f {0} -t rsa -q'.format(path)
+cmd = 'ssh-keygen -P "" -f {0} -t rsa -q'.format(path)
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
subprocess.call(cmd, shell=True)
@@ -50,12 +50,12 @@ def gen_shell(opts, **kwargs):
'''
Return the correct shell interface for the target system
'''
-if kwargs[u'winrm']:
+if kwargs['winrm']:
try:
import saltwinshell
shell = saltwinshell.Shell(opts, **kwargs)
except ImportError:
-log.error(u'The saltwinshell library is not available')
+log.error('The saltwinshell library is not available')
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
else:
shell = Shell(opts, **kwargs)
@@ -85,7 +85,7 @@ class Shell(object):
ssh_options=None):
self.opts = opts
# ssh <ipv6>, but scp [<ipv6]:/path
-self.host = host.strip(u'[]')
+self.host = host.strip('[]')
self.user = user
self.port = port
self.passwd = str(passwd) if passwd else passwd
@@ -96,18 +96,18 @@ class Shell(object):
self.mods = mods
self.identities_only = identities_only
self.remote_port_forwards = remote_port_forwards
-self.ssh_options = u'' if ssh_options is None else ssh_options
+self.ssh_options = '' if ssh_options is None else ssh_options

def get_error(self, errstr):
'''
Parse out an error and return a targeted error string
'''
-for line in errstr.split(u'\n'):
-if line.startswith(u'ssh:'):
+for line in errstr.split('\n'):
+if line.startswith('ssh:'):
return line
-if line.startswith(u'Pseudo-terminal'):
+if line.startswith('Pseudo-terminal'):
continue
-if u'to the list of known hosts.' in line:
+if 'to the list of known hosts.' in line:
continue
return line
return errstr
@@ -117,36 +117,36 @@ class Shell(object):
Return options for the ssh command base for Salt to call
'''
options = [
-u'KbdInteractiveAuthentication=no',
+'KbdInteractiveAuthentication=no',
]
if self.passwd:
-options.append(u'PasswordAuthentication=yes')
+options.append('PasswordAuthentication=yes')
else:
-options.append(u'PasswordAuthentication=no')
-if self.opts.get(u'_ssh_version', (0,)) > (4, 9):
-options.append(u'GSSAPIAuthentication=no')
-options.append(u'ConnectTimeout={0}'.format(self.timeout))
-if self.opts.get(u'ignore_host_keys'):
-options.append(u'StrictHostKeyChecking=no')
-if self.opts.get(u'no_host_keys'):
-options.extend([u'StrictHostKeyChecking=no',
-u'UserKnownHostsFile=/dev/null'])
-known_hosts = self.opts.get(u'known_hosts_file')
+options.append('PasswordAuthentication=no')
+if self.opts.get('_ssh_version', (0,)) > (4, 9):
+options.append('GSSAPIAuthentication=no')
+options.append('ConnectTimeout={0}'.format(self.timeout))
+if self.opts.get('ignore_host_keys'):
+options.append('StrictHostKeyChecking=no')
+if self.opts.get('no_host_keys'):
+options.extend(['StrictHostKeyChecking=no',
+'UserKnownHostsFile=/dev/null'])
+known_hosts = self.opts.get('known_hosts_file')
if known_hosts and os.path.isfile(known_hosts):
-options.append(u'UserKnownHostsFile={0}'.format(known_hosts))
+options.append('UserKnownHostsFile={0}'.format(known_hosts))
if self.port:
-options.append(u'Port={0}'.format(self.port))
+options.append('Port={0}'.format(self.port))
if self.priv:
-options.append(u'IdentityFile={0}'.format(self.priv))
+options.append('IdentityFile={0}'.format(self.priv))
if self.user:
-options.append(u'User={0}'.format(self.user))
+options.append('User={0}'.format(self.user))
if self.identities_only:
-options.append(u'IdentitiesOnly=yes')
+options.append('IdentitiesOnly=yes')

ret = []
for option in options:
-ret.append(u'-o {0} '.format(option))
-return u''.join(ret)
+ret.append('-o {0} '.format(option))
+return ''.join(ret)

def _passwd_opts(self):
'''
@@ -155,41 +155,41 @@ class Shell(object):
# TODO ControlMaster does not work without ControlPath
# user could take advantage of it if they set ControlPath in their
# ssh config. Also, ControlPersist not widely available.
-options = [u'ControlMaster=auto',
-u'StrictHostKeyChecking=no',
+options = ['ControlMaster=auto',
+'StrictHostKeyChecking=no',
]
-if self.opts[u'_ssh_version'] > (4, 9):
-options.append(u'GSSAPIAuthentication=no')
-options.append(u'ConnectTimeout={0}'.format(self.timeout))
-if self.opts.get(u'ignore_host_keys'):
-options.append(u'StrictHostKeyChecking=no')
-if self.opts.get(u'no_host_keys'):
-options.extend([u'StrictHostKeyChecking=no',
-u'UserKnownHostsFile=/dev/null'])
+if self.opts['_ssh_version'] > (4, 9):
+options.append('GSSAPIAuthentication=no')
+options.append('ConnectTimeout={0}'.format(self.timeout))
+if self.opts.get('ignore_host_keys'):
+options.append('StrictHostKeyChecking=no')
+if self.opts.get('no_host_keys'):
+options.extend(['StrictHostKeyChecking=no',
+'UserKnownHostsFile=/dev/null'])

if self.passwd:
-options.extend([u'PasswordAuthentication=yes',
-u'PubkeyAuthentication=yes'])
+options.extend(['PasswordAuthentication=yes',
+'PubkeyAuthentication=yes'])
else:
-options.extend([u'PasswordAuthentication=no',
-u'PubkeyAuthentication=yes',
-u'KbdInteractiveAuthentication=no',
-u'ChallengeResponseAuthentication=no',
-u'BatchMode=yes'])
+options.extend(['PasswordAuthentication=no',
+'PubkeyAuthentication=yes',
+'KbdInteractiveAuthentication=no',
+'ChallengeResponseAuthentication=no',
+'BatchMode=yes'])
if self.port:
-options.append(u'Port={0}'.format(self.port))
+options.append('Port={0}'.format(self.port))
if self.user:
-options.append(u'User={0}'.format(self.user))
+options.append('User={0}'.format(self.user))
if self.identities_only:
-options.append(u'IdentitiesOnly=yes')
+options.append('IdentitiesOnly=yes')

ret = []
for option in options:
-ret.append(u'-o {0} '.format(option))
-return u''.join(ret)
+ret.append('-o {0} '.format(option))
+return ''.join(ret)

def _ssh_opts(self):
-return u' '.join([u'-o {0}'.format(opt)
+return ' '.join(['-o {0}'.format(opt)
for opt in self.ssh_options])

def _copy_id_str_old(self):
@@ -199,9 +199,9 @@ class Shell(object):
if self.passwd:
# Using single quotes prevents shell expansion and
# passwords containing '$'
-return u"{0} {1} '{2} -p {3} {4} {5}@{6}'".format(
-u'ssh-copy-id',
-u'-i {0}.pub'.format(self.priv),
+return "{0} {1} '{2} -p {3} {4} {5}@{6}'".format(
+'ssh-copy-id',
+'-i {0}.pub'.format(self.priv),
self._passwd_opts(),
self.port,
self._ssh_opts(),
@@ -217,9 +217,9 @@ class Shell(object):
if self.passwd:
# Using single quotes prevents shell expansion and
# passwords containing '$'
-return u"{0} {1} {2} -p {3} {4} {5}@{6}".format(
-u'ssh-copy-id',
-u'-i {0}.pub'.format(self.priv),
+return "{0} {1} {2} -p {3} {4} {5}@{6}".format(
+'ssh-copy-id',
+'-i {0}.pub'.format(self.priv),
self._passwd_opts(),
self.port,
self._ssh_opts(),
@@ -232,11 +232,11 @@ class Shell(object):
Execute ssh-copy-id to plant the id file on the target
'''
stdout, stderr, retcode = self._run_cmd(self._copy_id_str_old())
-if salt.defaults.exitcodes.EX_OK != retcode and u'Usage' in stderr:
+if salt.defaults.exitcodes.EX_OK != retcode and 'Usage' in stderr:
stdout, stderr, retcode = self._run_cmd(self._copy_id_str_new())
return stdout, stderr, retcode

-def _cmd_str(self, cmd, ssh=u'ssh'):
+def _cmd_str(self, cmd, ssh='ssh'):
'''
Return the cmd string to execute
'''
@@ -245,21 +245,21 @@ class Shell(object):
# need to deliver the SHIM to the remote host and execute it there

command = [ssh]
-if ssh != u'scp':
+if ssh != 'scp':
command.append(self.host)
-if self.tty and ssh == u'ssh':
-command.append(u'-t -t')
+if self.tty and ssh == 'ssh':
+command.append('-t -t')
if self.passwd or self.priv:
command.append(self.priv and self._key_opts() or self._passwd_opts())
-if ssh != u'scp' and self.remote_port_forwards:
-command.append(u' '.join([u'-R {0}'.format(item)
-for item in self.remote_port_forwards.split(u',')]))
+if ssh != 'scp' and self.remote_port_forwards:
+command.append(' '.join(['-R {0}'.format(item)
+for item in self.remote_port_forwards.split(',')]))
if self.ssh_options:
command.append(self._ssh_opts())

command.append(cmd)

-return u' '.join(command)
+return ' '.join(command)

def _old_run_cmd(self, cmd):
'''
@@ -276,7 +276,7 @@ class Shell(object):
data = proc.communicate()
return data[0], data[1], proc.returncode
except Exception:
-return (u'local', u'Unknown Error', None)
+return ('local', 'Unknown Error', None)

def _run_nb_cmd(self, cmd):
'''
@@ -300,7 +300,7 @@ class Shell(object):
err = self.get_error(err)
yield out, err, rcode
except Exception:
-yield (u'', u'Unknown Error', None)
+yield ('', 'Unknown Error', None)

def exec_nb_cmd(self, cmd):
'''
@@ -311,9 +311,9 @@ class Shell(object):
rcode = None
cmd = self._cmd_str(cmd)

-logmsg = u'Executing non-blocking command: {0}'.format(cmd)
+logmsg = 'Executing non-blocking command: {0}'.format(cmd)
if self.passwd:
-logmsg = logmsg.replace(self.passwd, (u'*' * 6))
+logmsg = logmsg.replace(self.passwd, ('*' * 6))
log.debug(logmsg)

for out, err, rcode in self._run_nb_cmd(cmd):
@@ -322,7 +322,7 @@ class Shell(object):
if err is not None:
r_err.append(err)
yield None, None, None
-yield u''.join(r_out), u''.join(r_err), rcode
+yield ''.join(r_out), ''.join(r_err), rcode

def exec_cmd(self, cmd):
'''
@@ -330,11 +330,11 @@ class Shell(object):
'''
cmd = self._cmd_str(cmd)

-logmsg = u'Executing command: {0}'.format(cmd)
+logmsg = 'Executing command: {0}'.format(cmd)
if self.passwd:
-logmsg = logmsg.replace(self.passwd, (u'*' * 6))
-if u'decode("base64")' in logmsg or u'base64.b64decode(' in logmsg:
-log.debug(u'Executed SHIM command. Command logged to TRACE')
+logmsg = logmsg.replace(self.passwd, ('*' * 6))
+if 'decode("base64")' in logmsg or 'base64.b64decode(' in logmsg:
+log.debug('Executed SHIM command. Command logged to TRACE')
log.trace(logmsg)
else:
log.debug(logmsg)
@@ -347,19 +347,19 @@ class Shell(object):
scp a file or files to a remote system
'''
if makedirs:
-self.exec_cmd(u'mkdir -p {0}'.format(os.path.dirname(remote)))
+self.exec_cmd('mkdir -p {0}'.format(os.path.dirname(remote)))

# scp needs [<ipv6}
host = self.host
-if u':' in host:
-host = u'[{0}]'.format(host)
+if ':' in host:
+host = '[{0}]'.format(host)

-cmd = u'{0} {1}:{2}'.format(local, host, remote)
-cmd = self._cmd_str(cmd, ssh=u'scp')
+cmd = '{0} {1}:{2}'.format(local, host, remote)
+cmd = self._cmd_str(cmd, ssh='scp')

-logmsg = u'Executing command: {0}'.format(cmd)
+logmsg = 'Executing command: {0}'.format(cmd)
if self.passwd:
-logmsg = logmsg.replace(self.passwd, (u'*' * 6))
+logmsg = logmsg.replace(self.passwd, ('*' * 6))
log.debug(logmsg)

return self._run_cmd(cmd)
@@ -373,16 +373,16 @@ class Shell(object):
cmd,
shell=True,
log_stdout=True,
-log_stdout_level=u'trace',
+log_stdout_level='trace',
log_stderr=True,
-log_stderr_level=u'trace',
+log_stderr_level='trace',
stream_stdout=False,
stream_stderr=False)
sent_passwd = 0
send_password = True
-ret_stdout = u''
-ret_stderr = u''
-old_stdout = u''
+ret_stdout = ''
+ret_stderr = ''
+old_stdout = ''

try:
while term.has_unread_data:
@@ -399,26 +399,26 @@ class Shell(object):
send_password = False
if buff and SSH_PASSWORD_PROMPT_RE.search(buff) and send_password:
if not self.passwd:
-return u'', u'Permission denied, no authentication information', 254
+return '', 'Permission denied, no authentication information', 254
if sent_passwd < passwd_retries:
term.sendline(self.passwd)
sent_passwd += 1
continue
else:
# asking for a password, and we can't seem to send it
-return u'', u'Password authentication failed', 254
+return '', 'Password authentication failed', 254
elif buff and KEY_VALID_RE.search(buff):
if key_accept:
-term.sendline(u'yes')
+term.sendline('yes')
continue
else:
-term.sendline(u'no')
-ret_stdout = (u'The host key needs to be accepted, to '
-u'auto accept run salt-ssh with the -i '
-u'flag:\n{0}').format(stdout)
-return ret_stdout, u'', 254
-elif buff and buff.endswith(u'_||ext_mods||_'):
-mods_raw = json.dumps(self.mods, separators=(u',', u':')) + u'|_E|0|'
+term.sendline('no')
+ret_stdout = ('The host key needs to be accepted, to '
+'auto accept run salt-ssh with the -i '
+'flag:\n{0}').format(stdout)
+return ret_stdout, '', 254
+elif buff and buff.endswith('_||ext_mods||_'):
+mods_raw = json.dumps(self.mods, separators=(',', ':')) + '|_E|0|'
term.sendline(mods_raw)
if stdout:
old_stdout = stdout
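As a hedged illustration of what the Shell option builders above assemble (option names are taken from the code; the timeout, port, user, and key path values are invented), the resulting fragment of the ssh command line looks roughly like:

    -o KbdInteractiveAuthentication=no -o PasswordAuthentication=no
    -o GSSAPIAuthentication=no -o ConnectTimeout=60
    -o Port=2222 -o IdentityFile=/etc/salt/pki/ssh/salt-ssh.rsa
    -o User=root -o IdentitiesOnly=yes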
|
@ -18,8 +18,8 @@ import os
import stat
import subprocess

THIN_ARCHIVE = u'salt-thin.tgz'
EXT_ARCHIVE = u'salt-ext_mods.tgz'
THIN_ARCHIVE = 'salt-thin.tgz'
EXT_ARCHIVE = 'salt-ext_mods.tgz'

# Keep these in sync with salt/defaults/exitcodes.py
EX_THIN_DEPLOY = 11

@ -54,7 +54,7 @@ def get_system_encoding():
# and reset to None
encoding = None

if not sys.platform.startswith(u'win') and sys.stdin is not None:
if not sys.platform.startswith('win') and sys.stdin is not None:
# On linux we can rely on sys.stdin for the encoding since it
# most commonly matches the filesystem encoding. This however
# does not apply to windows

@ -80,16 +80,16 @@ def get_system_encoding():
# the way back to ascii
encoding = sys.getdefaultencoding()
if not encoding:
if sys.platform.startswith(u'darwin'):
if sys.platform.startswith('darwin'):
# Mac OS X uses UTF-8
encoding = u'utf-8'
elif sys.platform.startswith(u'win'):
encoding = 'utf-8'
elif sys.platform.startswith('win'):
# Windows uses a configurable encoding; on Windows, Python uses the name "mbcs"
# to refer to whatever the currently configured encoding is.
encoding = u'mbcs'
encoding = 'mbcs'
else:
# On linux default to ascii as a last resort
encoding = u'ascii'
encoding = 'ascii'
return encoding


@ -97,7 +97,7 @@ def is_windows():
'''
Simple function to return if a host is Windows or not
'''
return sys.platform.startswith(u'win')
return sys.platform.startswith('win')


def need_deployment():

@ -121,35 +121,35 @@ def need_deployment():
# Attack detected
need_deployment()
# If SUDOing then also give the super user group write permissions
sudo_gid = os.environ.get(u'SUDO_GID')
sudo_gid = os.environ.get('SUDO_GID')
if sudo_gid:
try:
os.chown(OPTIONS.saltdir, -1, int(sudo_gid))
stt = os.stat(OPTIONS.saltdir)
os.chmod(OPTIONS.saltdir, stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP)
except OSError:
sys.stdout.write(u'\n\nUnable to set permissions on thin directory.\nIf sudo_user is set '
u'and is not root, be certain the user is in the same group\nas the login user')
sys.stdout.write('\n\nUnable to set permissions on thin directory.\nIf sudo_user is set '
'and is not root, be certain the user is in the same group\nas the login user')
sys.exit(1)

# Delimiter emitted on stdout *only* to indicate shim message to master.
sys.stdout.write(u"{0}\ndeploy\n".format(OPTIONS.delimiter))
sys.stdout.write("{0}\ndeploy\n".format(OPTIONS.delimiter))
sys.exit(EX_THIN_DEPLOY)


# Adapted from salt.utils.hashutils.get_hash()
def get_hash(path, form=u'sha1', chunk_size=4096):
def get_hash(path, form='sha1', chunk_size=4096):
'''
Generate a hash digest string for a file.
'''
try:
hash_type = getattr(hashlib, form)
except AttributeError:
raise ValueError(u'Invalid hash type: {0}'.format(form))
with open(path, u'rb') as ifile:
raise ValueError('Invalid hash type: {0}'.format(form))
with open(path, 'rb') as ifile:
hash_obj = hash_type()
# read the file in in chunks, not the entire file
for chunk in iter(lambda: ifile.read(chunk_size), b''): # future lint: disable=non-unicode-string
for chunk in iter(lambda: ifile.read(chunk_size), b''):
hash_obj.update(chunk)
return hash_obj.hexdigest()


@ -170,7 +170,7 @@ def need_ext():
'''
Signal that external modules need to be deployed.
'''
sys.stdout.write(u"{0}\next_mods\n".format(OPTIONS.delimiter))
sys.stdout.write("{0}\next_mods\n".format(OPTIONS.delimiter))
sys.exit(EX_MOD_DEPLOY)


@ -180,20 +180,20 @@ def unpack_ext(ext_path):
'''
modcache = os.path.join(
OPTIONS.saltdir,
u'running_data',
u'var',
u'cache',
u'salt',
u'minion',
u'extmods')
'running_data',
'var',
'cache',
'salt',
'minion',
'extmods')
tfile = tarfile.TarFile.gzopen(ext_path)
old_umask = os.umask(0o077)
tfile.extractall(path=modcache)
tfile.close()
os.umask(old_umask)
os.unlink(ext_path)
ver_path = os.path.join(modcache, u'ext_version')
ver_dst = os.path.join(OPTIONS.saltdir, u'ext_version')
ver_path = os.path.join(modcache, 'ext_version')
ver_dst = os.path.join(OPTIONS.saltdir, 'ext_version')
shutil.move(ver_path, ver_dst)


@ -208,8 +208,8 @@ def main(argv): # pylint: disable=W0613
unpack_thin(thin_path)
# Salt thin now is available to use
else:
if not sys.platform.startswith(u'win'):
scpstat = subprocess.Popen([u'/bin/sh', u'-c', u'command -v scp']).wait()
if not sys.platform.startswith('win'):
scpstat = subprocess.Popen(['/bin/sh', '-c', 'command -v scp']).wait()
if scpstat != 0:
sys.exit(EX_SCP_NOT_FOUND)

@ -218,46 +218,46 @@ def main(argv): # pylint: disable=W0613

if not os.path.isdir(OPTIONS.saltdir):
sys.stderr.write(
u'ERROR: salt path "{0}" exists but is'
u' not a directory\n'.format(OPTIONS.saltdir)
'ERROR: salt path "{0}" exists but is'
' not a directory\n'.format(OPTIONS.saltdir)
)
sys.exit(EX_CANTCREAT)

version_path = os.path.normpath(os.path.join(OPTIONS.saltdir, u'version'))
version_path = os.path.normpath(os.path.join(OPTIONS.saltdir, 'version'))
if not os.path.exists(version_path) or not os.path.isfile(version_path):
sys.stderr.write(
u'WARNING: Unable to locate current thin '
u' version: {0}.\n'.format(version_path)
'WARNING: Unable to locate current thin '
' version: {0}.\n'.format(version_path)
)
need_deployment()
with open(version_path, u'r') as vpo:
with open(version_path, 'r') as vpo:
cur_version = vpo.readline().strip()
if cur_version != OPTIONS.version:
sys.stderr.write(
u'WARNING: current thin version {0}'
u' is not up-to-date with {1}.\n'.format(
'WARNING: current thin version {0}'
' is not up-to-date with {1}.\n'.format(
cur_version, OPTIONS.version
)
)
need_deployment()
# Salt thin exists and is up-to-date - fall through and use it

salt_call_path = os.path.join(OPTIONS.saltdir, u'salt-call')
salt_call_path = os.path.join(OPTIONS.saltdir, 'salt-call')
if not os.path.isfile(salt_call_path):
sys.stderr.write(u'ERROR: thin is missing "{0}"\n'.format(salt_call_path))
sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
need_deployment()

with open(os.path.join(OPTIONS.saltdir, u'minion'), u'w') as config:
config.write(OPTIONS.config + u'\n')
with open(os.path.join(OPTIONS.saltdir, 'minion'), 'w') as config:
config.write(OPTIONS.config + '\n')
if OPTIONS.ext_mods:
ext_path = os.path.join(OPTIONS.saltdir, EXT_ARCHIVE)
if os.path.exists(ext_path):
unpack_ext(ext_path)
else:
version_path = os.path.join(OPTIONS.saltdir, u'ext_version')
version_path = os.path.join(OPTIONS.saltdir, 'ext_version')
if not os.path.exists(version_path) or not os.path.isfile(version_path):
need_ext()
with open(version_path, u'r') as vpo:
with open(version_path, 'r') as vpo:
cur_version = vpo.readline().strip()
if cur_version != OPTIONS.ext_mods:
need_ext()

@ -270,38 +270,38 @@ def main(argv): # pylint: disable=W0613
salt_argv = [
sys.executable,
salt_call_path,
u'--retcode-passthrough',
u'--local',
u'--metadata',
u'--out', u'json',
u'-l', u'quiet',
u'-c', OPTIONS.saltdir
'--retcode-passthrough',
'--local',
'--metadata',
'--out', 'json',
'-l', 'quiet',
'-c', OPTIONS.saltdir
]

try:
if argv_prepared[-1].startswith(u'--no-parse='):
if argv_prepared[-1].startswith('--no-parse='):
salt_argv.append(argv_prepared.pop(-1))
except (IndexError, TypeError):
pass

salt_argv.append(u'--')
salt_argv.append('--')
salt_argv.extend(argv_prepared)

sys.stderr.write(u'SALT_ARGV: {0}\n'.format(salt_argv))
sys.stderr.write('SALT_ARGV: {0}\n'.format(salt_argv))

# Only emit the delimiter on *both* stdout and stderr when completely successful.
# Yes, the flush() is necessary.
sys.stdout.write(OPTIONS.delimiter + u'\n')
sys.stdout.write(OPTIONS.delimiter + '\n')
sys.stdout.flush()
if not OPTIONS.tty:
sys.stderr.write(OPTIONS.delimiter + u'\n')
sys.stderr.write(OPTIONS.delimiter + '\n')
sys.stderr.flush()
if OPTIONS.cmd_umask is not None:
old_umask = os.umask(OPTIONS.cmd_umask)
if OPTIONS.tty:
# Returns bytes instead of string on python 3
stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors=u"replace"))
sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors="replace"))
sys.stdout.flush()
if OPTIONS.wipe:
shutil.rmtree(OPTIONS.saltdir)

@ -313,5 +313,5 @@ def main(argv): # pylint: disable=W0613
if OPTIONS.cmd_umask is not None:
os.umask(old_umask)

if __name__ == u'__main__':
if __name__ == '__main__':
sys.exit(main(sys.argv))
@ -85,33 +85,33 @@ class SSHHighState(salt.state.BaseHighState):
'''
Evaluate master_tops locally
'''
if u'id' not in self.opts:
log.error(u'Received call for external nodes without an id')
if 'id' not in self.opts:
log.error('Received call for external nodes without an id')
return {}
if not salt.utils.verify.valid_id(self.opts, self.opts[u'id']):
if not salt.utils.verify.valid_id(self.opts, self.opts['id']):
return {}
# Evaluate all configured master_tops interfaces

grains = {}
ret = {}

if u'grains' in self.opts:
grains = self.opts[u'grains']
if 'grains' in self.opts:
grains = self.opts['grains']
for fun in self.tops:
if fun not in self.opts.get(u'master_tops', {}):
if fun not in self.opts.get('master_tops', {}):
continue
try:
ret.update(self.tops[fun](opts=self.opts, grains=grains))
except Exception as exc:
# If anything happens in the top generation, log it and move on
log.error(
u'Top function %s failed with error %s for minion %s',
fun, exc, self.opts[u'id']
'Top function %s failed with error %s for minion %s',
fun, exc, self.opts['id']
)
return ret


def lowstate_file_refs(chunks, extras=u''):
def lowstate_file_refs(chunks, extras=''):
'''
Create a list of file ref objects to reconcile
'''

@ -119,12 +119,12 @@ def lowstate_file_refs(chunks, extras=u''):
for chunk in chunks:
if not isinstance(chunk, dict):
continue
saltenv = u'base'
saltenv = 'base'
crefs = []
for state in chunk:
if state == u'__env__':
if state == '__env__':
saltenv = chunk[state]
elif state.startswith(u'__'):
elif state.startswith('__'):
continue
crefs.extend(salt_refs(chunk[state]))
if crefs:

@ -132,7 +132,7 @@ def lowstate_file_refs(chunks, extras=u''):
refs[saltenv] = []
refs[saltenv].append(crefs)
if extras:
extra_refs = extras.split(u',')
extra_refs = extras.split(',')
if extra_refs:
for env in refs:
for x in extra_refs:

@ -144,7 +144,7 @@ def salt_refs(data, ret=None):
'''
Pull salt file references out of the states
'''
proto = u'salt://'
proto = 'salt://'
if ret is None:
ret = []
if isinstance(data, six.string_types):

@ -166,38 +166,38 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
'''
gendir = tempfile.mkdtemp()
trans_tar = salt.utils.files.mkstemp()
lowfn = os.path.join(gendir, u'lowstate.json')
pillarfn = os.path.join(gendir, u'pillar.json')
roster_grainsfn = os.path.join(gendir, u'roster_grains.json')
lowfn = os.path.join(gendir, 'lowstate.json')
pillarfn = os.path.join(gendir, 'pillar.json')
roster_grainsfn = os.path.join(gendir, 'roster_grains.json')
sync_refs = [
[salt.utils.url.create(u'_modules')],
[salt.utils.url.create(u'_states')],
[salt.utils.url.create(u'_grains')],
[salt.utils.url.create(u'_renderers')],
[salt.utils.url.create(u'_returners')],
[salt.utils.url.create(u'_output')],
[salt.utils.url.create(u'_utils')],
[salt.utils.url.create('_modules')],
[salt.utils.url.create('_states')],
[salt.utils.url.create('_grains')],
[salt.utils.url.create('_renderers')],
[salt.utils.url.create('_returners')],
[salt.utils.url.create('_output')],
[salt.utils.url.create('_utils')],
]
with salt.utils.files.fopen(lowfn, u'w+') as fp_:
with salt.utils.files.fopen(lowfn, 'w+') as fp_:
fp_.write(json.dumps(chunks))
if pillar:
with salt.utils.files.fopen(pillarfn, u'w+') as fp_:
with salt.utils.files.fopen(pillarfn, 'w+') as fp_:
fp_.write(json.dumps(pillar))
if roster_grains:
with salt.utils.files.fopen(roster_grainsfn, u'w+') as fp_:
with salt.utils.files.fopen(roster_grainsfn, 'w+') as fp_:
fp_.write(json.dumps(roster_grains))

if id_ is None:
id_ = u''
id_ = ''
try:
cachedir = os.path.join(u'salt-ssh', id_).rstrip(os.sep)
cachedir = os.path.join('salt-ssh', id_).rstrip(os.sep)
except AttributeError:
# Minion ID should always be a str, but don't let an int break this
cachedir = os.path.join(u'salt-ssh', str(id_)).rstrip(os.sep)
cachedir = os.path.join('salt-ssh', str(id_)).rstrip(os.sep)

for saltenv in file_refs:
# Location where files in this saltenv will be cached
cache_dest_root = os.path.join(cachedir, u'files', saltenv)
cache_dest_root = os.path.join(cachedir, 'files', saltenv)
file_refs[saltenv].extend(sync_refs)
env_root = os.path.join(gendir, saltenv)
if not os.path.isdir(env_root):

@ -209,7 +209,7 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
try:
path = file_client.cache_file(name, saltenv, cachedir=cachedir)
except IOError:
path = u''
path = ''
if path:
tgt = os.path.join(env_root, short)
tgt_dir = os.path.dirname(tgt)

@ -220,10 +220,10 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
try:
files = file_client.cache_dir(name, saltenv, cachedir=cachedir)
except IOError:
files = u''
files = ''
if files:
for filename in files:
fn = filename[len(file_client.get_cachedir(cache_dest)):].strip(u'/')
fn = filename[len(file_client.get_cachedir(cache_dest)):].strip('/')
tgt = os.path.join(
env_root,
short,

@ -240,7 +240,7 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
except OSError:
cwd = None
os.chdir(gendir)
with closing(tarfile.open(trans_tar, u'w:gz')) as tfp:
with closing(tarfile.open(trans_tar, 'w:gz')) as tfp:
for root, dirs, files in os.walk(gendir):
for name in files:
full = os.path.join(root, name)
@ -42,8 +42,8 @@ class FunctionWrapper(object):
self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
self.opts = opts
self.mods = mods if isinstance(mods, dict) else {}
self.kwargs = {u'id_': id_,
u'host': host}
self.kwargs = {'id_': id_,
'host': host}
self.fsclient = fsclient
self.kwargs.update(kwargs)
self.aliases = aliases

@ -67,14 +67,14 @@ class FunctionWrapper(object):
'''
Return the function call to simulate the salt local lookup system
'''
if u'.' not in cmd and not self.cmd_prefix:
if '.' not in cmd and not self.cmd_prefix:
# Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
# containing only 'cmd' module calls, in that case. Create a new
# FunctionWrapper which contains the prefix 'cmd' (again, for the
# salt.cmd.run example)
kwargs = copy.deepcopy(self.kwargs)
id_ = kwargs.pop(u'id_')
host = kwargs.pop(u'host')
id_ = kwargs.pop('id_')
host = kwargs.pop('host')
return FunctionWrapper(self.opts,
id_,
host,

@ -90,7 +90,7 @@ class FunctionWrapper(object):
# We're in an inner FunctionWrapper as created by the code block
# above. Reconstruct the original cmd in the form 'cmd.run' and
# then evaluate as normal
cmd = u'{0}.{1}'.format(self.cmd_prefix, cmd)
cmd = '{0}.{1}'.format(self.cmd_prefix, cmd)

if cmd in self.wfuncs:
return self.wfuncs[cmd]

@ -104,7 +104,7 @@ class FunctionWrapper(object):
'''
argv = [cmd]
argv.extend([json.dumps(arg) for arg in args])
argv.extend([u'{0}={1}'.format(key, json.dumps(val)) for key, val in six.iteritems(kwargs)])
argv.extend(['{0}={1}'.format(key, json.dumps(val)) for key, val in six.iteritems(kwargs)])
single = salt.client.ssh.Single(
self.opts,
argv,

@ -115,21 +115,21 @@ class FunctionWrapper(object):
**self.kwargs
)
stdout, stderr, retcode = single.cmd_block()
if stderr.count(u'Permission Denied'):
return {u'_error': u'Permission Denied',
u'stdout': stdout,
u'stderr': stderr,
u'retcode': retcode}
if stderr.count('Permission Denied'):
return {'_error': 'Permission Denied',
'stdout': stdout,
'stderr': stderr,
'retcode': retcode}
try:
ret = json.loads(stdout, object_hook=salt.utils.data.decode_dict)
if len(ret) < 2 and u'local' in ret:
ret = ret[u'local']
ret = ret.get(u'return', {})
if len(ret) < 2 and 'local' in ret:
ret = ret['local']
ret = ret.get('return', {})
except ValueError:
ret = {u'_error': u'Failed to return clean data',
u'stderr': stderr,
u'stdout': stdout,
u'retcode': retcode}
ret = {'_error': 'Failed to return clean data',
'stderr': stderr,
'stdout': stdout,
'retcode': retcode}
return ret
return caller


@ -137,18 +137,18 @@ class FunctionWrapper(object):
'''
Set aliases for functions
'''
if u'.' not in cmd and not self.cmd_prefix:
if '.' not in cmd and not self.cmd_prefix:
# Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
# containing only 'cmd' module calls, in that case. We don't
# support assigning directly to prefixes in this way
raise KeyError(u'Cannot assign to module key {0} in the '
u'FunctionWrapper'.format(cmd))
raise KeyError('Cannot assign to module key {0} in the '
'FunctionWrapper'.format(cmd))

if self.cmd_prefix:
# We're in an inner FunctionWrapper as created by the first code
# block in __getitem__. Reconstruct the original cmd in the form
# 'cmd.run' and then evaluate as normal
cmd = u'{0}.{1}'.format(self.cmd_prefix, cmd)
cmd = '{0}.{1}'.format(self.cmd_prefix, cmd)

if cmd in self.wfuncs:
self.wfuncs[cmd] = value
@ -17,44 +17,44 @@ import salt.syspaths as syspaths
|
|||
from salt.ext import six
|
||||
|
||||
# Set up the default values for all systems
|
||||
DEFAULTS = {u'mongo.db': u'salt',
|
||||
u'mongo.host': u'salt',
|
||||
u'mongo.password': u'',
|
||||
u'mongo.port': 27017,
|
||||
u'mongo.user': u'',
|
||||
u'redis.db': u'0',
|
||||
u'redis.host': u'salt',
|
||||
u'redis.port': 6379,
|
||||
u'test.foo': u'unconfigured',
|
||||
u'ca.cert_base_path': u'/etc/pki',
|
||||
u'solr.cores': [],
|
||||
u'solr.host': u'localhost',
|
||||
u'solr.port': u'8983',
|
||||
u'solr.baseurl': u'/solr',
|
||||
u'solr.type': u'master',
|
||||
u'solr.request_timeout': None,
|
||||
u'solr.init_script': u'/etc/rc.d/solr',
|
||||
u'solr.dih.import_options': {u'clean': False, u'optimize': True,
|
||||
u'commit': True, u'verbose': False},
|
||||
u'solr.backup_path': None,
|
||||
u'solr.num_backups': 1,
|
||||
u'poudriere.config': u'/usr/local/etc/poudriere.conf',
|
||||
u'poudriere.config_dir': u'/usr/local/etc/poudriere.d',
|
||||
u'ldap.server': u'localhost',
|
||||
u'ldap.port': u'389',
|
||||
u'ldap.tls': False,
|
||||
u'ldap.scope': 2,
|
||||
u'ldap.attrs': None,
|
||||
u'ldap.binddn': u'',
|
||||
u'ldap.bindpw': u'',
|
||||
u'hosts.file': u'/etc/hosts',
|
||||
u'aliases.file': u'/etc/aliases',
|
||||
u'virt.images': os.path.join(syspaths.SRV_ROOT_DIR, u'salt-images'),
|
||||
u'virt.tunnel': False,
|
||||
DEFAULTS = {'mongo.db': 'salt',
|
||||
'mongo.host': 'salt',
|
||||
'mongo.password': '',
|
||||
'mongo.port': 27017,
|
||||
'mongo.user': '',
|
||||
'redis.db': '0',
|
||||
'redis.host': 'salt',
|
||||
'redis.port': 6379,
|
||||
'test.foo': 'unconfigured',
|
||||
'ca.cert_base_path': '/etc/pki',
|
||||
'solr.cores': [],
|
||||
'solr.host': 'localhost',
|
||||
'solr.port': '8983',
|
||||
'solr.baseurl': '/solr',
|
||||
'solr.type': 'master',
|
||||
'solr.request_timeout': None,
|
||||
'solr.init_script': '/etc/rc.d/solr',
|
||||
'solr.dih.import_options': {'clean': False, 'optimize': True,
|
||||
'commit': True, 'verbose': False},
|
||||
'solr.backup_path': None,
|
||||
'solr.num_backups': 1,
|
||||
'poudriere.config': '/usr/local/etc/poudriere.conf',
|
||||
'poudriere.config_dir': '/usr/local/etc/poudriere.d',
|
||||
'ldap.server': 'localhost',
|
||||
'ldap.port': '389',
|
||||
'ldap.tls': False,
|
||||
'ldap.scope': 2,
|
||||
'ldap.attrs': None,
|
||||
'ldap.binddn': '',
|
||||
'ldap.bindpw': '',
|
||||
'hosts.file': '/etc/hosts',
|
||||
'aliases.file': '/etc/aliases',
|
||||
'virt.images': os.path.join(syspaths.SRV_ROOT_DIR, 'salt-images'),
|
||||
'virt.tunnel': False,
|
||||
}
|
||||
|
||||
|
||||
def backup_mode(backup=u''):
|
||||
def backup_mode(backup=''):
|
||||
'''
|
||||
Return the backup mode
|
||||
|
||||
|
@ -66,7 +66,7 @@ def backup_mode(backup=u''):
|
|||
'''
|
||||
if backup:
|
||||
return backup
|
||||
return option(u'backup_mode')
|
||||
return option('backup_mode')
|
||||
|
||||
|
||||
def manage_mode(mode):
|
||||
|
@ -97,14 +97,14 @@ def valid_fileproto(uri):
|
|||
salt '*' config.valid_fileproto salt://path/to/file
|
||||
'''
|
||||
try:
|
||||
return bool(re.match(u'^(?:salt|https?|ftp)://', uri))
|
||||
return bool(re.match('^(?:salt|https?|ftp)://', uri))
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def option(
|
||||
value,
|
||||
default=u'',
|
||||
default='',
|
||||
omit_opts=False,
|
||||
omit_master=False,
|
||||
omit_pillar=False):
|
||||
|
@ -121,8 +121,8 @@ def option(
|
|||
if value in __opts__:
|
||||
return __opts__[value]
|
||||
if not omit_master:
|
||||
if value in __pillar__.get(u'master', {}):
|
||||
return __pillar__[u'master'][value]
|
||||
if value in __pillar__.get('master', {}):
|
||||
return __pillar__['master'][value]
|
||||
if not omit_pillar:
|
||||
if value in __pillar__:
|
||||
return __pillar__[value]
|
||||
|
@ -132,7 +132,7 @@ def option(
|
|||
|
||||
|
||||
def merge(value,
|
||||
default=u'',
|
||||
default='',
|
||||
omit_opts=False,
|
||||
omit_master=False,
|
||||
omit_pillar=False):
|
||||
|
@ -155,8 +155,8 @@ def merge(value,
|
|||
if isinstance(ret, six.string_types):
|
||||
return ret
|
||||
if not omit_master:
|
||||
if value in __pillar__.get(u'master', {}):
|
||||
tmp = __pillar__[u'master'][value]
|
||||
if value in __pillar__.get('master', {}):
|
||||
tmp = __pillar__['master'][value]
|
||||
if ret is None:
|
||||
ret = tmp
|
||||
if isinstance(ret, six.string_types):
|
||||
|
@ -185,7 +185,7 @@ def merge(value,
|
|||
return ret or default
|
||||
|
||||
|
||||
def get(key, default=u''):
|
||||
def get(key, default=''):
|
||||
'''
|
||||
.. versionadded: 0.14.0
|
||||
|
||||
|
@ -216,17 +216,17 @@ def get(key, default=u''):
|
|||
|
||||
salt '*' config.get pkg:apache
|
||||
'''
|
||||
ret = salt.utils.data.traverse_dict_and_list(__opts__, key, u'_|-')
|
||||
if ret != u'_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(__opts__, key, '_|-')
|
||||
if ret != '_|-':
|
||||
return ret
|
||||
ret = salt.utils.data.traverse_dict_and_list(__grains__, key, u'_|-')
|
||||
if ret != u'_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(__grains__, key, '_|-')
|
||||
if ret != '_|-':
|
||||
return ret
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__, key, u'_|-')
|
||||
if ret != u'_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__, key, '_|-')
|
||||
if ret != '_|-':
|
||||
return ret
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__.get(u'master', {}), key, u'_|-')
|
||||
if ret != u'_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__.get('master', {}), key, '_|-')
|
||||
if ret != '_|-':
|
||||
return ret
|
||||
return default
|
||||
|
||||
|
@ -243,10 +243,10 @@ def dot_vals(value):
|
|||
salt '*' config.dot_vals host
|
||||
'''
|
||||
ret = {}
|
||||
for key, val in six.iteritems(__pillar__.get(u'master', {})):
|
||||
if key.startswith(u'{0}.'.format(value)):
|
||||
for key, val in six.iteritems(__pillar__.get('master', {})):
|
||||
if key.startswith('{0}.'.format(value)):
|
||||
ret[key] = val
|
||||
for key, val in six.iteritems(__opts__):
|
||||
if key.startswith(u'{0}.'.format(value)):
|
||||
if key.startswith('{0}.'.format(value)):
|
||||
ret[key] = val
|
||||
return ret
|
||||
|
|
|
@ -18,7 +18,7 @@ log = logging.getLogger(__name__)
|
|||
|
||||
def get_file(path,
|
||||
dest,
|
||||
saltenv=u'base',
|
||||
saltenv='base',
|
||||
makedirs=False,
|
||||
template=None,
|
||||
gzip=None):
|
||||
|
@ -31,83 +31,83 @@ def get_file(path,
|
|||
cp.get_file. The argument is only accepted for interface compatibility.
|
||||
'''
|
||||
if gzip is not None:
|
||||
log.warning(u'The gzip argument to cp.get_file in salt-ssh is '
|
||||
u'unsupported')
|
||||
log.warning('The gzip argument to cp.get_file in salt-ssh is '
|
||||
'unsupported')
|
||||
|
||||
if template is not None:
|
||||
(path, dest) = _render_filenames(path, dest, saltenv, template)
|
||||
|
||||
src = __context__[u'fileclient'].cache_file(
|
||||
src = __context__['fileclient'].cache_file(
|
||||
path,
|
||||
saltenv,
|
||||
cachedir=os.path.join(u'salt-ssh', __salt__.kwargs[u'id_']))
|
||||
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
u'',
|
||||
'',
|
||||
**__salt__.kwargs)
|
||||
ret = single.shell.send(src, dest, makedirs)
|
||||
return not ret[2]
|
||||
|
||||
|
||||
def get_dir(path, dest, saltenv=u'base'):
|
||||
def get_dir(path, dest, saltenv='base'):
|
||||
'''
|
||||
Transfer a directory down
|
||||
'''
|
||||
src = __context__[u'fileclient'].cache_dir(
|
||||
src = __context__['fileclient'].cache_dir(
|
||||
path,
|
||||
saltenv,
|
||||
cachedir=os.path.join(u'salt-ssh', __salt__.kwargs[u'id_']))
|
||||
src = u' '.join(src)
|
||||
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
|
||||
src = ' '.join(src)
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
u'',
|
||||
'',
|
||||
**__salt__.kwargs)
|
||||
ret = single.shell.send(src, dest)
|
||||
return not ret[2]
|
||||
|
||||
|
||||
def get_url(path, dest, saltenv=u'base'):
|
||||
def get_url(path, dest, saltenv='base'):
|
||||
'''
|
||||
retrieve a URL
|
||||
'''
|
||||
src = __context__[u'fileclient'].cache_file(
|
||||
src = __context__['fileclient'].cache_file(
|
||||
path,
|
||||
saltenv,
|
||||
cachedir=os.path.join(u'salt-ssh', __salt__.kwargs[u'id_']))
|
||||
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
u'',
|
||||
'',
|
||||
**__salt__.kwargs)
|
||||
ret = single.shell.send(src, dest)
|
||||
return not ret[2]
|
||||
|
||||
|
||||
def list_states(saltenv=u'base'):
|
||||
def list_states(saltenv='base'):
|
||||
'''
|
||||
List all the available state modules in an environment
|
||||
'''
|
||||
return __context__[u'fileclient'].list_states(saltenv)
|
||||
return __context__['fileclient'].list_states(saltenv)
|
||||
|
||||
|
||||
def list_master(saltenv=u'base', prefix=u''):
|
||||
def list_master(saltenv='base', prefix=''):
|
||||
'''
|
||||
List all of the files stored on the master
|
||||
'''
|
||||
return __context__[u'fileclient'].file_list(saltenv, prefix)
|
||||
return __context__['fileclient'].file_list(saltenv, prefix)
|
||||
|
||||
|
||||
def list_master_dirs(saltenv=u'base', prefix=u''):
|
||||
def list_master_dirs(saltenv='base', prefix=''):
|
||||
'''
|
||||
List all of the directories stored on the master
|
||||
'''
|
||||
return __context__[u'fileclient'].dir_list(saltenv, prefix)
|
||||
return __context__['fileclient'].dir_list(saltenv, prefix)
|
||||
|
||||
|
||||
def list_master_symlinks(saltenv=u'base', prefix=u''):
|
||||
def list_master_symlinks(saltenv='base', prefix=''):
|
||||
'''
|
||||
List all of the symlinks stored on the master
|
||||
'''
|
||||
return __context__[u'fileclient'].symlink_list(saltenv, prefix)
|
||||
return __context__['fileclient'].symlink_list(saltenv, prefix)
|
||||
|
||||
|
||||
def _render_filenames(path, dest, saltenv, template):
|
||||
|
@ -122,16 +122,16 @@ def _render_filenames(path, dest, saltenv, template):
|
|||
# render the path as a template using path_template_engine as the engine
|
||||
if template not in salt.utils.templates.TEMPLATE_REGISTRY:
|
||||
raise CommandExecutionError(
|
||||
u'Attempted to render file paths with unavailable engine '
|
||||
u'{0}'.format(template)
|
||||
'Attempted to render file paths with unavailable engine '
|
||||
'{0}'.format(template)
|
||||
)
|
||||
|
||||
kwargs = {}
|
||||
kwargs[u'salt'] = __salt__
|
||||
kwargs[u'pillar'] = __pillar__
|
||||
kwargs[u'grains'] = __grains__
|
||||
kwargs[u'opts'] = __opts__
|
||||
kwargs[u'saltenv'] = saltenv
|
||||
kwargs['salt'] = __salt__
|
||||
kwargs['pillar'] = __pillar__
|
||||
kwargs['grains'] = __grains__
|
||||
kwargs['opts'] = __opts__
|
||||
kwargs['saltenv'] = saltenv
|
||||
|
||||
def _render(contents):
|
||||
'''
|
||||
|
@ -140,7 +140,7 @@ def _render_filenames(path, dest, saltenv, template):
|
|||
'''
|
||||
# write out path to temp file
|
||||
tmp_path_fn = salt.utils.files.mkstemp()
|
||||
with salt.utils.files.fopen(tmp_path_fn, u'w+') as fp_:
|
||||
with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
|
||||
fp_.write(contents)
|
||||
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
|
||||
tmp_path_fn,
|
||||
|
@ -148,15 +148,15 @@ def _render_filenames(path, dest, saltenv, template):
|
|||
**kwargs
|
||||
)
|
||||
salt.utils.files.safe_rm(tmp_path_fn)
|
||||
if not data[u'result']:
|
||||
if not data['result']:
|
||||
# Failed to render the template
|
||||
raise CommandExecutionError(
|
||||
u'Failed to render file path with error: {0}'.format(
|
||||
data[u'data']
|
||||
'Failed to render file path with error: {0}'.format(
|
||||
data['data']
|
||||
)
|
||||
)
|
||||
else:
|
||||
return data[u'data']
|
||||
return data['data']
|
||||
|
||||
path = _render(path)
|
||||
dest = _render(dest)
|
||||
|
|
|
@ -29,28 +29,28 @@ def _serial_sanitizer(instr):
|
|||
'''
|
||||
length = len(instr)
|
||||
index = int(math.floor(length * .75))
|
||||
return u'{0}{1}'.format(instr[:index], u'X' * (length - index))
|
||||
return '{0}{1}'.format(instr[:index], 'X' * (length - index))
|
||||
|
||||
|
||||
_FQDN_SANITIZER = lambda x: u'MINION.DOMAINNAME'
|
||||
_HOSTNAME_SANITIZER = lambda x: u'MINION'
|
||||
_DOMAINNAME_SANITIZER = lambda x: u'DOMAINNAME'
|
||||
_FQDN_SANITIZER = lambda x: 'MINION.DOMAINNAME'
|
||||
_HOSTNAME_SANITIZER = lambda x: 'MINION'
|
||||
_DOMAINNAME_SANITIZER = lambda x: 'DOMAINNAME'
|
||||
|
||||
|
||||
# A dictionary of grain -> function mappings for sanitizing grain output. This
|
||||
# is used when the 'sanitize' flag is given.
|
||||
_SANITIZERS = {
|
||||
u'serialnumber': _serial_sanitizer,
|
||||
u'domain': _DOMAINNAME_SANITIZER,
|
||||
u'fqdn': _FQDN_SANITIZER,
|
||||
u'id': _FQDN_SANITIZER,
|
||||
u'host': _HOSTNAME_SANITIZER,
|
||||
u'localhost': _HOSTNAME_SANITIZER,
|
||||
u'nodename': _HOSTNAME_SANITIZER,
|
||||
'serialnumber': _serial_sanitizer,
|
||||
'domain': _DOMAINNAME_SANITIZER,
|
||||
'fqdn': _FQDN_SANITIZER,
|
||||
'id': _FQDN_SANITIZER,
|
||||
'host': _HOSTNAME_SANITIZER,
|
||||
'localhost': _HOSTNAME_SANITIZER,
|
||||
'nodename': _HOSTNAME_SANITIZER,
|
||||
}
|
||||
|
||||
|
||||
def get(key, default=u'', delimiter=DEFAULT_TARGET_DELIM, ordered=True):
|
||||
def get(key, default='', delimiter=DEFAULT_TARGET_DELIM, ordered=True):
|
||||
'''
|
||||
Attempt to retrieve the named value from grains, if the named value is not
|
||||
available return the passed default. The default return is an empty string.
|
||||
|
@ -154,7 +154,7 @@ def item(*args, **kwargs):
|
|||
ret[arg] = __grains__[arg]
|
||||
except KeyError:
|
||||
pass
|
||||
if salt.utils.data.is_true(kwargs.get(u'sanitize')):
|
||||
if salt.utils.data.is_true(kwargs.get('sanitize')):
|
||||
for arg, func in six.iteritems(_SANITIZERS):
|
||||
if arg in ret:
|
||||
ret[arg] = func(ret[arg])
|
||||
|
@ -175,9 +175,9 @@ def ls(): # pylint: disable=C0103
|
|||
|
||||
|
||||
def filter_by(lookup_dict,
|
||||
grain=u'os_family',
|
||||
grain='os_family',
|
||||
merge=None,
|
||||
default=u'default',
|
||||
default='default',
|
||||
base=None):
|
||||
'''
|
||||
.. versionadded:: 0.17.0
|
||||
|
@ -271,12 +271,12 @@ def filter_by(lookup_dict,
|
|||
|
||||
elif isinstance(base_values, collections.Mapping):
|
||||
if not isinstance(ret, collections.Mapping):
|
||||
raise SaltException(u'filter_by default and look-up values must both be dictionaries.')
|
||||
raise SaltException('filter_by default and look-up values must both be dictionaries.')
|
||||
ret = salt.utils.dictupdate.update(copy.deepcopy(base_values), ret)
|
||||
|
||||
if merge:
|
||||
if not isinstance(merge, collections.Mapping):
|
||||
raise SaltException(u'filter_by merge argument must be a dictionary.')
|
||||
raise SaltException('filter_by merge argument must be a dictionary.')
|
||||
else:
|
||||
if ret is None:
|
||||
ret = merge
|
||||
|
|
|
@ -14,7 +14,7 @@ import copy
import salt.client.ssh


def get(tgt, fun, tgt_type=u'glob', roster=u'flat'):
def get(tgt, fun, tgt_type='glob', roster='flat'):
'''
Get data from the mine based on the target, function and tgt_type

@ -36,15 +36,15 @@ def get(tgt, fun, tgt_type=u'glob', roster=u'flat'):
salt-ssh '*' mine.get '192.168.5.0' network.ipaddrs roster=scan
'''
# Set up opts for the SSH object
opts = copy.deepcopy(__context__[u'master_opts'])
opts = copy.deepcopy(__context__['master_opts'])
minopts = copy.deepcopy(__opts__)
opts.update(minopts)
if roster:
opts[u'roster'] = roster
opts[u'argv'] = [fun]
opts[u'selected_target_option'] = tgt_type
opts[u'tgt'] = tgt
opts[u'arg'] = []
opts['roster'] = roster
opts['argv'] = [fun]
opts['selected_target_option'] = tgt_type
opts['tgt'] = tgt
opts['arg'] = []

# Create the SSH object to handle the actual call
ssh = salt.client.ssh.SSH(opts)

@ -56,8 +56,8 @@ def get(tgt, fun, tgt_type=u'glob', roster=u'flat'):

cret = {}
for host in rets:
if u'return' in rets[host]:
cret[host] = rets[host][u'return']
if 'return' in rets[host]:
cret[host] = rets[host]['return']
else:
cret[host] = rets[host]
return cret


@ -14,7 +14,7 @@ import salt.utils.dictupdate
from salt.defaults import DEFAULT_TARGET_DELIM


def get(key, default=u'', merge=False, delimiter=DEFAULT_TARGET_DELIM):
def get(key, default='', merge=False, delimiter=DEFAULT_TARGET_DELIM):
'''
.. versionadded:: 0.14

@ -132,10 +132,10 @@ def keys(key, delimiter=DEFAULT_TARGET_DELIM):
__pillar__, key, KeyError, delimiter)

if ret is KeyError:
raise KeyError(u"Pillar key not found: {0}".format(key))
raise KeyError("Pillar key not found: {0}".format(key))

if not isinstance(ret, dict):
raise ValueError(u"Pillar value in key {0} is not a dict".format(key))
raise ValueError("Pillar value in key {0} is not a dict".format(key))

return ret.keys()
@ -26,10 +26,10 @@ log = logging.getLogger(__name__)
|
|||
def _publish(tgt,
|
||||
fun,
|
||||
arg=None,
|
||||
tgt_type=u'glob',
|
||||
returner=u'',
|
||||
tgt_type='glob',
|
||||
returner='',
|
||||
timeout=None,
|
||||
form=u'clean',
|
||||
form='clean',
|
||||
roster=None):
|
||||
'''
|
||||
Publish a command "from the minion out to other minions". In reality, the
|
||||
|
@ -55,13 +55,13 @@ def _publish(tgt,
|
|||
|
||||
salt-ssh system.example.com publish.publish '*' cmd.run 'ls -la /tmp'
|
||||
'''
|
||||
if fun.startswith(u'publish.'):
|
||||
log.info(u'Cannot publish publish calls. Returning {}')
|
||||
if fun.startswith('publish.'):
|
||||
log.info('Cannot publish publish calls. Returning {}')
|
||||
return {}
|
||||
|
||||
# TODO: implement returners? Do they make sense for salt-ssh calls?
|
||||
if returner:
|
||||
log.warning(u'Returners currently not supported in salt-ssh publish')
|
||||
log.warning('Returners currently not supported in salt-ssh publish')
|
||||
|
||||
# Make sure args have been processed
|
||||
if arg is None:
|
||||
|
@ -74,17 +74,17 @@ def _publish(tgt,
|
|||
arg = []
|
||||
|
||||
# Set up opts for the SSH object
|
||||
opts = copy.deepcopy(__context__[u'master_opts'])
|
||||
opts = copy.deepcopy(__context__['master_opts'])
|
||||
minopts = copy.deepcopy(__opts__)
|
||||
opts.update(minopts)
|
||||
if roster:
|
||||
opts[u'roster'] = roster
|
||||
opts['roster'] = roster
|
||||
if timeout:
|
||||
opts[u'timeout'] = timeout
|
||||
opts[u'argv'] = [fun] + arg
|
||||
opts[u'selected_target_option'] = tgt_type
|
||||
opts[u'tgt'] = tgt
|
||||
opts[u'arg'] = arg
|
||||
opts['timeout'] = timeout
|
||||
opts['argv'] = [fun] + arg
|
||||
opts['selected_target_option'] = tgt_type
|
||||
opts['tgt'] = tgt
|
||||
opts['arg'] = arg
|
||||
|
||||
# Create the SSH object to handle the actual call
|
||||
ssh = salt.client.ssh.SSH(opts)
|
||||
|
@ -94,11 +94,11 @@ def _publish(tgt,
|
|||
for ret in ssh.run_iter():
|
||||
rets.update(ret)
|
||||
|
||||
if form == u'clean':
|
||||
if form == 'clean':
|
||||
cret = {}
|
||||
for host in rets:
|
||||
if u'return' in rets[host]:
|
||||
cret[host] = rets[host][u'return']
|
||||
if 'return' in rets[host]:
|
||||
cret[host] = rets[host]['return']
|
||||
else:
|
||||
cret[host] = rets[host]
|
||||
return cret
|
||||
|
@ -109,8 +109,8 @@ def _publish(tgt,
|
|||
def publish(tgt,
|
||||
fun,
|
||||
arg=None,
|
||||
tgt_type=u'glob',
|
||||
returner=u'',
|
||||
tgt_type='glob',
|
||||
returner='',
|
||||
timeout=5,
|
||||
roster=None,
|
||||
expr_form=None):
|
||||
|
@ -176,10 +176,10 @@ def publish(tgt,
|
|||
# performing the cleanup on this deprecation.
|
||||
if expr_form is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
u'Fluorine',
|
||||
u'the target type should be passed using the \'tgt_type\' '
|
||||
u'argument instead of \'expr_form\'. Support for using '
|
||||
u'\'expr_form\' will be removed in Salt Fluorine.'
|
||||
'Fluorine',
|
||||
'the target type should be passed using the \'tgt_type\' '
|
||||
'argument instead of \'expr_form\'. Support for using '
|
||||
'\'expr_form\' will be removed in Salt Fluorine.'
|
||||
)
|
||||
tgt_type = expr_form
|
||||
|
||||
|
@ -189,15 +189,15 @@ def publish(tgt,
|
|||
tgt_type=tgt_type,
|
||||
returner=returner,
|
||||
timeout=timeout,
|
||||
form=u'clean',
|
||||
form='clean',
|
||||
roster=roster)
|
||||
|
||||
|
||||
def full_data(tgt,
|
||||
fun,
|
||||
arg=None,
|
||||
tgt_type=u'glob',
|
||||
returner=u'',
|
||||
tgt_type='glob',
|
||||
returner='',
|
||||
timeout=5,
|
||||
roster=None,
|
||||
expr_form=None):
|
||||
|
@ -226,10 +226,10 @@ def full_data(tgt,
|
|||
# performing the cleanup on this deprecation.
|
||||
if expr_form is not None:
|
||||
salt.utils.versions.warn_until(
|
||||
u'Fluorine',
|
||||
u'the target type should be passed using the \'tgt_type\' '
|
||||
u'argument instead of \'expr_form\'. Support for using '
|
||||
u'\'expr_form\' will be removed in Salt Fluorine.'
|
||||
'Fluorine',
|
||||
'the target type should be passed using the \'tgt_type\' '
|
||||
'argument instead of \'expr_form\'. Support for using '
|
||||
'\'expr_form\' will be removed in Salt Fluorine.'
|
||||
)
|
||||
tgt_type = expr_form
|
||||
|
||||
|
@ -239,7 +239,7 @@ def full_data(tgt,
|
|||
tgt_type=tgt_type,
|
||||
returner=returner,
|
||||
timeout=timeout,
|
||||
form=u'full',
|
||||
form='full',
|
||||
roster=roster)
|
||||
|
||||
|
||||
|
@ -262,5 +262,5 @@ def runner(fun, arg=None, timeout=5):
|
|||
arg = []
|
||||
|
||||
# Create and run the runner
|
||||
runner = salt.runner.RunnerClient(__opts__[u'__master_opts__'])
|
||||
runner = salt.runner.RunnerClient(__opts__['__master_opts__'])
|
||||
return runner.cmd(fun, arg)
|
||||
|
|
|
@ -29,7 +29,7 @@ import salt.log
|
|||
from salt.ext import six
|
||||
|
||||
__func_alias__ = {
|
||||
u'apply_': u'apply'
|
||||
'apply_': 'apply'
|
||||
}
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -42,35 +42,35 @@ def _merge_extra_filerefs(*args):
|
|||
for arg in args:
|
||||
if isinstance(arg, six.string_types):
|
||||
if arg:
|
||||
ret.extend(arg.split(u','))
|
||||
ret.extend(arg.split(','))
|
||||
elif isinstance(arg, list):
|
||||
if arg:
|
||||
ret.extend(arg)
|
||||
return u','.join(ret)
|
||||
return ','.join(ret)
|
||||
|
||||
|
||||
def sls(mods, saltenv=u'base', test=None, exclude=None, **kwargs):
|
||||
def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
|
||||
'''
|
||||
Create the seed file for a state.sls run
|
||||
'''
|
||||
st_kwargs = __salt__.kwargs
|
||||
__opts__[u'grains'] = __grains__
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__opts__['grains'] = __grains__
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
if isinstance(mods, six.string_types):
|
||||
mods = mods.split(u',')
|
||||
mods = mods.split(',')
|
||||
high_data, errors = st_.render_highstate({saltenv: mods})
|
||||
if exclude:
|
||||
if isinstance(exclude, six.string_types):
|
||||
exclude = exclude.split(u',')
|
||||
if u'__exclude__' in high_data:
|
||||
high_data[u'__exclude__'].extend(exclude)
|
||||
exclude = exclude.split(',')
|
||||
if '__exclude__' in high_data:
|
||||
high_data['__exclude__'].extend(exclude)
|
||||
else:
|
||||
high_data[u'__exclude__'] = exclude
|
||||
high_data['__exclude__'] = exclude
|
||||
high_data, ext_errors = st_.state.reconcile_extend(high_data)
|
||||
errors += ext_errors
|
||||
errors += st_.state.verify_high(high_data)
|
||||
|
@ -87,38 +87,38 @@ def sls(mods, saltenv=u'base', test=None, exclude=None, **kwargs):
|
|||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get(u'extra_filerefs', u''),
|
||||
__opts__.get(u'extra_filerefs', u'')
|
||||
kwargs.get('extra_filerefs', ''),
|
||||
__opts__.get('extra_filerefs', '')
|
||||
)
|
||||
)
|
||||
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
|
||||
roster_grains = roster.opts[u'grains']
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
|
||||
roster_grains = roster.opts['grains']
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__opts__,
|
||||
__context__[u'fileclient'],
|
||||
__context__['fileclient'],
|
||||
chunks,
|
||||
file_refs,
|
||||
__pillar__,
|
||||
st_kwargs[u'id_'],
|
||||
st_kwargs['id_'],
|
||||
roster_grains)
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__[u'hash_type'])
|
||||
cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__[u'thin_dir'],
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
|
||||
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__['thin_dir'],
|
||||
test,
|
||||
trans_tar_sum,
|
||||
__opts__[u'hash_type'])
|
||||
__opts__['hash_type'])
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
cmd,
|
||||
fsclient=__context__[u'fileclient'],
|
||||
fsclient=__context__['fileclient'],
|
||||
minion_opts=__salt__.minion_opts,
|
||||
**st_kwargs)
|
||||
single.shell.send(
|
||||
trans_tar,
|
||||
u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
|
||||
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
|
||||
stdout, stderr, _ = single.cmd_block()
|
||||
|
||||
# Clean up our tar
|
||||
|
@ -131,7 +131,7 @@ def sls(mods, saltenv=u'base', test=None, exclude=None, **kwargs):
|
|||
try:
|
||||
return json.loads(stdout, object_hook=salt.utils.data.decode_dict)
|
||||
except Exception as e:
|
||||
log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error("JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error(str(e))
|
||||
|
||||
# If for some reason the json load fails, return the stdout
|
||||
|
@ -150,51 +150,51 @@ def low(data, **kwargs):
|
|||
salt '*' state.low '{"state": "pkg", "fun": "installed", "name": "vi"}'
|
||||
'''
|
||||
st_kwargs = __salt__.kwargs
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
chunks = [data]
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
for chunk in chunks:
|
||||
chunk[u'__id__'] = chunk[u'name'] if not chunk.get(u'__id__') else chunk[u'__id__']
|
||||
chunk['__id__'] = chunk['name'] if not chunk.get('__id__') else chunk['__id__']
|
||||
err = st_.state.verify_data(data)
|
||||
if err:
|
||||
return err
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get(u'extra_filerefs', u''),
|
||||
__opts__.get(u'extra_filerefs', u'')
|
||||
kwargs.get('extra_filerefs', ''),
|
||||
__opts__.get('extra_filerefs', '')
|
||||
)
|
||||
)
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
|
||||
roster_grains = roster.opts[u'grains']
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
|
||||
roster_grains = roster.opts['grains']
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__opts__,
|
||||
__context__[u'fileclient'],
|
||||
__context__['fileclient'],
|
||||
chunks,
|
||||
file_refs,
|
||||
__pillar__,
|
||||
st_kwargs[u'id_'],
|
||||
st_kwargs['id_'],
|
||||
roster_grains)
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__[u'hash_type'])
|
||||
cmd = u'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
|
||||
__opts__[u'thin_dir'],
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
|
||||
cmd = 'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
|
||||
__opts__['thin_dir'],
|
||||
trans_tar_sum,
|
||||
__opts__[u'hash_type'])
|
||||
__opts__['hash_type'])
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
cmd,
|
||||
fsclient=__context__[u'fileclient'],
|
||||
fsclient=__context__['fileclient'],
|
||||
minion_opts=__salt__.minion_opts,
|
||||
**st_kwargs)
|
||||
single.shell.send(
|
||||
trans_tar,
|
||||
u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
|
||||
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
|
||||
stdout, stderr, _ = single.cmd_block()
|
||||
|
||||
# Clean up our tar
|
||||
|
@ -207,7 +207,7 @@ def low(data, **kwargs):
|
|||
try:
|
||||
return json.loads(stdout, object_hook=salt.utils.data.decode_dict)
|
||||
except Exception as e:
|
||||
log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error("JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error(str(e))
|
||||
|
||||
# If for some reason the json load fails, return the stdout
|
||||
|
@ -225,49 +225,49 @@ def high(data, **kwargs):
|
|||
|
||||
salt '*' state.high '{"vim": {"pkg": ["installed"]}}'
|
||||
'''
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
st_kwargs = __salt__.kwargs
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
chunks = st_.state.compile_high_data(data)
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get(u'extra_filerefs', u''),
|
||||
__opts__.get(u'extra_filerefs', u'')
|
||||
kwargs.get('extra_filerefs', ''),
|
||||
__opts__.get('extra_filerefs', '')
|
||||
)
|
||||
)
|
||||
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
|
||||
roster_grains = roster.opts[u'grains']
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
|
||||
roster_grains = roster.opts['grains']
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__opts__,
|
||||
__context__[u'fileclient'],
|
||||
__context__['fileclient'],
|
||||
chunks,
|
||||
file_refs,
|
||||
__pillar__,
|
||||
st_kwargs[u'id_'],
|
||||
st_kwargs['id_'],
|
||||
roster_grains)
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__[u'hash_type'])
|
||||
cmd = u'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
|
||||
__opts__[u'thin_dir'],
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
|
||||
cmd = 'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
|
||||
__opts__['thin_dir'],
|
||||
trans_tar_sum,
|
||||
__opts__[u'hash_type'])
|
||||
__opts__['hash_type'])
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
cmd,
|
||||
fsclient=__context__[u'fileclient'],
|
||||
fsclient=__context__['fileclient'],
|
||||
minion_opts=__salt__.minion_opts,
|
||||
**st_kwargs)
|
||||
single.shell.send(
|
||||
trans_tar,
|
||||
u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
|
||||
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
|
||||
stdout, stderr, _ = single.cmd_block()
|
||||
|
||||
# Clean up our tar
|
||||
|
@ -280,7 +280,7 @@ def high(data, **kwargs):
|
|||
try:
|
||||
return json.loads(stdout, object_hook=salt.utils.data.decode_dict)
|
||||
except Exception as e:
|
||||
log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error("JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error(str(e))
|
||||
|
||||
# If for some reason the json load fails, return the stdout
|
||||
|
@ -459,56 +459,56 @@ def highstate(test=None, **kwargs):
|
|||
salt '*' state.highstate exclude=sls_to_exclude
|
||||
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
|
||||
'''
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
st_kwargs = __salt__.kwargs
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
chunks = st_.compile_low_chunks()
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get(u'extra_filerefs', u''),
|
||||
__opts__.get(u'extra_filerefs', u'')
|
||||
kwargs.get('extra_filerefs', ''),
|
||||
__opts__.get('extra_filerefs', '')
|
||||
)
|
||||
)
|
||||
# Check for errors
|
||||
for chunk in chunks:
|
||||
if not isinstance(chunk, dict):
|
||||
__context__[u'retcode'] = 1
|
||||
__context__['retcode'] = 1
|
||||
return chunks
|
||||
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
|
||||
roster_grains = roster.opts[u'grains']
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
|
||||
roster_grains = roster.opts['grains']
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__opts__,
|
||||
__context__[u'fileclient'],
|
||||
__context__['fileclient'],
|
||||
chunks,
|
||||
file_refs,
|
||||
__pillar__,
|
||||
st_kwargs[u'id_'],
|
||||
st_kwargs['id_'],
|
||||
roster_grains)
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__[u'hash_type'])
|
||||
cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__[u'thin_dir'],
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
|
||||
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__['thin_dir'],
|
||||
test,
|
||||
trans_tar_sum,
|
||||
__opts__[u'hash_type'])
|
||||
__opts__['hash_type'])
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
cmd,
|
||||
fsclient=__context__[u'fileclient'],
|
||||
fsclient=__context__['fileclient'],
|
||||
minion_opts=__salt__.minion_opts,
|
||||
**st_kwargs)
|
||||
single.shell.send(
|
||||
trans_tar,
|
||||
u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
|
||||
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
|
||||
stdout, stderr, _ = single.cmd_block()
|
||||
|
||||
# Clean up our tar
|
||||
|
@ -521,7 +521,7 @@ def highstate(test=None, **kwargs):
|
|||
try:
|
||||
return json.loads(stdout, object_hook=salt.utils.data.decode_dict)
|
||||
except Exception as e:
|
||||
log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error("JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error(str(e))
|
||||
|
||||
# If for some reason the json load fails, return the stdout
|
||||
|
@ -540,55 +540,55 @@ def top(topfn, test=None, **kwargs):
|
|||
salt '*' state.top reverse_top.sls exclude=sls_to_exclude
|
||||
salt '*' state.top reverse_top.sls exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
|
||||
'''
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
st_kwargs = __salt__.kwargs
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
if salt.utils.args.test_mode(test=test, **kwargs):
|
||||
__opts__[u'test'] = True
|
||||
__opts__['test'] = True
|
||||
else:
|
||||
__opts__[u'test'] = __opts__.get(u'test', None)
|
||||
__opts__['test'] = __opts__.get('test', None)
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
st_.opts[u'state_top'] = os.path.join(u'salt://', topfn)
|
||||
__context__['fileclient'])
|
||||
st_.opts['state_top'] = os.path.join('salt://', topfn)
|
||||
chunks = st_.compile_low_chunks()
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get(u'extra_filerefs', u''),
|
||||
__opts__.get(u'extra_filerefs', u'')
|
||||
kwargs.get('extra_filerefs', ''),
|
||||
__opts__.get('extra_filerefs', '')
|
||||
)
|
||||
)
|
||||
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
|
||||
roster_grains = roster.opts[u'grains']
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
|
||||
roster_grains = roster.opts['grains']
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__opts__,
|
||||
__context__[u'fileclient'],
|
||||
__context__['fileclient'],
|
||||
chunks,
|
||||
file_refs,
|
||||
__pillar__,
|
||||
st_kwargs[u'id_'],
|
||||
st_kwargs['id_'],
|
||||
roster_grains)
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__[u'hash_type'])
|
||||
cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__[u'thin_dir'],
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
|
||||
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__['thin_dir'],
|
||||
test,
|
||||
trans_tar_sum,
|
||||
__opts__[u'hash_type'])
|
||||
__opts__['hash_type'])
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
cmd,
|
||||
fsclient=__context__[u'fileclient'],
|
||||
fsclient=__context__['fileclient'],
|
||||
minion_opts=__salt__.minion_opts,
|
||||
**st_kwargs)
|
||||
single.shell.send(
|
||||
trans_tar,
|
||||
u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
|
||||
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
|
||||
stdout, stderr, _ = single.cmd_block()
|
||||
|
||||
# Clean up our tar
|
||||
|
@ -601,7 +601,7 @@ def top(topfn, test=None, **kwargs):
|
|||
try:
|
||||
return json.loads(stdout, object_hook=salt.utils.data.decode_dict)
|
||||
except Exception as e:
|
||||
log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error("JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error(str(e))
|
||||
|
||||
# If for some reason the json load fails, return the stdout
|
||||
|
@@ -618,12 +618,12 @@ def show_highstate():

         salt '*' state.show_highstate
     '''
-    __opts__[u'grains'] = __grains__
+    __opts__['grains'] = __grains__
     st_ = salt.client.ssh.state.SSHHighState(
             __opts__,
             __pillar__,
             __salt__,
-            __context__[u'fileclient'])
+            __context__['fileclient'])
     return st_.compile_highstate()

@ -637,16 +637,16 @@ def show_lowstate():
|
|||
|
||||
salt '*' state.show_lowstate
|
||||
'''
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
return st_.compile_low_chunks()
|
||||
|
||||
|
||||
def show_sls(mods, saltenv=u'base', test=None, **kwargs):
|
||||
def show_sls(mods, saltenv='base', test=None, **kwargs):
|
||||
'''
|
||||
Display the state data from a specific sls or list of sls files on the
|
||||
master
|
||||
|
@ -657,20 +657,20 @@ def show_sls(mods, saltenv=u'base', test=None, **kwargs):
|
|||
|
||||
salt '*' state.show_sls core,edit.vim dev
|
||||
'''
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__opts__[u'grains'] = __grains__
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
__opts__['grains'] = __grains__
|
||||
opts = copy.copy(__opts__)
|
||||
if salt.utils.args.test_mode(test=test, **kwargs):
|
||||
opts[u'test'] = True
|
||||
opts['test'] = True
|
||||
else:
|
||||
opts[u'test'] = __opts__.get(u'test', None)
|
||||
opts['test'] = __opts__.get('test', None)
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
if isinstance(mods, six.string_types):
|
||||
mods = mods.split(u',')
|
||||
mods = mods.split(',')
|
||||
high_data, errors = st_.render_highstate({saltenv: mods})
|
||||
high_data, ext_errors = st_.state.reconcile_extend(high_data)
|
||||
errors += ext_errors
|
||||
|
@ -686,7 +686,7 @@ def show_sls(mods, saltenv=u'base', test=None, **kwargs):
|
|||
return high_data
|
||||
|
||||
|
||||
def show_low_sls(mods, saltenv=u'base', test=None, **kwargs):
|
||||
def show_low_sls(mods, saltenv='base', test=None, **kwargs):
|
||||
'''
|
||||
Display the low state data from a specific sls or list of sls files on the
|
||||
master.
|
||||
|
@ -699,21 +699,21 @@ def show_low_sls(mods, saltenv=u'base', test=None, **kwargs):
|
|||
|
||||
salt '*' state.show_sls core,edit.vim dev
|
||||
'''
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__opts__[u'grains'] = __grains__
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
__opts__['grains'] = __grains__
|
||||
|
||||
opts = copy.copy(__opts__)
|
||||
if salt.utils.args.test_mode(test=test, **kwargs):
|
||||
opts[u'test'] = True
|
||||
opts['test'] = True
|
||||
else:
|
||||
opts[u'test'] = __opts__.get(u'test', None)
|
||||
opts['test'] = __opts__.get('test', None)
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
if isinstance(mods, six.string_types):
|
||||
mods = mods.split(u',')
|
||||
mods = mods.split(',')
|
||||
high_data, errors = st_.render_highstate({saltenv: mods})
|
||||
high_data, ext_errors = st_.state.reconcile_extend(high_data)
|
||||
errors += ext_errors
|
||||
|
@ -740,12 +740,12 @@ def show_top():
|
|||
|
||||
salt '*' state.show_top
|
||||
'''
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
st_ = salt.client.ssh.state.SSHHighState(
|
||||
__opts__,
|
||||
__pillar__,
|
||||
__salt__,
|
||||
__context__[u'fileclient'])
|
||||
__context__['fileclient'])
|
||||
top_data = st_.get_top()
|
||||
errors = []
|
||||
errors += st_.verify_tops(top_data)
|
||||
|
@ -775,30 +775,30 @@ def single(fun, name, test=None, **kwargs):
|
|||
|
||||
'''
|
||||
st_kwargs = __salt__.kwargs
|
||||
__opts__[u'grains'] = __grains__
|
||||
__opts__['grains'] = __grains__
|
||||
|
||||
# state.fun -> [state, fun]
|
||||
comps = fun.split(u'.')
|
||||
comps = fun.split('.')
|
||||
if len(comps) < 2:
|
||||
__context__[u'retcode'] = 1
|
||||
return u'Invalid function passed'
|
||||
__context__['retcode'] = 1
|
||||
return 'Invalid function passed'
|
||||
|
||||
# Create the low chunk, using kwargs as a base
|
||||
kwargs.update({u'state': comps[0],
|
||||
u'fun': comps[1],
|
||||
u'__id__': name,
|
||||
u'name': name})
|
||||
kwargs.update({'state': comps[0],
|
||||
'fun': comps[1],
|
||||
'__id__': name,
|
||||
'name': name})
|
||||
|
||||
opts = copy.deepcopy(__opts__)
|
||||
|
||||
# Set test mode
|
||||
if salt.utils.args.test_mode(test=test, **kwargs):
|
||||
opts[u'test'] = True
|
||||
opts['test'] = True
|
||||
else:
|
||||
opts[u'test'] = __opts__.get(u'test', None)
|
||||
opts['test'] = __opts__.get('test', None)
|
||||
|
||||
# Get the override pillar data
|
||||
__pillar__.update(kwargs.get(u'pillar', {}))
|
||||
__pillar__.update(kwargs.get('pillar', {}))
|
||||
|
||||
# Create the State environment
|
||||
st_ = salt.client.ssh.state.SSHState(__opts__, __pillar__)
|
||||
|
@ -806,7 +806,7 @@ def single(fun, name, test=None, **kwargs):
|
|||
# Verify the low chunk
|
||||
err = st_.verify_data(kwargs)
|
||||
if err:
|
||||
__context__[u'retcode'] = 1
|
||||
__context__['retcode'] = 1
|
||||
return err
|
||||
|
||||
# Must be a list of low-chunks
|
||||
|
@ -817,46 +817,46 @@ def single(fun, name, test=None, **kwargs):
|
|||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get(u'extra_filerefs', u''),
|
||||
__opts__.get(u'extra_filerefs', u'')
|
||||
kwargs.get('extra_filerefs', ''),
|
||||
__opts__.get('extra_filerefs', '')
|
||||
)
|
||||
)
|
||||
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
|
||||
roster_grains = roster.opts[u'grains']
|
||||
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
|
||||
roster_grains = roster.opts['grains']
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__opts__,
|
||||
__context__[u'fileclient'],
|
||||
__context__['fileclient'],
|
||||
chunks,
|
||||
file_refs,
|
||||
__pillar__,
|
||||
st_kwargs[u'id_'],
|
||||
st_kwargs['id_'],
|
||||
roster_grains)
|
||||
|
||||
# Create a hash so we can verify the tar on the target system
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__[u'hash_type'])
|
||||
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
|
||||
|
||||
# We use state.pkg to execute the "state package"
|
||||
cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__[u'thin_dir'],
|
||||
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
|
||||
__opts__['thin_dir'],
|
||||
test,
|
||||
trans_tar_sum,
|
||||
__opts__[u'hash_type'])
|
||||
__opts__['hash_type'])
|
||||
|
||||
# Create a salt-ssh Single object to actually do the ssh work
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
cmd,
|
||||
fsclient=__context__[u'fileclient'],
|
||||
fsclient=__context__['fileclient'],
|
||||
minion_opts=__salt__.minion_opts,
|
||||
**st_kwargs)
|
||||
|
||||
# Copy the tar down
|
||||
single.shell.send(
|
||||
trans_tar,
|
||||
u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
|
||||
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
|
||||
|
||||
# Run the state.pkg command on the target
|
||||
stdout, stderr, _ = single.cmd_block()
|
||||
|
@ -871,7 +871,7 @@ def single(fun, name, test=None, **kwargs):
|
|||
try:
|
||||
return json.loads(stdout, object_hook=salt.utils.data.decode_dict)
|
||||
except Exception as e:
|
||||
log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error("JSON Render failed for: %s\n%s", stdout, stderr)
|
||||
log.error(str(e))
|
||||
|
||||
# If for some reason the json load fails, return the stdout
|
||||
|
|
|
@@ -3622,23 +3622,23 @@ def apply_minion_config(overrides=None,
     if overrides:
         opts.update(overrides)

-    if u'environment' in opts:
-        if u'saltenv' in opts:
+    if 'environment' in opts:
+        if 'saltenv' in opts:
             log.warning(
-                u'The \'saltenv\' and \'environment\' minion config options '
-                u'cannot both be used. Ignoring \'environment\' in favor of '
-                u'\'saltenv\'.',
+                'The \'saltenv\' and \'environment\' minion config options '
+                'cannot both be used. Ignoring \'environment\' in favor of '
+                '\'saltenv\'.',
             )
             # Set environment to saltenv in case someone's custom module is
             # refrencing __opts__['environment']
-            opts[u'environment'] = opts[u'saltenv']
+            opts['environment'] = opts['saltenv']
         else:
             log.warning(
-                u'The \'environment\' minion config option has been renamed '
-                u'to \'saltenv\'. Using %s as the \'saltenv\' config value.',
-                opts[u'environment']
+                'The \'environment\' minion config option has been renamed '
+                'to \'saltenv\'. Using %s as the \'saltenv\' config value.',
+                opts['environment']
             )
-            opts[u'saltenv'] = opts[u'environment']
+            opts['saltenv'] = opts['environment']

     opts['__cli'] = os.path.basename(sys.argv[0])

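The warning branches above collapse the legacy 'environment' key into 'saltenv' (and keep the old key populated for backwards compatibility). A compact standalone version of that reconciliation, using a plain dict and no Salt imports, might look like this:

# Standalone sketch of the saltenv/environment reconciliation shown above.
import logging

log = logging.getLogger(__name__)

def reconcile_saltenv(opts):
    if 'environment' in opts:
        if 'saltenv' in opts:
            log.warning("Both 'saltenv' and 'environment' are set; "
                        "ignoring 'environment' in favor of 'saltenv'.")
            opts['environment'] = opts['saltenv']
        else:
            log.warning("'environment' has been renamed to 'saltenv'; "
                        "using %s as the 'saltenv' value.", opts['environment'])
            opts['saltenv'] = opts['environment']
    return opts

print(reconcile_saltenv({'environment': 'dev'}))  # both keys end up as 'dev'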
@ -3792,23 +3792,23 @@ def apply_master_config(overrides=None, defaults=None):
|
|||
if overrides:
|
||||
opts.update(overrides)
|
||||
|
||||
if u'environment' in opts:
|
||||
if u'saltenv' in opts:
|
||||
if 'environment' in opts:
|
||||
if 'saltenv' in opts:
|
||||
log.warning(
|
||||
u'The \'saltenv\' and \'environment\' master config options '
|
||||
u'cannot both be used. Ignoring \'environment\' in favor of '
|
||||
u'\'saltenv\'.',
|
||||
'The \'saltenv\' and \'environment\' master config options '
|
||||
'cannot both be used. Ignoring \'environment\' in favor of '
|
||||
'\'saltenv\'.',
|
||||
)
|
||||
# Set environment to saltenv in case someone's custom runner is
|
||||
# refrencing __opts__['environment']
|
||||
opts[u'environment'] = opts[u'saltenv']
|
||||
opts['environment'] = opts['saltenv']
|
||||
else:
|
||||
log.warning(
|
||||
u'The \'environment\' master config option has been renamed '
|
||||
u'to \'saltenv\'. Using %s as the \'saltenv\' config value.',
|
||||
opts[u'environment']
|
||||
'The \'environment\' master config option has been renamed '
|
||||
'to \'saltenv\'. Using %s as the \'saltenv\' config value.',
|
||||
opts['environment']
|
||||
)
|
||||
opts[u'saltenv'] = opts[u'environment']
|
||||
opts['saltenv'] = opts['environment']
|
||||
|
||||
if len(opts['sock_dir']) > len(opts['cachedir']) + 10:
|
||||
opts['sock_dir'] = os.path.join(opts['cachedir'], '.salt-unix')
|
||||
|
|
salt/crypt.py: 648 lines changed (diff suppressed because it is too large)
@ -2,7 +2,7 @@
|
|||
'''
|
||||
This module is a central location for all salt exceptions
|
||||
'''
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
# Import python libs
|
||||
import copy
|
||||
|
@@ -31,16 +31,34 @@ def get_error_message(error):
     '''
     Get human readable message from Python Exception
     '''
-    return error.args[0] if error.args else u''
+    return error.args[0] if error.args else ''


 class SaltException(Exception):
     '''
     Base exception class; all Salt-specific exceptions should subclass this
     '''
-    def __init__(self, message=u''):
-        super(SaltException, self).__init__(message)
-        self.strerror = message
+    def __init__(self, message=''):
+        # Avoid circular import
+        import salt.utils.stringutils
+        if six.PY3 or isinstance(message, unicode): # pylint: disable=incompatible-py3-code
+            super(SaltException, self).__init__(
+                salt.utils.stringutils.to_str(message)
+            )
+            self.message = self.strerror = message
+        elif isinstance(message, str):
+            super(SaltException, self).__init__(message)
+            self.message = self.strerror = \
+                salt.utils.stringutils.to_unicode(message)
+        else:
+            # Some non-string input was passed. Run the parent dunder init with
+            # a str version, and convert the passed value to unicode for the
+            # message/strerror attributes.
+            super(SaltException, self).__init__(str(message))
+            self.message = self.strerror = unicode(message) # pylint: disable=incompatible-py3-code

     def __unicode__(self):
         return self.strerror

     def pack(self):
         '''
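On Python 3 most of the branches above collapse, because there is only one text type. A self-contained Python 3 sketch of the same idea, coercing whatever was passed into a text message while keeping a .strerror attribute, with plain decoding in place of salt.utils.stringutils:

# Python 3 sketch of the message normalisation above; DemoException is
# illustrative, not the real SaltException.
class DemoException(Exception):
    def __init__(self, message=''):
        if isinstance(message, bytes):
            message = message.decode('utf-8', 'replace')
        elif not isinstance(message, str):
            message = str(message)
        super().__init__(message)
        self.message = self.strerror = message

try:
    raise DemoException(b'broken \xc3\xa9 payload')
except DemoException as exc:
    print(exc.strerror)  # decoded text, never raw bytes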
@ -49,7 +67,7 @@ class SaltException(Exception):
|
|||
'''
|
||||
if six.PY3:
|
||||
# The message should be a str type, not a unicode
|
||||
return {u'message': str(self), u'args': self.args}
|
||||
return {'message': str(self), 'args': self.args}
|
||||
return dict(message=self.__unicode__(), args=self.args)
|
||||
|
||||
|
||||
|
@ -100,16 +118,18 @@ class CommandExecutionError(SaltException):
|
|||
Used when a module runs a command which returns an error and wants
|
||||
to show the user the output gracefully instead of dying
|
||||
'''
|
||||
def __init__(self, message=u'', info=None):
|
||||
self.error = exc_str_prefix = message
|
||||
def __init__(self, message='', info=None):
|
||||
# Avoid circular import
|
||||
import salt.utils.stringutils
|
||||
self.error = exc_str_prefix = salt.utils.stringutils.to_unicode(message)
|
||||
self.info = info
|
||||
if self.info:
|
||||
if exc_str_prefix:
|
||||
if exc_str_prefix[-1] not in u'.?!':
|
||||
exc_str_prefix += u'.'
|
||||
exc_str_prefix += u' '
|
||||
if exc_str_prefix[-1] not in '.?!':
|
||||
exc_str_prefix += '.'
|
||||
exc_str_prefix += ' '
|
||||
|
||||
exc_str_prefix += u'Additional info follows:\n\n'
|
||||
exc_str_prefix += 'Additional info follows:\n\n'
|
||||
# NOTE: exc_str will be passed to the parent class' constructor and
|
||||
# become self.strerror.
|
||||
exc_str = exc_str_prefix + _nested_output(self.info)
|
||||
|
@ -120,7 +140,7 @@ class CommandExecutionError(SaltException):
|
|||
# this information would be redundant).
|
||||
if isinstance(self.info, dict):
|
||||
info_without_changes = copy.deepcopy(self.info)
|
||||
info_without_changes.pop(u'changes', None)
|
||||
info_without_changes.pop('changes', None)
|
||||
if info_without_changes:
|
||||
self.strerror_without_changes = \
|
||||
exc_str_prefix + _nested_output(info_without_changes)
|
||||
|
@ -134,6 +154,9 @@ class CommandExecutionError(SaltException):
|
|||
else:
|
||||
self.strerror_without_changes = exc_str = self.error
|
||||
|
||||
# We call the parent __init__ last instead of first because we need the
|
||||
# logic above to derive the message string to use for the exception
|
||||
# message.
|
||||
super(CommandExecutionError, self).__init__(exc_str)
|
||||
|
||||
|
||||
|
@ -165,13 +188,13 @@ class FileLockError(SaltException):
|
|||
'''
|
||||
Used when an error occurs obtaining a file lock
|
||||
'''
|
||||
def __init__(self, msg, time_start=None, *args, **kwargs):
|
||||
super(FileLockError, self).__init__(msg, *args, **kwargs)
|
||||
def __init__(self, message, time_start=None, *args, **kwargs):
|
||||
super(FileLockError, self).__init__(message, *args, **kwargs)
|
||||
if time_start is None:
|
||||
log.warning(
|
||||
u'time_start should be provided when raising a FileLockError. '
|
||||
u'Defaulting to current time as a fallback, but this may '
|
||||
u'result in an inaccurate timeout.'
|
||||
'time_start should be provided when raising a FileLockError. '
|
||||
'Defaulting to current time as a fallback, but this may '
|
||||
'result in an inaccurate timeout.'
|
||||
)
|
||||
self.time_start = time.time()
|
||||
else:
|
||||
|
@ -188,10 +211,9 @@ class GitLockError(SaltException):
|
|||
this exception class can be caught in a try/except without being caught as
|
||||
an OSError.
|
||||
'''
|
||||
def __init__(self, errno, strerror, *args, **kwargs):
|
||||
super(GitLockError, self).__init__(strerror, *args, **kwargs)
|
||||
def __init__(self, errno, message, *args, **kwargs):
|
||||
super(GitLockError, self).__init__(message, *args, **kwargs)
|
||||
self.errno = errno
|
||||
self.strerror = strerror
|
||||
|
||||
|
||||
class GitRemoteError(SaltException):
|
||||
|
@ -224,28 +246,29 @@ class SaltRenderError(SaltException):
|
|||
def __init__(self,
|
||||
message,
|
||||
line_num=None,
|
||||
buf=u'',
|
||||
marker=u' <======================',
|
||||
buf='',
|
||||
marker=' <======================',
|
||||
trace=None):
|
||||
# Avoid circular import
|
||||
import salt.utils.stringutils
|
||||
self.error = message
|
||||
exc_str = copy.deepcopy(message)
|
||||
exc_str = salt.utils.stringutils.to_unicode(message)
|
||||
self.line_num = line_num
|
||||
self.buffer = buf
|
||||
self.context = u''
|
||||
self.context = ''
|
||||
if trace:
|
||||
exc_str += u'\n{0}\n'.format(trace)
|
||||
exc_str += '\n{0}\n'.format(trace)
|
||||
if self.line_num and self.buffer:
|
||||
# Avoid circular import
|
||||
import salt.utils.stringutils
|
||||
import salt.utils.templates
|
||||
self.context = salt.utils.templates.get_context(
|
||||
self.buffer,
|
||||
self.line_num,
|
||||
marker=marker
|
||||
)
|
||||
exc_str += '; line {0}\n\n{1}'.format( # future lint: disable=non-unicode-string
|
||||
exc_str += '; line {0}\n\n{1}'.format(
|
||||
self.line_num,
|
||||
salt.utils.stringutils.to_str(self.context),
|
||||
salt.utils.stringutils.to_unicode(self.context),
|
||||
)
|
||||
super(SaltRenderError, self).__init__(exc_str)
|
||||
|
||||
|
@ -256,8 +279,8 @@ class SaltClientTimeout(SaltException):
|
|||
|
||||
Takes the ``jid`` as a parameter
|
||||
'''
|
||||
def __init__(self, msg, jid=None, *args, **kwargs):
|
||||
super(SaltClientTimeout, self).__init__(msg, *args, **kwargs)
|
||||
def __init__(self, message, jid=None, *args, **kwargs):
|
||||
super(SaltClientTimeout, self).__init__(message, *args, **kwargs)
|
||||
self.jid = jid
|
||||
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -15,7 +15,7 @@ be in the :conf_master:`fileserver_backend` list to enable this backend.
|
|||
Fileserver environments are defined using the :conf_master:`file_roots`
|
||||
configuration option.
|
||||
'''
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
|
@ -30,6 +30,7 @@ import salt.utils.gzip_util
|
|||
import salt.utils.hashutils
|
||||
import salt.utils.path
|
||||
import salt.utils.platform
|
||||
import salt.utils.stringutils
|
||||
import salt.utils.versions
|
||||
from salt.ext import six
|
||||
|
||||
|
@ -229,7 +230,7 @@ def file_hash(load, fnd):
|
|||
cache_path = os.path.join(__opts__['cachedir'],
|
||||
'roots/hash',
|
||||
load['saltenv'],
|
||||
u'{0}.hash.{1}'.format(fnd['rel'],
|
||||
'{0}.hash.{1}'.format(fnd['rel'],
|
||||
__opts__['hash_type']))
|
||||
# if we have a cache, serve that if the mtime hasn't changed
|
||||
if os.path.exists(cache_path):
|
||||
|
@ -386,7 +387,7 @@ def _file_lists(load, form):
|
|||
|
||||
for path in __opts__['file_roots'][load['saltenv']]:
|
||||
for root, dirs, files in os.walk(
|
||||
path,
|
||||
salt.utils.stringutils.to_unicode(path),
|
||||
followlinks=__opts__['fileserver_followsymlinks']):
|
||||
_add_to(ret['dirs'], path, root, dirs)
|
||||
_add_to(ret['files'], path, root, files)
|
||||
|
|
salt/key.py: 692 lines changed (diff suppressed because it is too large)
salt/loader.py: 638 lines changed (diff suppressed because it is too large)
salt/master.py: 1078 lines changed (diff suppressed because it is too large)
salt/minion.py: 2092 lines changed (diff suppressed because it is too large)
|
@@ -46,15 +46,15 @@ except ImportError:
     except ImportError:
         # TODO: Come up with a sane way to get a configured logfile
         # and write to the logfile when this error is hit also
-        LOG_FORMAT = u'[%(levelname)-8s] %(message)s'
+        LOG_FORMAT = '[%(levelname)-8s] %(message)s'
         salt.log.setup_console_logger(log_format=LOG_FORMAT)
-        log.fatal(u'Unable to import msgpack or msgpack_pure python modules')
+        log.fatal('Unable to import msgpack or msgpack_pure python modules')
         # Don't exit if msgpack is not available, this is to make local mode
         # work without msgpack
         #sys.exit(salt.defaults.exitcodes.EX_GENERIC)


-if HAS_MSGPACK and not hasattr(msgpack, u'exceptions'):
+if HAS_MSGPACK and not hasattr(msgpack, 'exceptions'):
     class PackValueError(Exception):
         '''
         older versions of msgpack do not have PackValueError
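The hasattr() check above guards a compatibility shim: when the installed msgpack predates the exceptions submodule, local stand-ins are defined so later code can still catch the errors by name. A sketch of that pattern (the shim shape here is an assumption, simplified from what the module actually defines):

# Illustrative shim for old msgpack releases that lack msgpack.exceptions.
try:
    import msgpack
    HAS_MSGPACK = True
except ImportError:
    msgpack = None
    HAS_MSGPACK = False

if HAS_MSGPACK and not hasattr(msgpack, 'exceptions'):
    class PackValueError(Exception):
        '''Stand-in for very old msgpack releases.'''

    class exceptions(object):
        PackValueError = PackValueError

    msgpack.exceptions = exceptions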
@ -89,11 +89,11 @@ def format_payload(enc, **kwargs):
|
|||
Pass in the required arguments for a payload, the enc type and the cmd,
|
||||
then a list of keyword args to generate the body of the load dict.
|
||||
'''
|
||||
payload = {u'enc': enc}
|
||||
payload = {'enc': enc}
|
||||
load = {}
|
||||
for key in kwargs:
|
||||
load[key] = kwargs[key]
|
||||
payload[u'load'] = load
|
||||
payload['load'] = load
|
||||
return package(payload)
|
||||
|
||||
|
||||
|
@ -104,11 +104,11 @@ class Serial(object):
|
|||
'''
|
||||
def __init__(self, opts):
|
||||
if isinstance(opts, dict):
|
||||
self.serial = opts.get(u'serial', u'msgpack')
|
||||
self.serial = opts.get('serial', 'msgpack')
|
||||
elif isinstance(opts, six.string_types):
|
||||
self.serial = opts
|
||||
else:
|
||||
self.serial = u'msgpack'
|
||||
self.serial = 'msgpack'
|
||||
|
||||
def loads(self, msg, encoding=None, raw=False):
|
||||
'''
|
||||
|
@ -141,12 +141,12 @@ class Serial(object):
|
|||
ret = salt.transport.frame.decode_embedded_strs(ret)
|
||||
except Exception as exc:
|
||||
log.critical(
|
||||
u'Could not deserialize msgpack message. This often happens '
|
||||
u'when trying to read a file not in binary mode. '
|
||||
u'To see message payload, enable debug logging and retry. '
|
||||
u'Exception: %s', exc
|
||||
'Could not deserialize msgpack message. This often happens '
|
||||
'when trying to read a file not in binary mode. '
|
||||
'To see message payload, enable debug logging and retry. '
|
||||
'Exception: %s', exc
|
||||
)
|
||||
log.debug(u'Msgpack deserialization failure on message: %s', msg)
|
||||
log.debug('Msgpack deserialization failure on message: %s', msg)
|
||||
gc.collect()
|
||||
raise
|
||||
finally:
|
||||
|
@ -161,7 +161,7 @@ class Serial(object):
|
|||
fn_.close()
|
||||
if data:
|
||||
if six.PY3:
|
||||
return self.loads(data, encoding=u'utf-8')
|
||||
return self.loads(data, encoding='utf-8')
|
||||
else:
|
||||
return self.loads(data)
|
||||
|
||||
|
@ -218,7 +218,7 @@ class Serial(object):
|
|||
return msgpack.ExtType(78, obj)
|
||||
|
||||
def dt_encode(obj):
|
||||
datetime_str = obj.strftime(u"%Y%m%dT%H:%M:%S.%f")
|
||||
datetime_str = obj.strftime("%Y%m%dT%H:%M:%S.%f")
|
||||
if msgpack.version >= (0, 4, 0):
|
||||
return msgpack.packb(datetime_str, default=default, use_bin_type=use_bin_type)
|
||||
else:
|
||||
|
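datetime objects are not natively serialisable by msgpack, so the dt_encode hook above renders them as strings before packing. A standalone sketch of that technique, reusing the same strftime format string (the payload keys are illustrative):

# Serialising datetimes through msgpack's default= hook, as in dt_encode above.
import datetime
import msgpack

def dt_encode(obj):
    if isinstance(obj, datetime.datetime):
        return obj.strftime('%Y%m%dT%H:%M:%S.%f')
    raise TypeError('cannot serialize {0!r}'.format(obj))

payload = {'scheduled': datetime.datetime(2017, 10, 3, 12, 30)}
packed = msgpack.packb(payload, default=dt_encode, use_bin_type=True)
print(msgpack.unpackb(packed, raw=False))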
@ -244,7 +244,7 @@ class Serial(object):
|
|||
return obj
|
||||
|
||||
def immutable_encoder(obj):
|
||||
log.debug(u'IMMUTABLE OBJ: %s', obj)
|
||||
log.debug('IMMUTABLE OBJ: %s', obj)
|
||||
if isinstance(obj, immutabletypes.ImmutableDict):
|
||||
return dict(obj)
|
||||
if isinstance(obj, immutabletypes.ImmutableList):
|
||||
|
@ -252,12 +252,12 @@ class Serial(object):
|
|||
if isinstance(obj, immutabletypes.ImmutableSet):
|
||||
return set(obj)
|
||||
|
||||
if u"datetime.datetime" in str(e):
|
||||
if "datetime.datetime" in str(e):
|
||||
if msgpack.version >= (0, 4, 0):
|
||||
return msgpack.dumps(datetime_encoder(msg), use_bin_type=use_bin_type)
|
||||
else:
|
||||
return msgpack.dumps(datetime_encoder(msg))
|
||||
elif u"Immutable" in str(e):
|
||||
elif "Immutable" in str(e):
|
||||
if msgpack.version >= (0, 4, 0):
|
||||
return msgpack.dumps(msg, default=immutable_encoder, use_bin_type=use_bin_type)
|
||||
else:
|
||||
|
@ -291,8 +291,8 @@ class Serial(object):
|
|||
return msgpack.dumps(odict_encoder(msg))
|
||||
except (SystemError, TypeError) as exc: # pylint: disable=W0705
|
||||
log.critical(
|
||||
u'Unable to serialize message! Consider upgrading msgpack. '
|
||||
u'Message which failed was %s, with exception %s', msg, exc
|
||||
'Unable to serialize message! Consider upgrading msgpack. '
|
||||
'Message which failed was %s, with exception %s', msg, exc
|
||||
)
|
||||
|
||||
def dump(self, msg, fn_):
|
||||
|
@ -313,7 +313,7 @@ class SREQ(object):
|
|||
'''
|
||||
Create a generic interface to wrap salt zeromq req calls.
|
||||
'''
|
||||
def __init__(self, master, id_=u'', serial=u'msgpack', linger=0, opts=None):
|
||||
def __init__(self, master, id_='', serial='msgpack', linger=0, opts=None):
|
||||
self.master = master
|
||||
self.id_ = id_
|
||||
self.serial = Serial(serial)
|
||||
|
@ -327,20 +327,20 @@ class SREQ(object):
|
|||
'''
|
||||
Lazily create the socket.
|
||||
'''
|
||||
if not hasattr(self, u'_socket'):
|
||||
if not hasattr(self, '_socket'):
|
||||
# create a new one
|
||||
self._socket = self.context.socket(zmq.REQ)
|
||||
if hasattr(zmq, u'RECONNECT_IVL_MAX'):
|
||||
if hasattr(zmq, 'RECONNECT_IVL_MAX'):
|
||||
self._socket.setsockopt(
|
||||
zmq.RECONNECT_IVL_MAX, 5000
|
||||
)
|
||||
|
||||
self._set_tcp_keepalive()
|
||||
if self.master.startswith(u'tcp://['):
|
||||
if self.master.startswith('tcp://['):
|
||||
# Hint PF type if bracket enclosed IPv6 address
|
||||
if hasattr(zmq, u'IPV6'):
|
||||
if hasattr(zmq, 'IPV6'):
|
||||
self._socket.setsockopt(zmq.IPV6, 1)
|
||||
elif hasattr(zmq, u'IPV4ONLY'):
|
||||
elif hasattr(zmq, 'IPV4ONLY'):
|
||||
self._socket.setsockopt(zmq.IPV4ONLY, 0)
|
||||
self._socket.linger = self.linger
|
||||
if self.id_:
|
||||
|
@ -349,37 +349,37 @@ class SREQ(object):
|
|||
return self._socket
|
||||
|
||||
def _set_tcp_keepalive(self):
|
||||
if hasattr(zmq, u'TCP_KEEPALIVE') and self.opts:
|
||||
if u'tcp_keepalive' in self.opts:
|
||||
if hasattr(zmq, 'TCP_KEEPALIVE') and self.opts:
|
||||
if 'tcp_keepalive' in self.opts:
|
||||
self._socket.setsockopt(
|
||||
zmq.TCP_KEEPALIVE, self.opts[u'tcp_keepalive']
|
||||
zmq.TCP_KEEPALIVE, self.opts['tcp_keepalive']
|
||||
)
|
||||
if u'tcp_keepalive_idle' in self.opts:
|
||||
if 'tcp_keepalive_idle' in self.opts:
|
||||
self._socket.setsockopt(
|
||||
zmq.TCP_KEEPALIVE_IDLE, self.opts[u'tcp_keepalive_idle']
|
||||
zmq.TCP_KEEPALIVE_IDLE, self.opts['tcp_keepalive_idle']
|
||||
)
|
||||
if u'tcp_keepalive_cnt' in self.opts:
|
||||
if 'tcp_keepalive_cnt' in self.opts:
|
||||
self._socket.setsockopt(
|
||||
zmq.TCP_KEEPALIVE_CNT, self.opts[u'tcp_keepalive_cnt']
|
||||
zmq.TCP_KEEPALIVE_CNT, self.opts['tcp_keepalive_cnt']
|
||||
)
|
||||
if u'tcp_keepalive_intvl' in self.opts:
|
||||
if 'tcp_keepalive_intvl' in self.opts:
|
||||
self._socket.setsockopt(
|
||||
zmq.TCP_KEEPALIVE_INTVL, self.opts[u'tcp_keepalive_intvl']
|
||||
zmq.TCP_KEEPALIVE_INTVL, self.opts['tcp_keepalive_intvl']
|
||||
)
|
||||
|
||||
def clear_socket(self):
|
||||
'''
|
||||
delete socket if you have it
|
||||
'''
|
||||
if hasattr(self, u'_socket'):
|
||||
if hasattr(self, '_socket'):
|
||||
if isinstance(self.poller.sockets, dict):
|
||||
sockets = list(self.poller.sockets.keys())
|
||||
for socket in sockets:
|
||||
log.trace(u'Unregistering socket: %s', socket)
|
||||
log.trace('Unregistering socket: %s', socket)
|
||||
self.poller.unregister(socket)
|
||||
else:
|
||||
for socket in self.poller.sockets:
|
||||
log.trace(u'Unregistering socket: %s', socket)
|
||||
log.trace('Unregistering socket: %s', socket)
|
||||
self.poller.unregister(socket[0])
|
||||
del self._socket
|
||||
|
||||
|
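The _set_tcp_keepalive() hunk above maps config keys onto ZeroMQ socket options, guarding each one with hasattr() because older libzmq builds may not expose them. A compact sketch of that mapping (option names taken from the diff; the loop structure is an illustration, not Salt's code):

# Mapping a config dict onto ZeroMQ TCP keepalive socket options.
import zmq

KEEPALIVE_OPTS = {
    'tcp_keepalive': 'TCP_KEEPALIVE',
    'tcp_keepalive_idle': 'TCP_KEEPALIVE_IDLE',
    'tcp_keepalive_cnt': 'TCP_KEEPALIVE_CNT',
    'tcp_keepalive_intvl': 'TCP_KEEPALIVE_INTVL',
}

def set_tcp_keepalive(socket, opts):
    for key, attr in KEEPALIVE_OPTS.items():
        # Only set options that are both configured and supported by this build.
        if key in opts and hasattr(zmq, attr):
            socket.setsockopt(getattr(zmq, attr), opts[key])

ctx = zmq.Context()
sock = ctx.socket(zmq.REQ)
set_tcp_keepalive(sock, {'tcp_keepalive': 1, 'tcp_keepalive_idle': 300})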
@ -387,8 +387,8 @@ class SREQ(object):
|
|||
'''
|
||||
Takes two arguments, the encryption type and the base payload
|
||||
'''
|
||||
payload = {u'enc': enc}
|
||||
payload[u'load'] = load
|
||||
payload = {'enc': enc}
|
||||
payload['load'] = load
|
||||
pkg = self.serial.dumps(payload)
|
||||
self.socket.send(pkg)
|
||||
self.poller.register(self.socket, zmq.POLLIN)
|
||||
|
@ -400,14 +400,14 @@ class SREQ(object):
|
|||
break
|
||||
if tries > 1:
|
||||
log.info(
|
||||
u'SaltReqTimeoutError: after %s seconds. (Try %s of %s)',
|
||||
'SaltReqTimeoutError: after %s seconds. (Try %s of %s)',
|
||||
timeout, tried, tries
|
||||
)
|
||||
if tried >= tries:
|
||||
self.clear_socket()
|
||||
raise SaltReqTimeoutError(
|
||||
u'SaltReqTimeoutError: after {0} seconds, ran {1} '
|
||||
u'tries'.format(timeout * tried, tried)
|
||||
'SaltReqTimeoutError: after {0} seconds, ran {1} '
|
||||
'tries'.format(timeout * tried, tried)
|
||||
)
|
||||
return self.serial.loads(self.socket.recv())
|
||||
|
||||
|
@ -415,8 +415,8 @@ class SREQ(object):
|
|||
'''
|
||||
Detect the encryption type based on the payload
|
||||
'''
|
||||
enc = payload.get(u'enc', u'clear')
|
||||
load = payload.get(u'load', {})
|
||||
enc = payload.get('enc', 'clear')
|
||||
load = payload.get('load', {})
|
||||
return self.send(enc, load, tries, timeout)
|
||||
|
||||
def destroy(self):
|
||||
|
|
|
@@ -354,7 +354,7 @@ def render(template, saltenv='base', sls='', tmplpath=None, rendered_sls=None, *
     # is compiled to.

     # __name__ can't be assigned a unicode
-    mod.__name__ = str(sls)  # future lint: disable=non-unicode-string
+    mod.__name__ = str(sls)  # future lint: disable=blacklisted-function

     # to workaround state.py's use of copy.deepcopy(chunk)
     mod.__deepcopy__ = lambda x: mod
|
salt/runner.py: 126 lines changed
|
@@ -38,8 +38,8 @@ class RunnerClient(mixins.SyncClientMixin, mixins.AsyncClientMixin, object):
     eauth user must be authorized to execute runner modules: (``@runner``).
     Only the :py:meth:`master_call` below supports eauth.
     '''
-    client = u'runner'
-    tag_prefix = u'run'
+    client = 'runner'
+    tag_prefix = 'run'

     def __init__(self, opts):
         self.opts = opts
|
|||
|
||||
@property
|
||||
def functions(self):
|
||||
if not hasattr(self, u'_functions'):
|
||||
if not hasattr(self, u'utils'):
|
||||
if not hasattr(self, '_functions'):
|
||||
if not hasattr(self, 'utils'):
|
||||
self.utils = salt.loader.utils(self.opts)
|
||||
# Must be self.functions for mixin to work correctly :-/
|
||||
try:
|
||||
|
@ -73,19 +73,19 @@ class RunnerClient(mixins.SyncClientMixin, mixins.AsyncClientMixin, object):
|
|||
New-style: ``{'fun': 'jobs.lookup_jid', 'kwarg': {'jid': '1234'}}``
|
||||
CLI-style: ``{'fun': 'jobs.lookup_jid', 'arg': ['jid="1234"']}``
|
||||
'''
|
||||
fun = low.pop(u'fun')
|
||||
fun = low.pop('fun')
|
||||
verify_fun(self.functions, fun)
|
||||
|
||||
eauth_creds = dict([(i, low.pop(i)) for i in [
|
||||
u'username', u'password', u'eauth', u'token', u'client', u'user', u'key',
|
||||
'username', 'password', 'eauth', 'token', 'client', 'user', 'key',
|
||||
] if i in low])
|
||||
|
||||
# Run name=value args through parse_input. We don't need to run kwargs
|
||||
# through because there is no way to send name=value strings in the low
|
||||
# dict other than by including an `arg` array.
|
||||
_arg, _kwarg = salt.utils.args.parse_input(
|
||||
low.pop(u'arg', []), condition=False)
|
||||
_kwarg.update(low.pop(u'kwarg', {}))
|
||||
low.pop('arg', []), condition=False)
|
||||
_kwarg.update(low.pop('kwarg', {}))
|
||||
|
||||
# If anything hasn't been pop()'ed out of low by this point it must be
|
||||
# an old-style kwarg.
|
||||
|
@ -102,7 +102,7 @@ class RunnerClient(mixins.SyncClientMixin, mixins.AsyncClientMixin, object):
|
|||
munged,
|
||||
ignore_invalid=True)
|
||||
|
||||
return dict(fun=fun, kwarg={u'kwarg': kwarg, u'arg': arg},
|
||||
return dict(fun=fun, kwarg={'kwarg': kwarg, 'arg': arg},
|
||||
**eauth_creds)
|
||||
|
||||
def cmd_async(self, low):
|
||||
|
@ -169,10 +169,10 @@ class Runner(RunnerClient):
|
|||
'''
|
||||
Print out the documentation!
|
||||
'''
|
||||
arg = self.opts.get(u'fun', None)
|
||||
arg = self.opts.get('fun', None)
|
||||
docs = super(Runner, self).get_docs(arg)
|
||||
for fun in sorted(docs):
|
||||
display_output(u'{0}:'.format(fun), u'text', self.opts)
|
||||
display_output('{0}:'.format(fun), 'text', self.opts)
|
||||
print(docs[fun])
|
||||
|
||||
# TODO: move to mixin whenever we want a salt-wheel cli
|
||||
|
@ -182,115 +182,115 @@ class Runner(RunnerClient):
|
|||
'''
|
||||
import salt.minion
|
||||
ret = {}
|
||||
if self.opts.get(u'doc', False):
|
||||
if self.opts.get('doc', False):
|
||||
self.print_docs()
|
||||
else:
|
||||
low = {u'fun': self.opts[u'fun']}
|
||||
low = {'fun': self.opts['fun']}
|
||||
try:
|
||||
# Allocate a jid
|
||||
async_pub = self._gen_async_pub()
|
||||
self.jid = async_pub[u'jid']
|
||||
self.jid = async_pub['jid']
|
||||
|
||||
fun_args = salt.utils.args.parse_input(
|
||||
self.opts[u'arg'],
|
||||
no_parse=self.opts.get(u'no_parse', []))
|
||||
self.opts['arg'],
|
||||
no_parse=self.opts.get('no_parse', []))
|
||||
|
||||
verify_fun(self.functions, low[u'fun'])
|
||||
verify_fun(self.functions, low['fun'])
|
||||
args, kwargs = salt.minion.load_args_and_kwargs(
|
||||
self.functions[low[u'fun']],
|
||||
self.functions[low['fun']],
|
||||
fun_args)
|
||||
low[u'arg'] = args
|
||||
low[u'kwarg'] = kwargs
|
||||
low['arg'] = args
|
||||
low['kwarg'] = kwargs
|
||||
|
||||
if self.opts.get(u'eauth'):
|
||||
if u'token' in self.opts:
|
||||
if self.opts.get('eauth'):
|
||||
if 'token' in self.opts:
|
||||
try:
|
||||
with salt.utils.files.fopen(os.path.join(self.opts[u'key_dir'], u'.root_key'), u'r') as fp_:
|
||||
low[u'key'] = fp_.readline()
|
||||
with salt.utils.files.fopen(os.path.join(self.opts['key_dir'], '.root_key'), 'r') as fp_:
|
||||
low['key'] = fp_.readline()
|
||||
except IOError:
|
||||
low[u'token'] = self.opts[u'token']
|
||||
low['token'] = self.opts['token']
|
||||
|
||||
# If using eauth and a token hasn't already been loaded into
|
||||
# low, prompt the user to enter auth credentials
|
||||
if u'token' not in low and u'key' not in low and self.opts[u'eauth']:
|
||||
if 'token' not in low and 'key' not in low and self.opts['eauth']:
|
||||
# This is expensive. Don't do it unless we need to.
|
||||
import salt.auth
|
||||
resolver = salt.auth.Resolver(self.opts)
|
||||
res = resolver.cli(self.opts[u'eauth'])
|
||||
if self.opts[u'mktoken'] and res:
|
||||
res = resolver.cli(self.opts['eauth'])
|
||||
if self.opts['mktoken'] and res:
|
||||
tok = resolver.token_cli(
|
||||
self.opts[u'eauth'],
|
||||
self.opts['eauth'],
|
||||
res
|
||||
)
|
||||
if tok:
|
||||
low[u'token'] = tok.get(u'token', u'')
|
||||
low['token'] = tok.get('token', '')
|
||||
if not res:
|
||||
log.error(u'Authentication failed')
|
||||
log.error('Authentication failed')
|
||||
return ret
|
||||
low.update(res)
|
||||
low[u'eauth'] = self.opts[u'eauth']
|
||||
low['eauth'] = self.opts['eauth']
|
||||
else:
|
||||
user = salt.utils.user.get_specific_user()
|
||||
|
||||
if low[u'fun'] == u'state.orchestrate':
|
||||
low[u'kwarg'][u'orchestration_jid'] = async_pub[u'jid']
|
||||
if low['fun'] == 'state.orchestrate':
|
||||
low['kwarg']['orchestration_jid'] = async_pub['jid']
|
||||
|
||||
# Run the runner!
|
||||
if self.opts.get(u'async', False):
|
||||
if self.opts.get(u'eauth'):
|
||||
if self.opts.get('async', False):
|
||||
if self.opts.get('eauth'):
|
||||
async_pub = self.cmd_async(low)
|
||||
else:
|
||||
async_pub = self.async(self.opts[u'fun'],
|
||||
async_pub = self.async(self.opts['fun'],
|
||||
low,
|
||||
user=user,
|
||||
pub=async_pub)
|
||||
# by default: info will be not enougth to be printed out !
|
||||
log.warning(
|
||||
u'Running in async mode. Results of this execution may '
|
||||
u'be collected by attaching to the master event bus or '
|
||||
u'by examing the master job cache, if configured. '
|
||||
u'This execution is running under tag %s', async_pub[u'tag']
|
||||
'Running in async mode. Results of this execution may '
|
||||
'be collected by attaching to the master event bus or '
|
||||
'by examing the master job cache, if configured. '
|
||||
'This execution is running under tag %s', async_pub['tag']
|
||||
)
|
||||
return async_pub[u'jid'] # return the jid
|
||||
return async_pub['jid'] # return the jid
|
||||
|
||||
# otherwise run it in the main process
|
||||
if self.opts.get(u'eauth'):
|
||||
if self.opts.get('eauth'):
|
||||
ret = self.cmd_sync(low)
|
||||
if isinstance(ret, dict) and set(ret) == set((u'data', u'outputter')):
|
||||
outputter = ret[u'outputter']
|
||||
ret = ret[u'data']
|
||||
if isinstance(ret, dict) and set(ret) == set(('data', 'outputter')):
|
||||
outputter = ret['outputter']
|
||||
ret = ret['data']
|
||||
else:
|
||||
outputter = None
|
||||
display_output(ret, outputter, self.opts)
|
||||
else:
|
||||
ret = self._proc_function(self.opts[u'fun'],
|
||||
ret = self._proc_function(self.opts['fun'],
|
||||
low,
|
||||
user,
|
||||
async_pub[u'tag'],
|
||||
async_pub[u'jid'],
|
||||
async_pub['tag'],
|
||||
async_pub['jid'],
|
||||
daemonize=False)
|
||||
except salt.exceptions.SaltException as exc:
|
||||
evt = salt.utils.event.get_event(u'master', opts=self.opts)
|
||||
evt.fire_event({u'success': False,
|
||||
u'return': u'{0}'.format(exc),
|
||||
u'retcode': 254,
|
||||
u'fun': self.opts[u'fun'],
|
||||
u'fun_args': fun_args,
|
||||
u'jid': self.jid},
|
||||
tag=u'salt/run/{0}/ret'.format(self.jid))
|
||||
evt = salt.utils.event.get_event('master', opts=self.opts)
|
||||
evt.fire_event({'success': False,
|
||||
'return': '{0}'.format(exc),
|
||||
'retcode': 254,
|
||||
'fun': self.opts['fun'],
|
||||
'fun_args': fun_args,
|
||||
'jid': self.jid},
|
||||
tag='salt/run/{0}/ret'.format(self.jid))
|
||||
# Attempt to grab documentation
|
||||
if u'fun' in low:
|
||||
ret = self.get_docs(u'{0}*'.format(low[u'fun']))
|
||||
if 'fun' in low:
|
||||
ret = self.get_docs('{0}*'.format(low['fun']))
|
||||
else:
|
||||
ret = None
|
||||
|
||||
# If we didn't get docs returned then
|
||||
# return the `not availble` message.
|
||||
if not ret:
|
||||
ret = u'{0}'.format(exc)
|
||||
if not self.opts.get(u'quiet', False):
|
||||
display_output(ret, u'nested', self.opts)
|
||||
ret = '{0}'.format(exc)
|
||||
if not self.opts.get('quiet', False):
|
||||
display_output(ret, 'nested', self.opts)
|
||||
else:
|
||||
log.debug(u'Runner return: %s', ret)
|
||||
log.debug('Runner return: %s', ret)
|
||||
|
||||
return ret
|
||||
|
|
|
@@ -24,7 +24,7 @@ import salt.defaults.exitcodes  # pylint: disable=unused-import
 log = logging.getLogger(__name__)


-def _handle_interrupt(exc, original_exc, hardfail=False, trace=u''):
+def _handle_interrupt(exc, original_exc, hardfail=False, trace=''):
     '''
     if hardfailing:
         If we got the original stacktrace, log it
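The _handle_signals() hunks that follow convert Ctrl-c into a clean SystemExit with a helpful message instead of a raw KeyboardInterrupt traceback. The core of that pattern, stripped down to the standard library:

# Minimal sketch of graceful SIGINT handling, in the spirit of _handle_signals().
import signal
import sys

def handle_sigint(signum, frame):
    # Raise SystemExit with a message rather than letting KeyboardInterrupt
    # dump a traceback at the user.
    sys.exit('\nExiting gracefully on Ctrl-c')

signal.signal(signal.SIGINT, handle_sigint)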
@ -50,16 +50,16 @@ def _handle_signals(client, signum, sigframe):
|
|||
hardcrash = False
|
||||
|
||||
if signum == signal.SIGINT:
|
||||
exit_msg = u'\nExiting gracefully on Ctrl-c'
|
||||
exit_msg = '\nExiting gracefully on Ctrl-c'
|
||||
try:
|
||||
jid = client.local_client.pub_data[u'jid']
|
||||
jid = client.local_client.pub_data['jid']
|
||||
exit_msg += (
|
||||
u'\n'
|
||||
u'This job\'s jid is: {0}\n'
|
||||
u'The minions may not have all finished running and any remaining '
|
||||
u'minions will return upon completion. To look up the return data '
|
||||
u'for this job later, run the following command:\n\n'
|
||||
u'salt-run jobs.lookup_jid {0}'.format(jid)
|
||||
'\n'
|
||||
'This job\'s jid is: {0}\n'
|
||||
'The minions may not have all finished running and any remaining '
|
||||
'minions will return upon completion. To look up the return data '
|
||||
'for this job later, run the following command:\n\n'
|
||||
'salt-run jobs.lookup_jid {0}'.format(jid)
|
||||
)
|
||||
except (AttributeError, KeyError):
|
||||
pass
|
||||
|
@ -68,7 +68,7 @@ def _handle_signals(client, signum, sigframe):
|
|||
|
||||
_handle_interrupt(
|
||||
SystemExit(exit_msg),
|
||||
Exception(u'\nExiting with hard crash on Ctrl-c'),
|
||||
Exception('\nExiting with hard crash on Ctrl-c'),
|
||||
hardcrash, trace=trace)
|
||||
|
||||
|
||||
|
@ -102,7 +102,7 @@ def minion_process():
|
|||
|
||||
# salt_minion spawns this function in a new process
|
||||
|
||||
salt.utils.process.appendproctitle(u'KeepAlive')
|
||||
salt.utils.process.appendproctitle('KeepAlive')
|
||||
|
||||
def handle_hup(manager, sig, frame):
|
||||
manager.minion.reload()
|
||||
|
@ -123,7 +123,7 @@ def minion_process():
|
|||
except OSError as exc:
|
||||
# forcibly exit, regular sys.exit raises an exception-- which
|
||||
# isn't sufficient in a thread
|
||||
log.error(u'Minion process encountered exception: %s', exc)
|
||||
log.error('Minion process encountered exception: %s', exc)
|
||||
os._exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
|
||||
if not salt.utils.platform.is_windows():
|
||||
|
@ -138,13 +138,13 @@ def minion_process():
|
|||
try:
|
||||
minion.start()
|
||||
except (SaltClientError, SaltReqTimeoutError, SaltSystemExit) as exc:
|
||||
log.warning(u'Fatal functionality error caught by minion handler:\n', exc_info=True)
|
||||
log.warning(u'** Restarting minion **')
|
||||
log.warning('Fatal functionality error caught by minion handler:\n', exc_info=True)
|
||||
log.warning('** Restarting minion **')
|
||||
delay = 60
|
||||
if minion is not None and hasattr(minion, u'config'):
|
||||
delay = minion.config.get(u'random_reauth_delay', 60)
|
||||
if minion is not None and hasattr(minion, 'config'):
|
||||
delay = minion.config.get('random_reauth_delay', 60)
|
||||
delay = randint(1, delay)
|
||||
log.info(u'waiting random_reauth_delay %ss', delay)
|
||||
log.info('waiting random_reauth_delay %ss', delay)
|
||||
time.sleep(delay)
|
||||
sys.exit(salt.defaults.exitcodes.SALT_KEEPALIVE)
|
||||
|
||||
|
@ -162,16 +162,16 @@ def salt_minion():
|
|||
|
||||
import salt.cli.daemons
|
||||
import multiprocessing
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
minion = salt.cli.daemons.Minion()
|
||||
minion.start()
|
||||
return
|
||||
|
||||
if u'--disable-keepalive' in sys.argv:
|
||||
sys.argv.remove(u'--disable-keepalive')
|
||||
if '--disable-keepalive' in sys.argv:
|
||||
sys.argv.remove('--disable-keepalive')
|
||||
minion = salt.cli.daemons.Minion()
|
||||
minion.start()
|
||||
return
|
||||
|
@ -263,7 +263,7 @@ def proxy_minion_process(queue):
|
|||
proxyminion = salt.cli.daemons.ProxyMinion()
|
||||
proxyminion.start()
|
||||
except (Exception, SaltClientError, SaltReqTimeoutError, SaltSystemExit) as exc:
|
||||
log.error(u'Proxy Minion failed to start: ', exc_info=True)
|
||||
log.error('Proxy Minion failed to start: ', exc_info=True)
|
||||
restart = True
|
||||
# status is superfluous since the process will be restarted
|
||||
status = salt.defaults.exitcodes.SALT_KEEPALIVE
|
||||
|
@ -272,13 +272,13 @@ def proxy_minion_process(queue):
|
|||
status = exc.code
|
||||
|
||||
if restart is True:
|
||||
log.warning(u'** Restarting proxy minion **')
|
||||
log.warning('** Restarting proxy minion **')
|
||||
delay = 60
|
||||
if proxyminion is not None:
|
||||
if hasattr(proxyminion, u'config'):
|
||||
delay = proxyminion.config.get(u'random_reauth_delay', 60)
|
||||
if hasattr(proxyminion, 'config'):
|
||||
delay = proxyminion.config.get('random_reauth_delay', 60)
|
||||
random_delay = randint(1, delay)
|
||||
log.info(u'Sleeping random_reauth_delay of %s seconds', random_delay)
|
||||
log.info('Sleeping random_reauth_delay of %s seconds', random_delay)
|
||||
# preform delay after minion resources have been cleaned
|
||||
queue.put(random_delay)
|
||||
else:
|
||||
|
@ -293,16 +293,16 @@ def salt_proxy():
|
|||
import salt.cli.daemons
|
||||
import salt.utils.platform
|
||||
import multiprocessing
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
proxyminion = salt.cli.daemons.ProxyMinion()
|
||||
proxyminion.start()
|
||||
return
|
||||
|
||||
if u'--disable-keepalive' in sys.argv:
|
||||
sys.argv.remove(u'--disable-keepalive')
|
||||
if '--disable-keepalive' in sys.argv:
|
||||
sys.argv.remove('--disable-keepalive')
|
||||
proxyminion = salt.cli.daemons.ProxyMinion()
|
||||
proxyminion.start()
|
||||
return
|
||||
|
@ -368,7 +368,7 @@ def salt_key():
|
|||
_install_signal_handlers(client)
|
||||
client.run()
|
||||
except Exception as err:
|
||||
sys.stderr.write(u"Error: {0}\n".format(err))
|
||||
sys.stderr.write("Error: {0}\n".format(err))
|
||||
|
||||
|
||||
def salt_cp():
|
||||
|
@ -388,8 +388,8 @@ def salt_call():
|
|||
salt minion to run.
|
||||
'''
|
||||
import salt.cli.call
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
client = salt.cli.call.SaltCall()
|
||||
_install_signal_handlers(client)
|
||||
client.run()
|
||||
|
@ -400,8 +400,8 @@ def salt_run():
|
|||
Execute a salt convenience routine.
|
||||
'''
|
||||
import salt.cli.run
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
client = salt.cli.run.SaltRun()
|
||||
_install_signal_handlers(client)
|
||||
client.run()
|
||||
|
@ -412,8 +412,8 @@ def salt_ssh():
|
|||
Execute the salt-ssh system
|
||||
'''
|
||||
import salt.cli.ssh
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
try:
|
||||
client = salt.cli.ssh.SaltSSH()
|
||||
_install_signal_handlers(client)
|
||||
|
@ -444,11 +444,11 @@ def salt_cloud():
|
|||
import salt.cloud.cli
|
||||
except ImportError as e:
|
||||
# No salt cloud on Windows
|
||||
log.error(u'Error importing salt cloud: %s', e)
|
||||
print(u'salt-cloud is not available in this system')
|
||||
log.error('Error importing salt cloud: %s', e)
|
||||
print('salt-cloud is not available in this system')
|
||||
sys.exit(salt.defaults.exitcodes.EX_UNAVAILABLE)
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
|
||||
client = salt.cloud.cli.SaltCloud()
|
||||
_install_signal_handlers(client)
|
||||
|
@ -473,8 +473,8 @@ def salt_main():
|
|||
master.
|
||||
'''
|
||||
import salt.cli.salt
|
||||
if u'' in sys.path:
|
||||
sys.path.remove(u'')
|
||||
if '' in sys.path:
|
||||
sys.path.remove('')
|
||||
client = salt.cli.salt.SaltCMD()
|
||||
_install_signal_handlers(client)
|
||||
client.run()
|
||||
|
|
|
@@ -79,7 +79,7 @@ def serialize(obj, **options):

 class EncryptedString(str):

-    yaml_tag = u'!encrypted'
+    yaml_tag = '!encrypted'

     @staticmethod
     def yaml_constructor(loader, tag, node):
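EncryptedString is a str subclass bound to a custom YAML tag. The general PyYAML technique is to register a representer for dumping and a constructor for loading; a sketch of that wiring, with illustrative class and tag names rather than the serializer's real registration code:

# Wiring a tagged string class into PyYAML, similar in spirit to EncryptedString.
import yaml

class TaggedString(str):
    yaml_tag = '!encrypted'

def _represent(dumper, data):
    return dumper.represent_scalar(TaggedString.yaml_tag, str(data))

def _construct(loader, node):
    return TaggedString(loader.construct_scalar(node))

yaml.add_representer(TaggedString, _represent)
yaml.add_constructor(TaggedString.yaml_tag, _construct)

doc = yaml.dump({'password': TaggedString('s3cret')}, default_flow_style=False)
print(doc)                                   # password: !encrypted 's3cret'
print(yaml.load(doc, Loader=yaml.Loader))    # round-trips back to TaggedString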
@@ -215,11 +215,11 @@ class Loader(BaseLoader):  # pylint: disable=W0232

             # !reset instruction applies on document only.
             # It tells to reset previous decoded value for this present key.
-            reset = key_node.tag == u'!reset'
+            reset = key_node.tag == '!reset'

             # even if !aggregate tag apply only to values and not keys
             # it's a reason to act as a such nazi.
-            if key_node.tag == u'!aggregate':
+            if key_node.tag == '!aggregate':
                 log.warning('!aggregate applies on values only, not on keys')
                 value_node.tag = key_node.tag
             key_node.tag = self.resolve_sls_tag(key_node)[0]
|
salt/state.py: 2422 lines changed (diff suppressed because it is too large)
|
@@ -64,7 +64,7 @@ def exists(
         ret['comment'] = created['stderr']

     else:
-        ret['comment'] = u'{0} exists in {1}'.format(name, region)
+        ret['comment'] = '{0} exists in {1}'.format(name, region)

     return ret


@@ -106,6 +106,6 @@ def absent(
         ret['result'] = False
         ret['comment'] = removed['stderr']
     else:
-        ret['comment'] = u'{0} does not exist in {1}'.format(name, region)
+        ret['comment'] = '{0} does not exist in {1}'.format(name, region)

     return ret
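Both state functions above return the conventional Salt state result dict with name, result, comment, and changes keys. A tiny illustration of that contract, with a hypothetical presence check standing in for the real boto call:

# Illustration of the four-key return dict used by state functions like
# exists()/absent() above; resource_present is a hypothetical stand-in.
def exists(name, region='us-east-1', resource_present=True):
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    if resource_present:
        ret['comment'] = '{0} exists in {1}'.format(name, region)
    else:
        ret['result'] = False
        ret['comment'] = '{0} does not exist in {1}'.format(name, region)
    return ret

print(exists('my-topic'))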
|
|
@ -898,7 +898,7 @@ def run(name,
|
|||
|
||||
ret['changes'] = cmd_all
|
||||
ret['result'] = not bool(cmd_all['retcode'])
|
||||
ret['comment'] = u'Command "{0}" run'.format(name)
|
||||
ret['comment'] = 'Command "{0}" run'.format(name)
|
||||
|
||||
# Ignore timeout errors if asked (for nohups) and treat cmd as a success
|
||||
if ignore_timeout:
|
||||
|
|
|
@ -1517,11 +1517,11 @@ def exists(name,
|
|||
'result': True,
|
||||
'comment': ''}
|
||||
if not name:
|
||||
return _error(ret, u'Must provide name to file.exists')
|
||||
return _error(ret, 'Must provide name to file.exists')
|
||||
if not os.path.exists(name):
|
||||
return _error(ret, u'Specified path {0} does not exist'.format(name))
|
||||
return _error(ret, 'Specified path {0} does not exist'.format(name))
|
||||
|
||||
ret['comment'] = u'Path {0} exists'.format(name)
|
||||
ret['comment'] = 'Path {0} exists'.format(name)
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -1542,11 +1542,11 @@ def missing(name,
|
|||
'result': True,
|
||||
'comment': ''}
|
||||
if not name:
|
||||
return _error(ret, u'Must provide name to file.missing')
|
||||
return _error(ret, 'Must provide name to file.missing')
|
||||
if os.path.exists(name):
|
||||
return _error(ret, u'Specified path {0} exists'.format(name))
|
||||
return _error(ret, 'Specified path {0} exists'.format(name))
|
||||
|
||||
ret['comment'] = u'Path {0} is missing'.format(name)
|
||||
ret['comment'] = 'Path {0} is missing'.format(name)
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -2311,8 +2311,8 @@ def managed(name,
|
|||
if not create:
|
||||
if not os.path.isfile(name):
|
||||
# Don't create a file that is not already present
|
||||
ret['comment'] = (u'File {0} is not present and is not set for '
|
||||
u'creation').format(name)
|
||||
ret['comment'] = ('File {0} is not present and is not set for '
|
||||
'creation').format(name)
|
||||
return ret
|
||||
u_check = _check_user(user, group)
|
||||
if u_check:
|
||||
|
@ -2320,10 +2320,10 @@ def managed(name,
|
|||
return _error(ret, u_check)
|
||||
if not os.path.isabs(name):
|
||||
return _error(
|
||||
ret, u'Specified file {0} is not an absolute path'.format(name))
|
||||
ret, 'Specified file {0} is not an absolute path'.format(name))
|
||||
|
||||
if os.path.isdir(name):
|
||||
ret['comment'] = u'Specified target {0} is a directory'.format(name)
|
||||
ret['comment'] = 'Specified target {0} is a directory'.format(name)
|
||||
ret['result'] = False
|
||||
return ret
|
||||
|
||||
|
@ -2351,10 +2351,10 @@ def managed(name,
|
|||
ret, _ = __salt__['file.check_perms'](
|
||||
name, ret, user, group, mode, attrs, follow_symlinks)
|
||||
if __opts__['test']:
|
||||
ret['comment'] = u'File {0} not updated'.format(name)
|
||||
ret['comment'] = 'File {0} not updated'.format(name)
|
||||
elif not ret['changes'] and ret['result']:
|
||||
ret['comment'] = (u'File {0} exists with proper permissions. '
|
||||
u'No changes made.'.format(name))
|
||||
ret['comment'] = ('File {0} exists with proper permissions. '
|
||||
'No changes made.'.format(name))
|
||||
return ret
|
||||
|
||||
accum_data, _ = _load_accumulators()
|
||||
|
@ -2399,12 +2399,12 @@ def managed(name,
|
|||
ret['result'], ret['comment'] = ret['pchanges']
|
||||
elif ret['pchanges']:
|
||||
ret['result'] = None
|
||||
ret['comment'] = u'The file {0} is set to be changed'.format(name)
|
||||
ret['comment'] = 'The file {0} is set to be changed'.format(name)
|
||||
if 'diff' in ret['pchanges'] and not show_changes:
|
||||
ret['pchanges']['diff'] = '<show_changes=False>'
|
||||
else:
|
||||
ret['result'] = True
|
||||
ret['comment'] = u'The file {0} is in the correct state'.format(name)
|
||||
ret['comment'] = 'The file {0} is in the correct state'.format(name)
|
||||
|
||||
return ret
|
||||
|
||||
|
@ -3118,15 +3118,15 @@ def directory(name,
|
|||
removed = _clean_dir(name, list(keep), exclude_pat)
|
||||
if removed:
|
||||
ret['changes']['removed'] = removed
|
||||
ret['comment'] = u'Files cleaned from directory {0}'.format(name)
|
||||
ret['comment'] = 'Files cleaned from directory {0}'.format(name)
|
||||
|
||||
# issue 32707: reflect children_only selection in comments
|
||||
if not ret['comment']:
|
||||
if children_only:
|
||||
ret['comment'] = u'Directory {0}/* updated'.format(name)
|
||||
ret['comment'] = 'Directory {0}/* updated'.format(name)
|
||||
else:
|
||||
if ret['changes']:
|
||||
ret['comment'] = u'Directory {0} updated'.format(name)
|
||||
ret['comment'] = 'Directory {0} updated'.format(name)
|
||||
|
||||
if __opts__['test']:
|
||||
ret['comment'] = 'Directory {0} not updated'.format(name)
|
||||
|
@ -3135,9 +3135,9 @@ def directory(name,
|
|||
if ret['comment']:
|
||||
orig_comment = ret['comment']
|
||||
|
||||
ret['comment'] = u'Directory {0} is in the correct state'.format(name)
|
||||
ret['comment'] = 'Directory {0} is in the correct state'.format(name)
|
||||
if orig_comment:
|
||||
ret['comment'] = u'\n'.join([ret['comment'], orig_comment])
|
||||
ret['comment'] = '\n'.join([ret['comment'], orig_comment])
|
||||
|
||||
if errors:
|
||||
ret['result'] = False
|
||||
|
@ -3414,8 +3414,8 @@ def recurse(name,
|
|||
if x.startswith(srcpath + '/'))):
|
||||
ret['result'] = False
|
||||
ret['comment'] = (
|
||||
u'The directory \'{0}\' does not exist on the salt fileserver '
|
||||
u'in saltenv \'{1}\''.format(srcpath, senv)
|
||||
'The directory \'{0}\' does not exist on the salt fileserver '
|
||||
'in saltenv \'{1}\''.format(srcpath, senv)
|
||||
)
|
||||
return ret
|
||||
|
||||
|
@ -3451,8 +3451,8 @@ def recurse(name,
|
|||
if clean and os.path.exists(path) and os.path.isdir(path) and replace:
|
||||
_ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
|
||||
if __opts__['test']:
|
||||
_ret['comment'] = u'Replacing directory {0} with a ' \
|
||||
u'file'.format(path)
|
||||
_ret['comment'] = 'Replacing directory {0} with a ' \
|
||||
'file'.format(path)
|
||||
_ret['result'] = None
|
||||
merge_ret(path, _ret)
|
||||
return
|
||||
|
@ -3492,7 +3492,7 @@ def recurse(name,
|
|||
if clean and os.path.exists(path) and not os.path.isdir(path):
|
||||
_ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
|
||||
if __opts__['test']:
|
||||
_ret['comment'] = u'Replacing {0} with a directory'.format(path)
|
||||
_ret['comment'] = 'Replacing {0} with a directory'.format(path)
|
||||
_ret['result'] = None
|
||||
merge_ret(path, _ret)
|
||||
return
|
||||
|
@ -3552,15 +3552,15 @@ def recurse(name,

# Flatten comments until salt command line client learns
# to display structured comments in a readable fashion
ret['comment'] = '\n'.join(u'\n#### {0} ####\n{1}'.format(
ret['comment'] = '\n'.join('\n#### {0} ####\n{1}'.format(
k, v if isinstance(v, six.string_types) else '\n'.join(v)
) for (k, v) in six.iteritems(ret['comment'])).strip()

if not ret['comment']:
ret['comment'] = u'Recursively updated {0}'.format(name)
ret['comment'] = 'Recursively updated {0}'.format(name)

if not ret['changes'] and ret['result']:
ret['comment'] = u'The directory {0} is in the correct state'.format(
ret['comment'] = 'The directory {0} is in the correct state'.format(
name
)

@ -3744,13 +3744,13 @@ def retention_schedule(name, retain, strptime_format=None, timezone=None):

# TODO: track and report how much space was / would be reclaimed
if __opts__['test']:
ret['comment'] = u'{0} backups would have been removed from {1}.\n'.format(len(deletable_files), name)
ret['comment'] = '{0} backups would have been removed from {1}.\n'.format(len(deletable_files), name)
if deletable_files:
ret['result'] = None
else:
for f in deletable_files:
__salt__['file.remove'](os.path.join(name, f))
ret['comment'] = u'{0} backups were removed from {1}.\n'.format(len(deletable_files), name)
ret['comment'] = '{0} backups were removed from {1}.\n'.format(len(deletable_files), name)
ret['changes'] = changes

return ret

@ -4459,7 +4459,7 @@ def comment(name, regex, char='#', backup='.bak'):

ret['pchanges'][name] = 'updated'
if __opts__['test']:
ret['comment'] = u'File {0} is set to be updated'.format(name)
ret['comment'] = 'File {0} is set to be updated'.format(name)
ret['result'] = None
return ret
with salt.utils.files.fopen(name, 'rb') as fp_:

@ -4566,7 +4566,7 @@ def uncomment(name, regex, char='#', backup='.bak'):

ret['pchanges'][name] = 'updated'
if __opts__['test']:
ret['comment'] = u'File {0} is set to be updated'.format(name)
ret['comment'] = 'File {0} is set to be updated'.format(name)
ret['result'] = None
return ret
@ -4831,7 +4831,7 @@ def append(name,
return _error(ret, 'No text found to append. Nothing appended')

if __opts__['test']:
ret['comment'] = u'File {0} is set to be updated'.format(name)
ret['comment'] = 'File {0} is set to be updated'.format(name)
ret['result'] = None
nlines = list(slines)
nlines.extend(append_lines)

@ -4844,7 +4844,7 @@ def append(name,
'\n'.join(difflib.unified_diff(slines, nlines))
)
else:
ret['comment'] = u'File {0} is in correct state'.format(name)
ret['comment'] = 'File {0} is in correct state'.format(name)
ret['result'] = True
return ret

@ -4852,7 +4852,7 @@ def append(name,
__salt__['file.append'](name, args=append_lines)
ret['comment'] = 'Appended {0} lines'.format(len(append_lines))
else:
ret['comment'] = u'File {0} is in correct state'.format(name)
ret['comment'] = 'File {0} is in correct state'.format(name)

with salt.utils.files.fopen(name, 'rb') as fp_:
nlines = fp_.read()

@ -5019,7 +5019,7 @@ def prepend(name,

for line in lines:
if __opts__['test']:
ret['comment'] = u'File {0} is set to be updated'.format(name)
ret['comment'] = 'File {0} is set to be updated'.format(name)
ret['result'] = None
test_lines.append('{0}\n'.format(line))
else:

@ -5038,7 +5038,7 @@ def prepend(name,
)
ret['result'] = None
else:
ret['comment'] = u'File {0} is in correct state'.format(name)
ret['comment'] = 'File {0} is in correct state'.format(name)
ret['result'] = True
return ret

@ -5083,7 +5083,7 @@ def prepend(name,
if count:
ret['comment'] = 'Prepended {0} lines'.format(count)
else:
ret['comment'] = u'File {0} is in correct state'.format(name)
ret['comment'] = 'File {0} is in correct state'.format(name)
ret['result'] = True
return ret
@ -5176,7 +5176,7 @@ def patch(name,
# get cached file or copy it to cache
cached_source_path = __salt__['cp.cache_file'](source, __env__)
if not cached_source_path:
ret['comment'] = (u'Unable to cache {0} from saltenv \'{1}\''
ret['comment'] = ('Unable to cache {0} from saltenv \'{1}\''
.format(source, __env__))
return ret

@ -5190,7 +5190,7 @@ def patch(name,
name, cached_source_path, options=options, dry_run=True
)
if __opts__['test']:
ret['comment'] = u'File {0} will be patched'.format(name)
ret['comment'] = 'File {0} will be patched'.format(name)
ret['result'] = None
return ret
if ret['changes']['retcode'] != 0:

@ -5269,10 +5269,10 @@ def touch(name, atime=None, mtime=None, makedirs=False):

ret['result'] = __salt__['file.touch'](name, atime, mtime)
if not extant and ret['result']:
ret['comment'] = u'Created empty file {0}'.format(name)
ret['comment'] = 'Created empty file {0}'.format(name)
ret['changes']['new'] = name
elif extant and ret['result']:
ret['comment'] = u'Updated times on {0} {1}'.format(
ret['comment'] = 'Updated times on {0} {1}'.format(
'directory' if os.path.isdir(name) else 'file', name
)
ret['changes']['touched'] = name

@ -5360,7 +5360,7 @@ def copy(
ret = {
'name': name,
'changes': {},
'comment': u'Copied "{0}" to "{1}"'.format(source, name),
'comment': 'Copied "{0}" to "{1}"'.format(source, name),
'result': True}
if not name:
return _error(ret, 'Must provide name to file.copy')

@ -5430,20 +5430,20 @@ def copy(

if __opts__['test']:
if changed:
ret['comment'] = u'File "{0}" is set to be copied to "{1}"'.format(
ret['comment'] = 'File "{0}" is set to be copied to "{1}"'.format(
source,
name
)
ret['result'] = None
else:
ret['comment'] = (u'The target file "{0}" exists and will not be '
u'overwritten'.format(name))
ret['comment'] = ('The target file "{0}" exists and will not be '
'overwritten'.format(name))
ret['result'] = True
return ret

if not changed:
ret['comment'] = (u'The target file "{0}" exists and will not be '
u'overwritten'.format(name))
ret['comment'] = ('The target file "{0}" exists and will not be '
'overwritten'.format(name))
ret['result'] = True
return ret
@ -5512,17 +5512,17 @@ def rename(name, source, force=False, makedirs=False):

if not os.path.isabs(name):
return _error(
ret, u'Specified file {0} is not an absolute path'.format(name))
ret, 'Specified file {0} is not an absolute path'.format(name))

if not os.path.lexists(source):
ret['comment'] = (u'Source file "{0}" has already been moved out of '
u'place').format(source)
ret['comment'] = ('Source file "{0}" has already been moved out of '
'place').format(source)
return ret

if os.path.lexists(source) and os.path.lexists(name):
if not force:
ret['comment'] = (u'The target file "{0}" exists and will not be '
u'overwritten'.format(name))
ret['comment'] = ('The target file "{0}" exists and will not be '
'overwritten'.format(name))
ret['result'] = False
return ret
elif not __opts__['test']:

@ -5532,12 +5532,12 @@ def rename(name, source, force=False, makedirs=False):
except (IOError, OSError):
return _error(
ret,
u'Failed to delete "{0}" in preparation for '
u'forced move'.format(name)
'Failed to delete "{0}" in preparation for '
'forced move'.format(name)
)

if __opts__['test']:
ret['comment'] = u'File "{0}" is set to be moved to "{1}"'.format(
ret['comment'] = 'File "{0}" is set to be moved to "{1}"'.format(
source,
name
)

@ -5552,7 +5552,7 @@ def rename(name, source, force=False, makedirs=False):
else:
return _error(
ret,
u'The target directory {0} is not present'.format(dname))
'The target directory {0} is not present'.format(dname))
# All tests pass, move the file into place
try:
if os.path.islink(source):

@ -5563,9 +5563,9 @@ def rename(name, source, force=False, makedirs=False):
shutil.move(source, name)
except (IOError, OSError):
return _error(
ret, u'Failed to move "{0}" to "{1}"'.format(source, name))
ret, 'Failed to move "{0}" to "{1}"'.format(source, name))

ret['comment'] = u'Moved "{0}" to "{1}"'.format(source, name)
ret['comment'] = 'Moved "{0}" to "{1}"'.format(source, name)
ret['changes'] = {name: source}
return ret
@ -5644,7 +5644,7 @@ def accumulated(name, filename, text, **kwargs):
deps = require_in + watch_in
if not [x for x in deps if 'file' in x]:
ret['result'] = False
ret['comment'] = u'Orphaned accumulator {0} in {1}:{2}'.format(
ret['comment'] = 'Orphaned accumulator {0} in {1}:{2}'.format(
name,
__low__['__sls__'],
__low__['__id__']

@ -5668,8 +5668,8 @@ def accumulated(name, filename, text, **kwargs):
for chunk in text:
if chunk not in accum_data[filename][name]:
accum_data[filename][name].append(chunk)
ret['comment'] = (u'Accumulator {0} for file {1} '
u'was charged by text'.format(name, filename))
ret['comment'] = ('Accumulator {0} for file {1} '
'was charged by text'.format(name, filename))
_persist_accummulators(accum_data, accum_deps)
return ret

@ -5825,8 +5825,8 @@ def serialize(name,
if not create:
if not os.path.isfile(name):
# Don't create a file that is not already present
ret['comment'] = (u'File {0} is not present and is not set for '
u'creation').format(name)
ret['comment'] = ('File {0} is not present and is not set for '
'creation').format(name)
return ret

formatter = kwargs.pop('formatter', 'yaml').lower()

@ -5845,8 +5845,8 @@ def serialize(name,
if salt.utils.platform.is_windows():
if group is not None:
log.warning(
u'The group argument for %s has been ignored as this '
u'is a Windows system.', name
'The group argument for %s has been ignored as this '
'is a Windows system.', name
)
group = user
@ -5855,7 +5855,7 @@ def serialize(name,

if serializer_name not in __serializers__:
return {'changes': {},
'comment': u'{0} format is not supported'.format(
'comment': '{0} format is not supported'.format(
formatter.capitalize()),
'name': name,
'result': False

@ -5865,7 +5865,7 @@ def serialize(name,
if os.path.isfile(name):
if '{0}.deserialize'.format(formatter) not in __serializers__:
return {'changes': {},
'comment': (u'{0} format is not supported for merging'
'comment': ('{0} format is not supported for merging'
.format(formatter.capitalize())),
'name': name,
'result': False}

@ -5877,7 +5877,7 @@ def serialize(name,
merged_data = salt.utils.dictupdate.merge_recurse(existing_data, dataset)
if existing_data == merged_data:
ret['result'] = True
ret['comment'] = u'The file {0} is in the correct state'.format(name)
ret['comment'] = 'The file {0} is in the correct state'.format(name)
return ret
dataset = merged_data
contents = __serializers__[serializer_name](dataset, **default_serializer_opts.get(serializer_name, {}))

@ -5908,14 +5908,14 @@ def serialize(name,

if ret['changes']:
ret['result'] = None
ret['comment'] = u'Dataset will be serialized and stored into {0}'.format(
ret['comment'] = 'Dataset will be serialized and stored into {0}'.format(
name)

if not show_changes:
ret['changes']['diff'] = '<show_changes=False>'
else:
ret['result'] = True
ret['comment'] = u'The file {0} is in the correct state'.format(name)
ret['comment'] = 'The file {0} is in the correct state'.format(name)
return ret

return __salt__['file.manage_file'](name=name,
@ -6020,15 +6020,15 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
# Check for file existence
if __salt__['file.file_exists'](name):
ret['comment'] = (
u'File {0} exists and is not a character device. Refusing '
u'to continue'.format(name)
'File {0} exists and is not a character device. Refusing '
'to continue'.format(name)
)

# Check if it is a character device
elif not __salt__['file.is_chrdev'](name):
if __opts__['test']:
ret['comment'] = \
u'Character device {0} is set to be created'.format(name)
'Character device {0} is set to be created'.format(name)
ret['result'] = None
else:
ret = __salt__['file.mknod'](name,

@ -6044,8 +6044,8 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
devmaj, devmin = __salt__['file.get_devmm'](name)
if (major, minor) != (devmaj, devmin):
ret['comment'] = (
u'Character device {0} exists and has a different '
u'major/minor {1}/{2}. Refusing to continue'
'Character device {0} exists and has a different '
'major/minor {1}/{2}. Refusing to continue'
.format(name, devmaj, devmin)
)
# Check the perms

@ -6057,7 +6057,7 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
mode)[0]
if not ret['changes']:
ret['comment'] = (
u'Character device {0} is in the correct state'.format(
'Character device {0} is in the correct state'.format(
name
)
)
@ -6066,14 +6066,14 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
# Check for file existence
if __salt__['file.file_exists'](name):
ret['comment'] = (
u'File {0} exists and is not a block device. Refusing to '
u'continue'.format(name)
'File {0} exists and is not a block device. Refusing to '
'continue'.format(name)
)

# Check if it is a block device
elif not __salt__['file.is_blkdev'](name):
if __opts__['test']:
ret['comment'] = u'Block device {0} is set to be created'.format(name)
ret['comment'] = 'Block device {0} is set to be created'.format(name)
ret['result'] = None
else:
ret = __salt__['file.mknod'](name,

@ -6089,8 +6089,8 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
devmaj, devmin = __salt__['file.get_devmm'](name)
if (major, minor) != (devmaj, devmin):
ret['comment'] = (
u'Block device {0} exists and has a different major/minor '
u'{1}/{2}. Refusing to continue'.format(
'Block device {0} exists and has a different major/minor '
'{1}/{2}. Refusing to continue'.format(
name, devmaj, devmin
)
)

@ -6103,21 +6103,21 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
mode)[0]
if not ret['changes']:
ret['comment'] = (
u'Block device {0} is in the correct state'.format(name)
'Block device {0} is in the correct state'.format(name)
)

elif ntype == 'p':
# Check for file existence
if __salt__['file.file_exists'](name):
ret['comment'] = (
u'File {0} exists and is not a fifo pipe. Refusing to '
u'continue'.format(name)
'File {0} exists and is not a fifo pipe. Refusing to '
'continue'.format(name)
)

# Check if it is a fifo
elif not __salt__['file.is_fifo'](name):
if __opts__['test']:
ret['comment'] = u'Fifo pipe {0} is set to be created'.format(
ret['comment'] = 'Fifo pipe {0} is set to be created'.format(
name
)
ret['result'] = None

@ -6139,7 +6139,7 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
mode)[0]
if not ret['changes']:
ret['comment'] = (
u'Fifo pipe {0} is in the correct state'.format(name)
'Fifo pipe {0} is in the correct state'.format(name)
)

else:
@ -6495,18 +6495,18 @@ def shortcut(
else:
if _check_shortcut_ownership(name, user):
# The shortcut looks good!
ret['comment'] = (u'Shortcut {0} is present and owned by '
u'{1}'.format(name, user))
ret['comment'] = ('Shortcut {0} is present and owned by '
'{1}'.format(name, user))
else:
if _set_shortcut_ownership(name, user):
ret['comment'] = (u'Set ownership of shortcut {0} to '
u'{1}'.format(name, user))
ret['comment'] = ('Set ownership of shortcut {0} to '
'{1}'.format(name, user))
ret['changes']['ownership'] = '{0}'.format(user)
else:
ret['result'] = False
ret['comment'] += (
u'Failed to set ownership of shortcut {0} to '
u'{1}'.format(name, user)
'Failed to set ownership of shortcut {0} to '
'{1}'.format(name, user)
)
return ret

@ -6525,12 +6525,12 @@ def shortcut(
scut.Save()
except (AttributeError, pywintypes.com_error) as exc:
ret['result'] = False
ret['comment'] = (u'Unable to create new shortcut {0} -> '
u'{1}: {2}'.format(name, target, exc))
ret['comment'] = ('Unable to create new shortcut {0} -> '
'{1}: {2}'.format(name, target, exc))
return ret
else:
ret['comment'] = (u'Created new shortcut {0} -> '
u'{1}'.format(name, target))
ret['comment'] = ('Created new shortcut {0} -> '
'{1}'.format(name, target))
ret['changes']['new'] = name

if not _check_shortcut_ownership(name, user):

@ -68,9 +68,9 @@ def _represent_yaml_str(self, node):
Represent for yaml
'''
return self.represent_scalar(node)
YamlDumper.add_representer(u'tag:yaml.org,2002:str',
YamlDumper.add_representer('tag:yaml.org,2002:str',
_represent_yaml_str)
YamlDumper.add_representer(u'tag:yaml.org,2002:timestamp',
YamlDumper.add_representer('tag:yaml.org,2002:timestamp',
_represent_yaml_str)

@ -79,7 +79,7 @@ def _construct_yaml_str(self, node):
Construct for yaml
'''
return self.construct_scalar(node)
YamlLoader.add_constructor(u'tag:yaml.org,2002:timestamp',
YamlLoader.add_constructor('tag:yaml.org,2002:timestamp',
_construct_yaml_str)

logging.basicConfig(level=logging.DEBUG)

@ -18,9 +18,8 @@ In the minion configuration file, the following block is required:

.. versionadded:: 2017.7.0
'''

from __future__ import unicode_literals
from __future__ import absolute_import
from __future__ import unicode_literals

# import python std lib
import time

@ -495,7 +495,7 @@ def present(name,
val = 'XXX-REDACTED-XXX'
elif key == 'group' and not remove_groups:
key = 'ensure groups'
ret['comment'] += u'{0}: {1}\n'.format(key, val)
ret['comment'] += '{0}: {1}\n'.format(key, val)
return ret
# The user is present
if 'shadow.info' in __salt__:
@ -180,14 +180,14 @@ class PyWinUpdater(object):
try:
for update in self.search_results.Updates:
if update.InstallationBehavior.CanRequestUserInput:
log.debug(u'Skipped update {0}'.format(update.title))
log.debug('Skipped update {0}'.format(update.title))
continue
for category in update.Categories:
if self.skipDownloaded and update.IsDownloaded:
continue
if self.categories is None or category.Name in self.categories:
self.download_collection.Add(update)
log.debug(u'added update {0}'.format(update.title))
log.debug('added update {0}'.format(update.title))
self.foundCategories = _gather_update_categories(self.download_collection)
return True
except Exception as exc:

@ -260,7 +260,7 @@ class PyWinUpdater(object):
try:
for update in self.search_results.Updates:
if not update.EulaAccepted:
log.debug(u'Accepting EULA: {0}'.format(update.Title))
log.debug('Accepting EULA: {0}'.format(update.Title))
update.AcceptEula()
except Exception as exc:
log.info('Accepting Eula failed: {0}'.format(exc))

salt/syspaths.py
@ -22,9 +22,6 @@ from __future__ import absolute_import
import sys
import os.path

# Import Salt libs
from salt.utils.locales import sdecode

__PLATFORM = sys.platform.lower()
@ -45,99 +42,99 @@ except ImportError:
|
|||
|
||||
|
||||
# Let's find out the path of this module
|
||||
if u'SETUP_DIRNAME' in globals():
|
||||
if 'SETUP_DIRNAME' in globals():
|
||||
# This is from the exec() call in Salt's setup.py
|
||||
__THIS_FILE = os.path.join(SETUP_DIRNAME, u'salt', u'syspaths.py') # pylint: disable=E0602
|
||||
__THIS_FILE = os.path.join(SETUP_DIRNAME, 'salt', 'syspaths.py') # pylint: disable=E0602
|
||||
else:
|
||||
__THIS_FILE = __file__
|
||||
|
||||
|
||||
# These values are always relative to salt's installation directory
|
||||
INSTALL_DIR = os.path.dirname(os.path.realpath(__THIS_FILE))
|
||||
CLOUD_DIR = os.path.join(INSTALL_DIR, u'cloud')
|
||||
BOOTSTRAP = os.path.join(CLOUD_DIR, u'deploy', u'bootstrap-salt.sh')
|
||||
CLOUD_DIR = os.path.join(INSTALL_DIR, 'cloud')
|
||||
BOOTSTRAP = os.path.join(CLOUD_DIR, 'deploy', 'bootstrap-salt.sh')
|
||||
|
||||
ROOT_DIR = __generated_syspaths.ROOT_DIR
|
||||
if ROOT_DIR is None:
|
||||
# The installation time value was not provided, let's define the default
|
||||
if __PLATFORM.startswith(u'win'):
|
||||
ROOT_DIR = sdecode(r'c:\salt') # future lint: disable=non-unicode-string
|
||||
if __PLATFORM.startswith('win'):
|
||||
ROOT_DIR = r'c:\salt'
|
||||
else:
|
||||
ROOT_DIR = u'/'
|
||||
ROOT_DIR = '/'
|
||||
|
||||
CONFIG_DIR = __generated_syspaths.CONFIG_DIR
|
||||
if CONFIG_DIR is None:
|
||||
if __PLATFORM.startswith(u'win'):
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, u'conf')
|
||||
elif u'freebsd' in __PLATFORM:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, u'usr', u'local', u'etc', u'salt')
|
||||
elif u'netbsd' in __PLATFORM:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, u'usr', u'pkg', u'etc', u'salt')
|
||||
elif u'sunos5' in __PLATFORM:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, u'opt', u'local', u'etc', u'salt')
|
||||
if __PLATFORM.startswith('win'):
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, 'conf')
|
||||
elif 'freebsd' in __PLATFORM:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, 'usr', 'local', 'etc', 'salt')
|
||||
elif 'netbsd' in __PLATFORM:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, 'usr', 'pkg', 'etc', 'salt')
|
||||
elif 'sunos5' in __PLATFORM:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, 'opt', 'local', 'etc', 'salt')
|
||||
else:
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, u'etc', u'salt')
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, 'etc', 'salt')
|
||||
|
||||
SHARE_DIR = __generated_syspaths.SHARE_DIR
|
||||
if SHARE_DIR is None:
|
||||
if __PLATFORM.startswith(u'win'):
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, u'share')
|
||||
elif u'freebsd' in __PLATFORM:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, u'usr', u'local', u'share', u'salt')
|
||||
elif u'netbsd' in __PLATFORM:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, u'usr', u'share', u'salt')
|
||||
elif u'sunos5' in __PLATFORM:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, u'usr', u'share', u'salt')
|
||||
if __PLATFORM.startswith('win'):
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, 'share')
|
||||
elif 'freebsd' in __PLATFORM:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, 'usr', 'local', 'share', 'salt')
|
||||
elif 'netbsd' in __PLATFORM:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, 'usr', 'share', 'salt')
|
||||
elif 'sunos5' in __PLATFORM:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, 'usr', 'share', 'salt')
|
||||
else:
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, u'usr', u'share', u'salt')
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, 'usr', 'share', 'salt')
|
||||
|
||||
CACHE_DIR = __generated_syspaths.CACHE_DIR
|
||||
if CACHE_DIR is None:
|
||||
CACHE_DIR = os.path.join(ROOT_DIR, u'var', u'cache', u'salt')
|
||||
CACHE_DIR = os.path.join(ROOT_DIR, 'var', 'cache', 'salt')
|
||||
|
||||
SOCK_DIR = __generated_syspaths.SOCK_DIR
|
||||
if SOCK_DIR is None:
|
||||
SOCK_DIR = os.path.join(ROOT_DIR, u'var', u'run', u'salt')
|
||||
SOCK_DIR = os.path.join(ROOT_DIR, 'var', 'run', 'salt')
|
||||
|
||||
SRV_ROOT_DIR = __generated_syspaths.SRV_ROOT_DIR
|
||||
if SRV_ROOT_DIR is None:
|
||||
SRV_ROOT_DIR = os.path.join(ROOT_DIR, u'srv')
|
||||
SRV_ROOT_DIR = os.path.join(ROOT_DIR, 'srv')
|
||||
|
||||
BASE_FILE_ROOTS_DIR = __generated_syspaths.BASE_FILE_ROOTS_DIR
|
||||
if BASE_FILE_ROOTS_DIR is None:
|
||||
BASE_FILE_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, u'salt')
|
||||
BASE_FILE_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'salt')
|
||||
|
||||
BASE_PILLAR_ROOTS_DIR = __generated_syspaths.BASE_PILLAR_ROOTS_DIR
|
||||
if BASE_PILLAR_ROOTS_DIR is None:
|
||||
BASE_PILLAR_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, u'pillar')
|
||||
BASE_PILLAR_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'pillar')
|
||||
|
||||
BASE_THORIUM_ROOTS_DIR = __generated_syspaths.BASE_THORIUM_ROOTS_DIR
|
||||
if BASE_THORIUM_ROOTS_DIR is None:
|
||||
BASE_THORIUM_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, u'thorium')
|
||||
BASE_THORIUM_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'thorium')
|
||||
|
||||
BASE_MASTER_ROOTS_DIR = __generated_syspaths.BASE_MASTER_ROOTS_DIR
|
||||
if BASE_MASTER_ROOTS_DIR is None:
|
||||
BASE_MASTER_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, u'salt-master')
|
||||
BASE_MASTER_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'salt-master')
|
||||
|
||||
LOGS_DIR = __generated_syspaths.LOGS_DIR
|
||||
if LOGS_DIR is None:
|
||||
LOGS_DIR = os.path.join(ROOT_DIR, u'var', u'log', u'salt')
|
||||
LOGS_DIR = os.path.join(ROOT_DIR, 'var', 'log', 'salt')
|
||||
|
||||
PIDFILE_DIR = __generated_syspaths.PIDFILE_DIR
|
||||
if PIDFILE_DIR is None:
|
||||
PIDFILE_DIR = os.path.join(ROOT_DIR, u'var', u'run')
|
||||
PIDFILE_DIR = os.path.join(ROOT_DIR, 'var', 'run')
|
||||
|
||||
SPM_FORMULA_PATH = __generated_syspaths.SPM_FORMULA_PATH
|
||||
if SPM_FORMULA_PATH is None:
|
||||
SPM_FORMULA_PATH = os.path.join(SRV_ROOT_DIR, u'spm', u'salt')
|
||||
SPM_FORMULA_PATH = os.path.join(SRV_ROOT_DIR, 'spm', 'salt')
|
||||
|
||||
SPM_PILLAR_PATH = __generated_syspaths.SPM_PILLAR_PATH
|
||||
if SPM_PILLAR_PATH is None:
|
||||
SPM_PILLAR_PATH = os.path.join(SRV_ROOT_DIR, u'spm', u'pillar')
|
||||
SPM_PILLAR_PATH = os.path.join(SRV_ROOT_DIR, 'spm', 'pillar')
|
||||
|
||||
SPM_REACTOR_PATH = __generated_syspaths.SPM_REACTOR_PATH
|
||||
if SPM_REACTOR_PATH is None:
|
||||
SPM_REACTOR_PATH = os.path.join(SRV_ROOT_DIR, u'spm', u'reactor')
|
||||
SPM_REACTOR_PATH = os.path.join(SRV_ROOT_DIR, 'spm', 'reactor')
|
||||
|
||||
HOME_DIR = __generated_syspaths.HOME_DIR
|
||||
if HOME_DIR is None:
|
||||
|
@ -145,22 +142,22 @@ if HOME_DIR is None:
|
|||
|
||||
|
||||
__all__ = [
|
||||
u'ROOT_DIR',
|
||||
u'SHARE_DIR',
|
||||
u'CONFIG_DIR',
|
||||
u'CACHE_DIR',
|
||||
u'SOCK_DIR',
|
||||
u'SRV_ROOT_DIR',
|
||||
u'BASE_FILE_ROOTS_DIR',
|
||||
u'BASE_PILLAR_ROOTS_DIR',
|
||||
u'BASE_MASTER_ROOTS_DIR',
|
||||
u'BASE_THORIUM_ROOTS_DIR',
|
||||
u'LOGS_DIR',
|
||||
u'PIDFILE_DIR',
|
||||
u'INSTALL_DIR',
|
||||
u'CLOUD_DIR',
|
||||
u'BOOTSTRAP',
|
||||
u'SPM_FORMULA_PATH',
|
||||
u'SPM_PILLAR_PATH',
|
||||
u'SPM_REACTOR_PATH'
|
||||
'ROOT_DIR',
|
||||
'SHARE_DIR',
|
||||
'CONFIG_DIR',
|
||||
'CACHE_DIR',
|
||||
'SOCK_DIR',
|
||||
'SRV_ROOT_DIR',
|
||||
'BASE_FILE_ROOTS_DIR',
|
||||
'BASE_PILLAR_ROOTS_DIR',
|
||||
'BASE_MASTER_ROOTS_DIR',
|
||||
'BASE_THORIUM_ROOTS_DIR',
|
||||
'LOGS_DIR',
|
||||
'PIDFILE_DIR',
|
||||
'INSTALL_DIR',
|
||||
'CLOUD_DIR',
|
||||
'BOOTSTRAP',
|
||||
'SPM_FORMULA_PATH',
|
||||
'SPM_PILLAR_PATH',
|
||||
'SPM_REACTOR_PATH'
|
||||
]
|
||||
|
|
|
@ -27,7 +27,7 @@ log = logging.getLogger(__name__)
|
|||
# FIXME: we should make the default encoding of a .sls file a configurable
|
||||
# option in the config, and default it to 'utf-8'.
|
||||
#
|
||||
SLS_ENCODING = u'utf-8' # this one has no BOM.
|
||||
SLS_ENCODING = 'utf-8' # this one has no BOM.
|
||||
SLS_ENCODER = codecs.getencoder(SLS_ENCODING)
|
||||
|
||||
|
||||
|
@ -36,9 +36,9 @@ def compile_template(template,
|
|||
default,
|
||||
blacklist,
|
||||
whitelist,
|
||||
saltenv=u'base',
|
||||
sls=u'',
|
||||
input_data=u'',
|
||||
saltenv='base',
|
||||
sls='',
|
||||
input_data='',
|
||||
**kwargs):
|
||||
'''
|
||||
Take the path to a template and return the high data structure
|
||||
|
@ -48,24 +48,24 @@ def compile_template(template,
|
|||
# if any error occurs, we return an empty dictionary
|
||||
ret = {}
|
||||
|
||||
log.debug(u'compile template: %s', template)
|
||||
log.debug('compile template: %s', template)
|
||||
|
||||
if u'env' in kwargs:
|
||||
if 'env' in kwargs:
|
||||
# "env" is not supported; Use "saltenv".
|
||||
kwargs.pop(u'env')
|
||||
kwargs.pop('env')
|
||||
|
||||
if template != u':string:':
|
||||
if template != ':string:':
|
||||
# Template was specified incorrectly
|
||||
if not isinstance(template, six.string_types):
|
||||
log.error(u'Template was specified incorrectly: %s', template)
|
||||
log.error('Template was specified incorrectly: %s', template)
|
||||
return ret
|
||||
# Template does not exist
|
||||
if not os.path.isfile(template):
|
||||
log.error(u'Template does not exist: %s', template)
|
||||
log.error('Template does not exist: %s', template)
|
||||
return ret
|
||||
# Template is an empty file
|
||||
if salt.utils.files.is_empty(template):
|
||||
log.debug(u'Template is an empty file: %s', template)
|
||||
log.debug('Template is an empty file: %s', template)
|
||||
return ret
|
||||
|
||||
with codecs.open(template, encoding=SLS_ENCODING) as ifile:
|
||||
|
@ -73,13 +73,13 @@ def compile_template(template,
|
|||
input_data = ifile.read()
|
||||
if not input_data.strip():
|
||||
# Template is nothing but whitespace
|
||||
log.error(u'Template is nothing but whitespace: %s', template)
|
||||
log.error('Template is nothing but whitespace: %s', template)
|
||||
return ret
|
||||
|
||||
# Get the list of render funcs in the render pipe line.
|
||||
render_pipe = template_shebang(template, renderers, default, blacklist, whitelist, input_data)
|
||||
|
||||
windows_newline = u'\r\n' in input_data
|
||||
windows_newline = '\r\n' in input_data
|
||||
|
||||
input_data = StringIO(input_data)
|
||||
for render, argline in render_pipe:
|
||||
|
@ -88,13 +88,13 @@ def compile_template(template,
|
|||
render_kwargs = dict(renderers=renderers, tmplpath=template)
|
||||
render_kwargs.update(kwargs)
|
||||
if argline:
|
||||
render_kwargs[u'argline'] = argline
|
||||
render_kwargs['argline'] = argline
|
||||
start = time.time()
|
||||
ret = render(input_data, saltenv, sls, **render_kwargs)
|
||||
log.profile(
|
||||
u'Time (in seconds) to render \'%s\' using \'%s\' renderer: %s',
|
||||
'Time (in seconds) to render \'%s\' using \'%s\' renderer: %s',
|
||||
template,
|
||||
render.__module__.split(u'.')[-1],
|
||||
render.__module__.split('.')[-1],
|
||||
time.time() - start
|
||||
)
|
||||
if ret is None:
|
||||
|
@ -108,7 +108,7 @@ def compile_template(template,
|
|||
# structure) we don't want to log this.
|
||||
if salt.utils.stringio.is_readable(ret):
|
||||
log.debug(
|
||||
u'Rendered data from file: %s:\n%s',
|
||||
'Rendered data from file: %s:\n%s',
|
||||
template,
|
||||
salt.utils.locales.sdecode(ret.read())) # pylint: disable=no-member
|
||||
ret.seek(0) # pylint: disable=no-member
|
||||
|
@ -123,8 +123,8 @@ def compile_template(template,
|
|||
contents = ret
|
||||
|
||||
if isinstance(contents, six.string_types):
|
||||
if u'\r\n' not in contents:
|
||||
contents = contents.replace(u'\n', u'\r\n')
|
||||
if '\r\n' not in contents:
|
||||
contents = contents.replace('\n', '\r\n')
|
||||
ret = StringIO(contents) if is_stringio else contents
|
||||
else:
|
||||
if is_stringio:
|
||||
|
@ -138,7 +138,7 @@ def compile_template_str(template, renderers, default, blacklist, whitelist):
|
|||
derived from the template.
|
||||
'''
|
||||
fn_ = salt.utils.files.mkstemp()
|
||||
with salt.utils.files.fopen(fn_, u'wb') as ofile:
|
||||
with salt.utils.files.fopen(fn_, 'wb') as ofile:
|
||||
ofile.write(SLS_ENCODER(template)[0])
|
||||
return compile_template(fn_, renderers, default, blacklist, whitelist)
|
||||
|
||||
|
@ -161,16 +161,16 @@ def template_shebang(template, renderers, default, blacklist, whitelist, input_d
|
|||
#!mako|yaml_odict|stateconf
|
||||
|
||||
'''
|
||||
line = u''
|
||||
line = ''
|
||||
# Open up the first line of the sls template
|
||||
if template == u':string:':
|
||||
if template == ':string:':
|
||||
line = input_data.split()[0]
|
||||
else:
|
||||
with salt.utils.files.fopen(template, u'r') as ifile:
|
||||
with salt.utils.files.fopen(template, 'r') as ifile:
|
||||
line = ifile.readline()
|
||||
|
||||
# Check if it starts with a shebang and not a path
|
||||
if line.startswith(u'#!') and not line.startswith(u'#!/'):
|
||||
if line.startswith('#!') and not line.startswith('#!/'):
|
||||
# pull out the shebang data
|
||||
# If the shebang does not contain recognized/not-blacklisted/whitelisted
|
||||
# renderers, do not fall back to the default renderer
|
||||
|
@ -184,18 +184,18 @@ def template_shebang(template, renderers, default, blacklist, whitelist, input_d
|
|||
#
|
||||
OLD_STYLE_RENDERERS = {}
|
||||
|
||||
for comb in (u'yaml_jinja',
|
||||
u'yaml_mako',
|
||||
u'yaml_wempy',
|
||||
u'json_jinja',
|
||||
u'json_mako',
|
||||
u'json_wempy',
|
||||
u'yamlex_jinja',
|
||||
u'yamlexyamlex_mako',
|
||||
u'yamlexyamlex_wempy'):
|
||||
for comb in ('yaml_jinja',
|
||||
'yaml_mako',
|
||||
'yaml_wempy',
|
||||
'json_jinja',
|
||||
'json_mako',
|
||||
'json_wempy',
|
||||
'yamlex_jinja',
|
||||
'yamlexyamlex_mako',
|
||||
'yamlexyamlex_wempy'):
|
||||
|
||||
fmt, tmpl = comb.split(u'_')
|
||||
OLD_STYLE_RENDERERS[comb] = u'{0}|{1}'.format(tmpl, fmt)
|
||||
fmt, tmpl = comb.split('_')
|
||||
OLD_STYLE_RENDERERS[comb] = '{0}|{1}'.format(tmpl, fmt)
|
||||
|
||||
|
||||
def check_render_pipe_str(pipestr, renderers, blacklist, whitelist):
|
||||
|
@ -206,25 +206,25 @@ def check_render_pipe_str(pipestr, renderers, blacklist, whitelist):
|
|||
'''
|
||||
if pipestr is None:
|
||||
return []
|
||||
parts = [r.strip() for r in pipestr.split(u'|')]
|
||||
parts = [r.strip() for r in pipestr.split('|')]
|
||||
# Note: currently, | is not allowed anywhere in the shebang line except
|
||||
# as pipes between renderers.
|
||||
|
||||
results = []
|
||||
try:
|
||||
if parts[0] == pipestr and pipestr in OLD_STYLE_RENDERERS:
|
||||
parts = OLD_STYLE_RENDERERS[pipestr].split(u'|')
|
||||
parts = OLD_STYLE_RENDERERS[pipestr].split('|')
|
||||
for part in parts:
|
||||
name, argline = (part + u' ').split(u' ', 1)
|
||||
name, argline = (part + ' ').split(' ', 1)
|
||||
if whitelist and name not in whitelist or \
|
||||
blacklist and name in blacklist:
|
||||
log.warning(
|
||||
u'The renderer "%s" is disallowed by configuration and '
|
||||
u'will be skipped.', name
|
||||
'The renderer "%s" is disallowed by configuration and '
|
||||
'will be skipped.', name
|
||||
)
|
||||
continue
|
||||
results.append((renderers[name], argline.strip()))
|
||||
return results
|
||||
except KeyError:
|
||||
log.error(u'The renderer "%s" is not available', pipestr)
|
||||
log.error('The renderer "%s" is not available', pipestr)
|
||||
return []
|
||||
|
|
|
@ -8,97 +8,97 @@ from __future__ import absolute_import
|
|||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
graph_prefix = u'\x1b['
|
||||
graph_suffix = u'm'
|
||||
graph_prefix = '\x1b['
|
||||
graph_suffix = 'm'
|
||||
codes = {
|
||||
u'reset': u'0',
|
||||
'reset': '0',
|
||||
|
||||
u'bold': u'1',
|
||||
u'faint': u'2',
|
||||
u'italic': u'3',
|
||||
u'underline': u'4',
|
||||
u'blink': u'5',
|
||||
u'slow_blink': u'5',
|
||||
u'fast_blink': u'6',
|
||||
u'inverse': u'7',
|
||||
u'conceal': u'8',
|
||||
u'strike': u'9',
|
||||
'bold': '1',
|
||||
'faint': '2',
|
||||
'italic': '3',
|
||||
'underline': '4',
|
||||
'blink': '5',
|
||||
'slow_blink': '5',
|
||||
'fast_blink': '6',
|
||||
'inverse': '7',
|
||||
'conceal': '8',
|
||||
'strike': '9',
|
||||
|
||||
u'primary_font': u'10',
|
||||
u'reset_font': u'10',
|
||||
u'font_0': u'10',
|
||||
u'font_1': u'11',
|
||||
u'font_2': u'12',
|
||||
u'font_3': u'13',
|
||||
u'font_4': u'14',
|
||||
u'font_5': u'15',
|
||||
u'font_6': u'16',
|
||||
u'font_7': u'17',
|
||||
u'font_8': u'18',
|
||||
u'font_9': u'19',
|
||||
u'fraktur': u'20',
|
||||
'primary_font': '10',
|
||||
'reset_font': '10',
|
||||
'font_0': '10',
|
||||
'font_1': '11',
|
||||
'font_2': '12',
|
||||
'font_3': '13',
|
||||
'font_4': '14',
|
||||
'font_5': '15',
|
||||
'font_6': '16',
|
||||
'font_7': '17',
|
||||
'font_8': '18',
|
||||
'font_9': '19',
|
||||
'fraktur': '20',
|
||||
|
||||
u'double_underline': u'21',
|
||||
u'end_bold': u'21',
|
||||
u'normal_intensity': u'22',
|
||||
u'end_italic': u'23',
|
||||
u'end_fraktur': u'23',
|
||||
u'end_underline': u'24', # single or double
|
||||
u'end_blink': u'25',
|
||||
u'end_inverse': u'27',
|
||||
u'end_conceal': u'28',
|
||||
u'end_strike': u'29',
|
||||
'double_underline': '21',
|
||||
'end_bold': '21',
|
||||
'normal_intensity': '22',
|
||||
'end_italic': '23',
|
||||
'end_fraktur': '23',
|
||||
'end_underline': '24', # single or double
|
||||
'end_blink': '25',
|
||||
'end_inverse': '27',
|
||||
'end_conceal': '28',
|
||||
'end_strike': '29',
|
||||
|
||||
u'black': u'30',
|
||||
u'red': u'31',
|
||||
u'green': u'32',
|
||||
u'yellow': u'33',
|
||||
u'blue': u'34',
|
||||
u'magenta': u'35',
|
||||
u'cyan': u'36',
|
||||
u'white': u'37',
|
||||
u'extended': u'38',
|
||||
u'default': u'39',
|
||||
'black': '30',
|
||||
'red': '31',
|
||||
'green': '32',
|
||||
'yellow': '33',
|
||||
'blue': '34',
|
||||
'magenta': '35',
|
||||
'cyan': '36',
|
||||
'white': '37',
|
||||
'extended': '38',
|
||||
'default': '39',
|
||||
|
||||
u'fg_black': u'30',
|
||||
u'fg_red': u'31',
|
||||
u'fg_green': u'32',
|
||||
u'fg_yellow': u'33',
|
||||
u'fg_blue': u'34',
|
||||
u'fg_magenta': u'35',
|
||||
u'fg_cyan': u'36',
|
||||
u'fg_white': u'37',
|
||||
u'fg_extended': u'38',
|
||||
u'fg_default': u'39',
|
||||
'fg_black': '30',
|
||||
'fg_red': '31',
|
||||
'fg_green': '32',
|
||||
'fg_yellow': '33',
|
||||
'fg_blue': '34',
|
||||
'fg_magenta': '35',
|
||||
'fg_cyan': '36',
|
||||
'fg_white': '37',
|
||||
'fg_extended': '38',
|
||||
'fg_default': '39',
|
||||
|
||||
u'bg_black': u'40',
|
||||
u'bg_red': u'41',
|
||||
u'bg_green': u'42',
|
||||
u'bg_yellow': u'44',
|
||||
u'bg_blue': u'44',
|
||||
u'bg_magenta': u'45',
|
||||
u'bg_cyan': u'46',
|
||||
u'bg_white': u'47',
|
||||
u'bg_extended': u'48',
|
||||
u'bg_default': u'49',
|
||||
'bg_black': '40',
|
||||
'bg_red': '41',
|
||||
'bg_green': '42',
|
||||
'bg_yellow': '44',
|
||||
'bg_blue': '44',
|
||||
'bg_magenta': '45',
|
||||
'bg_cyan': '46',
|
||||
'bg_white': '47',
|
||||
'bg_extended': '48',
|
||||
'bg_default': '49',
|
||||
|
||||
u'frame': u'51',
|
||||
u'encircle': u'52',
|
||||
u'overline': u'53',
|
||||
u'end_frame': u'54',
|
||||
u'end_encircle': u'54',
|
||||
u'end_overline': u'55',
|
||||
'frame': '51',
|
||||
'encircle': '52',
|
||||
'overline': '53',
|
||||
'end_frame': '54',
|
||||
'end_encircle': '54',
|
||||
'end_overline': '55',
|
||||
|
||||
u'ideogram_underline': u'60',
|
||||
u'right_line': u'60',
|
||||
u'ideogram_double_underline': u'61',
|
||||
u'right_double_line': u'61',
|
||||
u'ideogram_overline': u'62',
|
||||
u'left_line': u'62',
|
||||
u'ideogram_double_overline': u'63',
|
||||
u'left_double_line': u'63',
|
||||
u'ideogram_stress': u'64',
|
||||
u'reset_ideogram': u'65'
|
||||
'ideogram_underline': '60',
|
||||
'right_line': '60',
|
||||
'ideogram_double_underline': '61',
|
||||
'right_double_line': '61',
|
||||
'ideogram_overline': '62',
|
||||
'left_line': '62',
|
||||
'ideogram_double_overline': '63',
|
||||
'left_double_line': '63',
|
||||
'ideogram_stress': '64',
|
||||
'reset_ideogram': '65'
|
||||
}
|
||||
|
||||
|
||||
|
@ -144,8 +144,8 @@ class TextFormat(object):
|
|||
'''
|
||||
self.codes = [codes[attr.lower()] for attr in attrs if isinstance(attr, six.string_types)]
|
||||
|
||||
if kwargs.get(u'reset', True):
|
||||
self.codes[:0] = [codes[u'reset']]
|
||||
if kwargs.get('reset', True):
|
||||
self.codes[:0] = [codes['reset']]
|
||||
|
||||
def qualify_int(i):
|
||||
if isinstance(i, int):
|
||||
|
@ -155,20 +155,20 @@ class TextFormat(object):
|
|||
if isinstance(t, (list, tuple)) and len(t) == 3:
|
||||
return qualify_int(t[0]), qualify_int(t[1]), qualify_int(t[2])
|
||||
|
||||
if kwargs.get(u'x', None) is not None:
|
||||
self.codes.extend((codes[u'extended'], u'5', qualify_int(kwargs[u'x'])))
|
||||
elif kwargs.get(u'rgb', None) is not None:
|
||||
self.codes.extend((codes[u'extended'], u'2'))
|
||||
self.codes.extend(*qualify_triple_int(kwargs[u'rgb']))
|
||||
if kwargs.get('x', None) is not None:
|
||||
self.codes.extend((codes['extended'], '5', qualify_int(kwargs['x'])))
|
||||
elif kwargs.get('rgb', None) is not None:
|
||||
self.codes.extend((codes['extended'], '2'))
|
||||
self.codes.extend(*qualify_triple_int(kwargs['rgb']))
|
||||
|
||||
if kwargs.get(u'bg_x', None) is not None:
|
||||
self.codes.extend((codes[u'extended'], u'5', qualify_int(kwargs[u'bg_x'])))
|
||||
elif kwargs.get(u'bg_rgb', None) is not None:
|
||||
self.codes.extend((codes[u'extended'], u'2'))
|
||||
self.codes.extend(*qualify_triple_int(kwargs[u'bg_rgb']))
|
||||
if kwargs.get('bg_x', None) is not None:
|
||||
self.codes.extend((codes['extended'], '5', qualify_int(kwargs['bg_x'])))
|
||||
elif kwargs.get('bg_rgb', None) is not None:
|
||||
self.codes.extend((codes['extended'], '2'))
|
||||
self.codes.extend(*qualify_triple_int(kwargs['bg_rgb']))
|
||||
|
||||
self.sequence = u'%s%s%s' % (graph_prefix, # pylint: disable=E1321
|
||||
u';'.join(self.codes),
|
||||
self.sequence = '%s%s%s' % (graph_prefix, # pylint: disable=E1321
|
||||
';'.join(self.codes),
|
||||
graph_suffix)
|
||||
|
||||
def __call__(self, text, reset=True):
|
||||
|
@ -183,8 +183,8 @@ class TextFormat(object):
|
|||
green_blink_text = TextFormat('blink', 'green')
|
||||
'The answer is: {0}'.format(green_blink_text(42))
|
||||
'''
|
||||
end = TextFormat(u'reset') if reset else u''
|
||||
return u'%s%s%s' % (self.sequence, text, end) # pylint: disable=E1321
|
||||
end = TextFormat('reset') if reset else ''
|
||||
return '%s%s%s' % (self.sequence, text, end) # pylint: disable=E1321
|
||||
|
||||
def __str__(self):
|
||||
return self.sequence
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
Custom configparser classes
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import re
|
||||
|
||||
# Import Salt libs
|
||||
|
@ -45,8 +45,8 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
... conf.write(fh)
|
||||
>>>
|
||||
'''
|
||||
DEFAULTSECT = u'DEFAULT'
|
||||
SPACEINDENT = u' ' * 8
|
||||
DEFAULTSECT = 'DEFAULT'
|
||||
SPACEINDENT = ' ' * 8
|
||||
|
||||
def __init__(self, defaults=None, dict_type=_default_dict,
|
||||
allow_no_value=True):
|
||||
|
@ -79,20 +79,20 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
break
|
||||
lineno = lineno + 1
|
||||
# comment or blank line?
|
||||
if line.strip() == u'' or line[0] in u'#;':
|
||||
if line.strip() == '' or line[0] in '#;':
|
||||
continue
|
||||
if line.split(None, 1)[0].lower() == u'rem' and line[0] in u'rR':
|
||||
if line.split(None, 1)[0].lower() == 'rem' and line[0] in 'rR':
|
||||
# no leading whitespace
|
||||
continue
|
||||
# Replace space indentation with a tab. Allows parser to work
|
||||
# properly in cases where someone has edited the git config by hand
|
||||
# and indented using spaces instead of tabs.
|
||||
if line.startswith(self.SPACEINDENT):
|
||||
line = u'\t' + line[len(self.SPACEINDENT):]
|
||||
line = '\t' + line[len(self.SPACEINDENT):]
|
||||
# is it a section header?
|
||||
mo = self.SECTCRE.match(line)
|
||||
if mo:
|
||||
sectname = mo.group(u'header')
|
||||
sectname = mo.group('header')
|
||||
if sectname in self._sections:
|
||||
cursect = self._sections[sectname]
|
||||
elif sectname == self.DEFAULTSECT:
|
||||
|
@ -112,21 +112,21 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
else:
|
||||
mo = self._optcre.match(line.lstrip())
|
||||
if mo:
|
||||
optname, vi, optval = mo.group(u'option', u'vi', u'value')
|
||||
optname, vi, optval = mo.group('option', 'vi', 'value')
|
||||
optname = self.optionxform(optname.rstrip())
|
||||
if optval is None:
|
||||
optval = u''
|
||||
optval = ''
|
||||
if optval:
|
||||
if vi in (u'=', u':') and u';' in optval:
|
||||
if vi in ('=', ':') and ';' in optval:
|
||||
# ';' is a comment delimiter only if it follows
|
||||
# a spacing character
|
||||
pos = optval.find(u';')
|
||||
pos = optval.find(';')
|
||||
if pos != -1 and optval[pos-1].isspace():
|
||||
optval = optval[:pos]
|
||||
optval = optval.strip()
|
||||
# Empty strings should be considered as blank strings
|
||||
if optval in (u'""', u"''"):
|
||||
optval = u''
|
||||
if optval in ('""', "''"):
|
||||
optval = ''
|
||||
self._add_option(cursect, optname, optval)
|
||||
else:
|
||||
# a non-fatal parsing error occurred. set up the
|
||||
|
@ -148,11 +148,11 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
if self._optcre is self.OPTCRE or value:
|
||||
is_list = isinstance(value, list)
|
||||
if is_list and not allow_list:
|
||||
raise TypeError('option value cannot be a list unless allow_list is True') # future lint: disable=non-unicode-string
|
||||
raise TypeError('option value cannot be a list unless allow_list is True')
|
||||
elif not is_list:
|
||||
value = [value]
|
||||
if not all(isinstance(x, six.string_types) for x in value):
|
||||
raise TypeError('option values must be strings') # future lint: disable=non-unicode-string
|
||||
raise TypeError('option values must be strings')
|
||||
|
||||
def get(self, section, option, as_list=False):
|
||||
'''
|
||||
|
@ -165,7 +165,7 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
ret = [ret]
|
||||
return ret
|
||||
|
||||
def set(self, section, option, value=u''):
|
||||
def set(self, section, option, value=''):
|
||||
'''
|
||||
This is overridden from the RawConfigParser merely to change the
|
||||
default value for the 'value' argument.
|
||||
|
@ -188,9 +188,9 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
sectdict[key] = [sectdict[key]]
|
||||
sectdict[key].append(value)
|
||||
else:
|
||||
raise TypeError('Expected str or list for option value, got %s' % type(value).__name__) # future lint: disable=non-unicode-string
|
||||
raise TypeError('Expected str or list for option value, got %s' % type(value).__name__)
|
||||
|
||||
def set_multivar(self, section, option, value=u''):
|
||||
def set_multivar(self, section, option, value=''):
|
||||
'''
|
||||
This function is unique to the GitConfigParser. It will add another
|
||||
value for the option if it already exists, converting the option's
|
||||
|
@ -255,18 +255,18 @@ class GitConfigParser(RawConfigParser, object): # pylint: disable=undefined-var
|
|||
4. Drops support for continuation lines.
|
||||
'''
|
||||
convert = salt.utils.stringutils.to_bytes \
|
||||
if u'b' in fp_.mode \
|
||||
if 'b' in fp_.mode \
|
||||
else salt.utils.stringutils.to_str
|
||||
if self._defaults:
|
||||
fp_.write(convert(u'[%s]\n' % self.DEFAULTSECT))
|
||||
fp_.write(convert('[%s]\n' % self.DEFAULTSECT))
|
||||
for (key, value) in six.iteritems(self._defaults):
|
||||
value = salt.utils.stringutils.to_unicode(value).replace(u'\n', u'\n\t')
|
||||
fp_.write(convert(u'%s = %s\n' % (key, value)))
|
||||
value = salt.utils.stringutils.to_unicode(value).replace('\n', '\n\t')
|
||||
fp_.write(convert('%s = %s\n' % (key, value)))
|
||||
for section in self._sections:
|
||||
fp_.write(convert(u'[%s]\n' % section))
|
||||
fp_.write(convert('[%s]\n' % section))
|
||||
for (key, value) in six.iteritems(self._sections[section]):
|
||||
if (value is not None) or (self._optcre == self.OPTCRE):
|
||||
if not isinstance(value, list):
|
||||
value = [value]
|
||||
for item in value:
|
||||
fp_.write(convert(u'\t%s\n' % u' = '.join((key, item)).rstrip()))
|
||||
fp_.write(convert('\t%s\n' % ' = '.join((key, item)).rstrip()))
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
|
||||
Context managers used throughout Salt's source code.
|
||||
'''
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
# Import python libs
|
||||
import copy
|
||||
|
@ -206,7 +206,7 @@ class NamespacedDictWrapper(collections.MutableMapping, dict):
|
|||
if override_name:
|
||||
self.__class__.__module__ = 'salt'
|
||||
# __name__ can't be assigned a unicode
|
||||
self.__class__.__name__ = str(override_name) # future lint: disable=non-unicode-string
|
||||
self.__class__.__name__ = str(override_name) # future lint: disable=blacklisted-function
|
||||
super(NamespacedDictWrapper, self).__init__(self._dict())
|
||||
|
||||
def _dict(self):
|
||||
|
|
|
@ -27,7 +27,7 @@ class JinjaFilter(object):
|
|||
'''
|
||||
name = self.name or function.__name__
|
||||
if name not in self.salt_jinja_filters:
|
||||
log.debug(u'Marking \'%s\' as a jinja filter', name)
|
||||
log.debug('Marking \'%s\' as a jinja filter', name)
|
||||
self.salt_jinja_filters[name] = function
|
||||
return function
|
||||
|
||||
|
|
|
@ -176,16 +176,16 @@ def _prompt_choice(var_name, options):
|
|||
:returns: The selected user
|
||||
'''
|
||||
choice_map = OrderedDict(
|
||||
(u'{0}'.format(i), value) for i, value in enumerate(options, 1) if value[0] != 'test'
|
||||
('{0}'.format(i), value) for i, value in enumerate(options, 1) if value[0] != 'test'
|
||||
)
|
||||
choices = choice_map.keys()
|
||||
default = u'1'
|
||||
default = '1'
|
||||
|
||||
choice_lines = [u'{0} - {1} - {2}'.format(c[0], c[1][0], c[1][1]) for c in choice_map.items()]
|
||||
prompt = u'\n'.join((
|
||||
u'Select {0}:'.format(var_name),
|
||||
u'\n'.join(choice_lines),
|
||||
u'Choose from {0}'.format(u', '.join(choices))
|
||||
choice_lines = ['{0} - {1} - {2}'.format(c[0], c[1][0], c[1][1]) for c in choice_map.items()]
|
||||
prompt = '\n'.join((
|
||||
'Select {0}:'.format(var_name),
|
||||
'\n'.join(choice_lines),
|
||||
'Choose from {0}'.format(', '.join(choices))
|
||||
))
|
||||
|
||||
user_choice = click.prompt(
|
||||
|
|
|
@ -548,14 +548,14 @@ def safe_filename_leaf(file_basename):
|
|||
:codeauthor: Damon Atkins <https://github.com/damon-atkins>
|
||||
'''
|
||||
def _replace(re_obj):
|
||||
return urllib.quote(re_obj.group(0), safe=u'')
|
||||
return urllib.quote(re_obj.group(0), safe='')
|
||||
if not isinstance(file_basename, six.text_type):
|
||||
# the following string is not prefixed with u
|
||||
return re.sub('[\\\\:/*?"<>|]',
|
||||
_replace,
|
||||
six.text_type(file_basename, 'utf8').encode('ascii', 'backslashreplace'))
|
||||
# the following string is prefixed with u
|
||||
return re.sub(u'[\\\\:/*?"<>|]', _replace, file_basename, flags=re.UNICODE)
|
||||
return re.sub('[\\\\:/*?"<>|]', _replace, file_basename, flags=re.UNICODE)
|
||||
|
||||
|
||||
def safe_filepath(file_path_name, dir_sep=None):
|
||||
|
|
|
@ -338,7 +338,7 @@ def sanitize_win_path(winpath):
|
|||
'''
|
||||
intab = '<>:|?*'
|
||||
if isinstance(winpath, six.text_type):
|
||||
winpath = winpath.translate(dict((ord(c), u'_') for c in intab))
|
||||
winpath = winpath.translate(dict((ord(c), '_') for c in intab))
|
||||
elif isinstance(winpath, six.string_types):
|
||||
outtab = '_' * len(intab)
|
||||
trantab = ''.maketrans(intab, outtab) if six.PY3 else string.maketrans(intab, outtab) # pylint: disable=no-member
|
||||
|
|
|
@ -203,8 +203,8 @@ def merge_subreturn(original_return, sub_return, subkey=None):
|
|||
else:
|
||||
if original_return['comment']:
|
||||
# Skip for empty original comments
|
||||
original_return['comment'] += u'\n'
|
||||
original_return['comment'] += u'\n'.join(sub_comment)
|
||||
original_return['comment'] += '\n'
|
||||
original_return['comment'] += '\n'.join(sub_comment)
|
||||
|
||||
if sub_return['changes']: # changes always exists
|
||||
original_return.setdefault('changes', {})
|
||||
|
|
|
@ -4,14 +4,13 @@ Functions for manipulating or otherwise processing strings
'''

# Import Python libs
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals
import errno
import fnmatch
import logging
import os
import shlex
import re
import string
import time

# Import Salt libs

@ -147,10 +146,10 @@ def is_binary(data):
'''
Detects if the passed string of data is binary or text
'''
if not data or not isinstance(data, six.string_types):
return False
if '\0' in data:
return True
if not data:
return False

text_characters = ''.join([chr(x) for x in range(32, 127)] + list('\n\r\t\b'))
# Get the non-text characters (map each character to itself then use the

@ -159,8 +158,11 @@ def is_binary(data):
trans = ''.maketrans('', '', text_characters)
nontext = data.translate(trans)
else:
trans = string.maketrans('', '') # pylint: disable=no-member
nontext = data.translate(trans, text_characters)
if isinstance(data, unicode): # pylint: disable=incompatible-py3-code
trans_args = ({ord(x): None for x in text_characters},)
else:
trans_args = (None, str(text_characters)) # future lint: blacklisted-function
nontext = data.translate(*trans_args)

# If more than 30% non-text characters, then
# this is considered binary data
@ -3,7 +3,7 @@
|
|||
Template render systems
|
||||
'''
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
# Import Python libs
|
||||
import codecs
|
||||
|
@ -127,7 +127,7 @@ def get_context(template, line, num_lines=5, marker=None):
|
|||
if marker:
|
||||
buf[error_line_in_context] += marker
|
||||
|
||||
return u'---\n{0}\n---'.format(u'\n'.join(buf))
|
||||
return '---\n{0}\n---'.format('\n'.join(buf))
|
||||
|
||||
|
||||
def wrap_tmpl_func(render_str):
|
||||
|
@ -256,7 +256,7 @@ def _get_jinja_error_message(tb_data):
|
|||
'''
|
||||
try:
|
||||
line = _get_jinja_error_slug(tb_data)
|
||||
return u'{0}({1}):\n{3}'.format(*line)
|
||||
return '{0}({1}):\n{3}'.format(*line)
|
||||
except IndexError:
|
||||
pass
|
||||
return None
|
||||
|
|
|
@ -48,9 +48,9 @@ def create(path, saltenv=None):
path = salt.utils.path.sanitize_win_path(path)
path = sdecode(path)

query = u'saltenv={0}'.format(saltenv) if saltenv else ''
query = 'saltenv={0}'.format(saltenv) if saltenv else ''
url = sdecode(urlunparse(('file', '', path, '', query, '')))
return u'salt://{0}'.format(url[len('file:///'):])
return 'salt://{0}'.format(url[len('file:///'):])


def is_escaped(url):

@ -82,13 +82,13 @@ def escape(url):
if url.startswith('|'):
return url
else:
return u'|{0}'.format(url)
return '|{0}'.format(url)
elif scheme == 'salt':
path, saltenv = parse(url)
if path.startswith('|'):
return create(path, saltenv)
else:
return create(u'|{0}'.format(path), saltenv)
return create('|{0}'.format(path), saltenv)
else:
return url
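The escape() hunk shows the convention this module uses for literal paths: a leading '|' marks the URL as already escaped and the call becomes a no-op. A stripped-down sketch of just that rule, ignoring the salt:// branch above:

def escape_path(path):
    # Idempotent: an already-escaped path is returned unchanged.
    return path if path.startswith('|') else '|{0}'.format(path)

print(escape_path('foo bar.sls'))    # |foo bar.sls
print(escape_path('|foo bar.sls'))   # |foo bar.sls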
@ -45,13 +45,13 @@ class SaltYamlSafeLoader(yaml.SafeLoader, object):
if dictclass is not dict:
# then assume ordered dict and use it for both !map and !omap
self.add_constructor(
u'tag:yaml.org,2002:map',
'tag:yaml.org,2002:map',
type(self).construct_yaml_map)
self.add_constructor(
u'tag:yaml.org,2002:omap',
'tag:yaml.org,2002:omap',
type(self).construct_yaml_map)
self.add_constructor(
u'tag:yaml.org,2002:python/unicode',
'tag:yaml.org,2002:python/unicode',
type(self).construct_unicode)
self.dictclass = dictclass

@ -119,7 +119,7 @@ class SaltYamlSafeLoader(yaml.SafeLoader, object):
while index < len(node.value):
key_node, value_node = node.value[index]

if key_node.tag == u'tag:yaml.org,2002:merge':
if key_node.tag == 'tag:yaml.org,2002:merge':
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)

@ -142,8 +142,8 @@ class SaltYamlSafeLoader(yaml.SafeLoader, object):
node.start_mark,
"expected a mapping or list of mappings for merging, but found {0}".format(value_node.id),
value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
elif key_node.tag == 'tag:yaml.org,2002:value':
key_node.tag = 'tag:yaml.org,2002:str'
index += 1
else:
index += 1
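The constructor registrations above are the standard PyYAML way to redirect a tag to a custom builder. A self-contained sketch of the same idea, mapping the generic map tag onto OrderedDict (this is the usual recipe, not Salt's loader itself):

from collections import OrderedDict
import yaml

class OrderedLoader(yaml.SafeLoader):
    pass

def construct_ordered_map(loader, node):
    # Resolve merge keys first, then build the mapping in document order.
    loader.flatten_mapping(node)
    return OrderedDict(loader.construct_pairs(node))

OrderedLoader.add_constructor('tag:yaml.org,2002:map', construct_ordered_map)

print(yaml.load('b: 2\na: 1', Loader=OrderedLoader))  # an OrderedDict preserving document order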
salt/version.py
@ -4,7 +4,7 @@ Set up the version of Salt
'''

# Import python libs
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals
import re
import sys
import locale
@ -60,9 +60,8 @@ class SaltStackVersion(object):
and also supports version comparison.
'''

__slots__ = (u'name', u'major', u'minor', u'bugfix', u'mbugfix', u'pre_type', u'pre_num', u'noc', u'sha')
__slots__ = ('name', 'major', 'minor', 'bugfix', 'mbugfix', 'pre_type', 'pre_num', 'noc', 'sha')

# future lint: disable=non-unicode-string
git_describe_regex = re.compile(
r'(?:[^\d]+)?(?P<major>[\d]{1,4})'
r'\.(?P<minor>[\d]{1,2})'

@ -75,7 +74,6 @@ class SaltStackVersion(object):
if six.PY2:
git_sha_regex = git_sha_regex.decode(__salt_system_encoding__)
git_sha_regex = re.compile(git_sha_regex)
# future lint: enable=non-unicode-string

# Salt versions after 0.17.0 will be numbered like:
# <4-digit-year>.<month>.<bugfix>
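git_describe_regex is only partially visible in this hunk, so the sketch below uses a simplified pattern of the same shape to show how `git describe` output decomposes into the named groups the class stores (the exact Salt pattern has more optional groups than this):

import re

describe_re = re.compile(
    r'v?(?P<major>\d{1,4})\.(?P<minor>\d{1,2})\.(?P<bugfix>\d{1,2})'
    r'(?:-(?P<noc>\d+)-g(?P<sha>[a-f0-9]{7,40}))?'
)

match = describe_re.match('v2017.7.2-156-gabc1234')
print(match.groupdict())
# {'major': '2017', 'minor': '7', 'bugfix': '2', 'noc': '156', 'sha': 'abc1234'}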
@ -93,17 +91,17 @@ class SaltStackVersion(object):
# ----- Please refrain from fixing PEP-8 E203 and E265 ----->
# The idea is to keep this readable.
# -----------------------------------------------------------
u'Hydrogen' : (2014, 1),
u'Helium' : (2014, 7),
u'Lithium' : (2015, 5),
u'Beryllium' : (2015, 8),
u'Boron' : (2016, 3),
u'Carbon' : (2016, 11),
u'Nitrogen' : (2017, 7),
u'Oxygen' : (MAX_SIZE - 101, 0),
u'Fluorine' : (MAX_SIZE - 100, 0),
u'Neon' : (MAX_SIZE - 99, 0),
u'Sodium' : (MAX_SIZE - 98, 0),
'Hydrogen' : (2014, 1),
'Helium' : (2014, 7),
'Lithium' : (2015, 5),
'Beryllium' : (2015, 8),
'Boron' : (2016, 3),
'Carbon' : (2016, 11),
'Nitrogen' : (2017, 7),
'Oxygen' : (MAX_SIZE - 101, 0),
'Fluorine' : (MAX_SIZE - 100, 0),
'Neon' : (MAX_SIZE - 99, 0),
'Sodium' : (MAX_SIZE - 98, 0),
# pylint: disable=E8265
#'Sodium' : (MAX_SIZE - 98, 0),
#'Magnesium' : (MAX_SIZE - 97, 0),
@ -239,7 +237,7 @@ class SaltStackVersion(object):
mbugfix = int(mbugfix)

if pre_type is None:
pre_type = u''
pre_type = ''
if pre_num is None:
pre_num = 0
elif isinstance(pre_num, string_types):

@ -247,7 +245,7 @@ class SaltStackVersion(object):

if noc is None:
noc = 0
elif isinstance(noc, string_types) and noc == u'n/a':
elif isinstance(noc, string_types) and noc == 'n/a':
noc = -1
elif isinstance(noc, string_types):
noc = int(noc)
@ -270,7 +268,7 @@ class SaltStackVersion(object):
match = cls.git_describe_regex.match(vstr)
if not match:
raise ValueError(
u'Unable to parse version string: \'{0}\''.format(version_string)
'Unable to parse version string: \'{0}\''.format(version_string)
)
return cls(*match.groups())

@ -278,7 +276,7 @@ class SaltStackVersion(object):
def from_name(cls, name):
if name.lower() not in cls.LNAMES:
raise ValueError(
u'Named version \'{0}\' is not known'.format(name)
'Named version \'{0}\' is not known'.format(name)
)
return cls(*cls.LNAMES[name.lower()])
@ -354,20 +352,20 @@ class SaltStackVersion(object):

@property
def string(self):
version_string = u'{0}.{1}.{2}'.format(
version_string = '{0}.{1}.{2}'.format(
self.major,
self.minor,
self.bugfix
)
if self.mbugfix:
version_string += u'.{0}'.format(self.mbugfix)
version_string += '.{0}'.format(self.mbugfix)
if self.pre_type:
version_string += u'{0}{1}'.format(self.pre_type, self.pre_num)
version_string += '{0}{1}'.format(self.pre_type, self.pre_num)
if self.noc and self.sha:
noc = self.noc
if noc < 0:
noc = u'n/a'
version_string += u'-{0}-{1}'.format(noc, self.sha)
noc = 'n/a'
version_string += '-{0}-{1}'.format(noc, self.sha)
return version_string

@property
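Spelled out with concrete, made-up field values, the string property above assembles a version like this:

major, minor, bugfix, mbugfix = 2017, 7, 2, 0
pre_type, pre_num = 'rc', 1
noc, sha = 156, 'abc1234'

version_string = '{0}.{1}.{2}'.format(major, minor, bugfix)
if mbugfix:
    version_string += '.{0}'.format(mbugfix)
if pre_type:
    version_string += '{0}{1}'.format(pre_type, pre_num)
if noc and sha:
    version_string += '-{0}-{1}'.format(noc, sha)

print(version_string)  # 2017.7.2rc1-156-abc1234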
@ -375,14 +373,14 @@ class SaltStackVersion(object):
if self.name and self.major > 10000:
version_string = self.name
if self.sse:
version_string += u' Enterprise'
version_string += u' (Unreleased)'
version_string += ' Enterprise'
version_string += ' (Unreleased)'
return version_string
version_string = self.string
if self.sse:
version_string += u' Enterprise'
version_string += ' Enterprise'
if (self.major, self.minor) in self.RMATCH:
version_string += u' ({0})'.format(self.RMATCH[(self.major, self.minor)])
version_string += ' ({0})'.format(self.RMATCH[(self.major, self.minor)])
return version_string

def __str__(self):
@ -396,7 +394,7 @@ class SaltStackVersion(object):
other = SaltStackVersion(*other)
else:
raise ValueError(
u'Cannot instantiate Version from type \'{0}\''.format(
'Cannot instantiate Version from type \'{0}\''.format(
type(other)
)
)

@ -408,13 +406,13 @@ class SaltStackVersion(object):
if self.pre_type and not other.pre_type:
# We have pre-release information, the other side doesn't
other_noc_info = list(other.noc_info)
other_noc_info[4] = u'zzzzz'
other_noc_info[4] = 'zzzzz'
return method(self.noc_info, tuple(other_noc_info))

if not self.pre_type and other.pre_type:
# The other side has pre-release information, we don't
noc_info = list(self.noc_info)
noc_info[4] = u'zzzzz'
noc_info[4] = 'zzzzz'
return method(tuple(noc_info), other.noc_info)

def __lt__(self, other):
@ -438,25 +436,25 @@ class SaltStackVersion(object):
def __repr__(self):
parts = []
if self.name:
parts.append(u'name=\'{0}\''.format(self.name))
parts.append('name=\'{0}\''.format(self.name))
parts.extend([
u'major={0}'.format(self.major),
u'minor={0}'.format(self.minor),
u'bugfix={0}'.format(self.bugfix)
'major={0}'.format(self.major),
'minor={0}'.format(self.minor),
'bugfix={0}'.format(self.bugfix)
])
if self.mbugfix:
parts.append(u'minor-bugfix={0}'.format(self.mbugfix))
parts.append('minor-bugfix={0}'.format(self.mbugfix))
if self.pre_type:
parts.append(u'{0}={1}'.format(self.pre_type, self.pre_num))
parts.append('{0}={1}'.format(self.pre_type, self.pre_num))
noc = self.noc
if noc == -1:
noc = u'n/a'
noc = 'n/a'
if noc and self.sha:
parts.extend([
u'noc={0}'.format(noc),
u'sha={0}'.format(self.sha)
'noc={0}'.format(noc),
'sha={0}'.format(self.sha)
])
return u'<{0} {1}>'.format(self.__class__.__name__, u' '.join(parts))
return '<{0} {1}>'.format(self.__class__.__name__, ' '.join(parts))


# ----- Hardcoded Salt Codename Version Information ----------------------------------------------------------------->
@ -474,15 +472,15 @@ def __discover_version(saltstack_version):
import os
import subprocess

if u'SETUP_DIRNAME' in globals():
if 'SETUP_DIRNAME' in globals():
# This is from the exec() call in Salt's setup.py
cwd = SETUP_DIRNAME # pylint: disable=E0602
if not os.path.exists(os.path.join(cwd, u'.git')):
if not os.path.exists(os.path.join(cwd, '.git')):
# This is not a Salt git checkout!!! Don't even try to parse...
return saltstack_version
else:
cwd = os.path.abspath(os.path.dirname(__file__))
if not os.path.exists(os.path.join(os.path.dirname(cwd), u'.git')):
if not os.path.exists(os.path.join(os.path.dirname(cwd), '.git')):
# This is not a Salt git checkout!!! Don't even try to parse...
return saltstack_version
@ -493,12 +491,12 @@ def __discover_version(saltstack_version):
cwd=cwd
)

if not sys.platform.startswith(u'win'):
if not sys.platform.startswith('win'):
# Let's not import `salt.utils` for the above check
kwargs[u'close_fds'] = True
kwargs['close_fds'] = True

process = subprocess.Popen(
[u'git', u'describe', u'--tags', u'--first-parent', u'--match', u'v[0-9]*', u'--always'], **kwargs)
['git', 'describe', '--tags', '--first-parent', '--match', 'v[0-9]*', '--always'], **kwargs)

out, err = process.communicate()

@ -506,7 +504,7 @@ def __discover_version(saltstack_version):
# The git version running this might not support --first-parent
# Revert to old command
process = subprocess.Popen(
[u'git', u'describe', u'--tags', u'--match', u'v[0-9]*', u'--always'], **kwargs)
['git', 'describe', '--tags', '--match', 'v[0-9]*', '--always'], **kwargs)
out, err = process.communicate()
out = out.strip()
err = err.strip()
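Condensed into a standalone helper, the fallback logic above looks roughly like this (stdout/stderr handling is simplified, and the output depends on the repository it runs in):

import subprocess

def git_describe(cwd):
    commands = (
        ['git', 'describe', '--tags', '--first-parent', '--match', 'v[0-9]*', '--always'],
        # Older git releases do not support --first-parent, so fall back.
        ['git', 'describe', '--tags', '--match', 'v[0-9]*', '--always'],
    )
    for cmd in commands:
        process = subprocess.Popen(
            cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, _ = process.communicate()
        if process.returncode == 0 and out.strip():
            return out.strip().decode()
    return None

print(git_describe('.'))  # e.g. 'v2017.7.2-156-gabc1234' inside a git checkout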
@ -563,7 +561,7 @@ def salt_information():
'''
Report version of salt.
'''
yield u'Salt', __version__
yield 'Salt', __version__


def dependency_information(include_salt_cloud=False):
@ -571,39 +569,39 @@ def dependency_information(include_salt_cloud=False):
Report versions of library dependencies.
'''
libs = [
(u'Python', None, sys.version.rsplit(u'\n')[0].strip()),
(u'Jinja2', u'jinja2', u'__version__'),
(u'M2Crypto', u'M2Crypto', u'version'),
(u'msgpack-python', u'msgpack', u'version'),
(u'msgpack-pure', u'msgpack_pure', u'version'),
(u'pycrypto', u'Crypto', u'__version__'),
(u'pycryptodome', u'Cryptodome', u'version_info'),
(u'libnacl', u'libnacl', u'__version__'),
(u'PyYAML', u'yaml', u'__version__'),
(u'ioflo', u'ioflo', u'__version__'),
(u'PyZMQ', u'zmq', u'__version__'),
(u'RAET', u'raet', u'__version__'),
(u'ZMQ', u'zmq', u'zmq_version'),
(u'Mako', u'mako', u'__version__'),
(u'Tornado', u'tornado', u'version'),
(u'timelib', u'timelib', u'version'),
(u'dateutil', u'dateutil', u'__version__'),
(u'pygit2', u'pygit2', u'__version__'),
(u'libgit2', u'pygit2', u'LIBGIT2_VERSION'),
(u'smmap', u'smmap', u'__version__'),
(u'cffi', u'cffi', u'__version__'),
(u'pycparser', u'pycparser', u'__version__'),
(u'gitdb', u'gitdb', u'__version__'),
(u'gitpython', u'git', u'__version__'),
(u'python-gnupg', u'gnupg', u'__version__'),
(u'mysql-python', u'MySQLdb', u'__version__'),
(u'cherrypy', u'cherrypy', u'__version__'),
(u'docker-py', u'docker', u'__version__'),
('Python', None, sys.version.rsplit('\n')[0].strip()),
('Jinja2', 'jinja2', '__version__'),
('M2Crypto', 'M2Crypto', 'version'),
('msgpack-python', 'msgpack', 'version'),
('msgpack-pure', 'msgpack_pure', 'version'),
('pycrypto', 'Crypto', '__version__'),
('pycryptodome', 'Cryptodome', 'version_info'),
('libnacl', 'libnacl', '__version__'),
('PyYAML', 'yaml', '__version__'),
('ioflo', 'ioflo', '__version__'),
('PyZMQ', 'zmq', '__version__'),
('RAET', 'raet', '__version__'),
('ZMQ', 'zmq', 'zmq_version'),
('Mako', 'mako', '__version__'),
('Tornado', 'tornado', 'version'),
('timelib', 'timelib', 'version'),
('dateutil', 'dateutil', '__version__'),
('pygit2', 'pygit2', '__version__'),
('libgit2', 'pygit2', 'LIBGIT2_VERSION'),
('smmap', 'smmap', '__version__'),
('cffi', 'cffi', '__version__'),
('pycparser', 'pycparser', '__version__'),
('gitdb', 'gitdb', '__version__'),
('gitpython', 'git', '__version__'),
('python-gnupg', 'gnupg', '__version__'),
('mysql-python', 'MySQLdb', '__version__'),
('cherrypy', 'cherrypy', '__version__'),
('docker-py', 'docker', '__version__'),
]

if include_salt_cloud:
libs.append(
(u'Apache Libcloud', u'libcloud', u'__version__'),
('Apache Libcloud', 'libcloud', '__version__'),
)

for name, imp, attr in libs:

@ -616,7 +614,7 @@ def dependency_information(include_salt_cloud=False):
if callable(version):
version = version()
if isinstance(version, (tuple, list)):
version = u'.'.join(map(str, version))
version = '.'.join(map(str, version))
yield name, version
except Exception:
yield name, None
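The probing loop above (partially elided by the diff) follows a common pattern: import the module, read a version attribute, call it if it is callable, and join tuples into a dotted string. A condensed standalone version:

import importlib

def probe_version(mod_name, attr):
    try:
        version = getattr(importlib.import_module(mod_name), attr)
        if callable(version):
            version = version()
        if isinstance(version, (tuple, list)):
            version = '.'.join(map(str, version))
        return version
    except Exception:
        return None

print(probe_version('re', '__version__'))     # e.g. '2.2.1'
print(probe_version('not_installed', 'x'))    # None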
@ -635,26 +633,26 @@ def system_information():
win_ver = platform.win32_ver()

if lin_ver[0]:
return u' '.join(lin_ver)
return ' '.join(lin_ver)
elif mac_ver[0]:
if isinstance(mac_ver[1], (tuple, list)) and u''.join(mac_ver[1]):
return u' '.join([mac_ver[0], u'.'.join(mac_ver[1]), mac_ver[2]])
if isinstance(mac_ver[1], (tuple, list)) and ''.join(mac_ver[1]):
return ' '.join([mac_ver[0], '.'.join(mac_ver[1]), mac_ver[2]])
else:
return u' '.join([mac_ver[0], mac_ver[2]])
return ' '.join([mac_ver[0], mac_ver[2]])
elif win_ver[0]:
return u' '.join(win_ver)
return ' '.join(win_ver)
else:
return u''
return ''

version = system_version()
release = platform.release()
if platform.win32_ver()[0]:
import win32api # pylint: disable=3rd-party-module-not-gated
server = {u'Vista': u'2008Server',
u'7': u'2008ServerR2',
u'8': u'2012Server',
u'8.1': u'2012ServerR2',
u'10': u'2016Server'}
server = {'Vista': '2008Server',
'7': '2008ServerR2',
'8': '2012Server',
'8.1': '2012ServerR2',
'10': '2016Server'}
# Starting with Python 2.7.12 and 3.5.2 the `platform.uname()` function
# started reporting the Desktop version instead of the Server version on
# Server versions of Windows, so we need to look those up

@ -668,12 +666,12 @@ def system_information():
version = ' '.join([release, ver, sp, extra])

system = [
(u'system', platform.system()),
(u'dist', u' '.join(linux_distribution(full_distribution_name=False))),
(u'release', release),
(u'machine', platform.machine()),
(u'version', version),
(u'locale', locale.getpreferredencoding()),
('system', platform.system()),
('dist', ' '.join(linux_distribution(full_distribution_name=False))),
('release', release),
('machine', platform.machine()),
('version', version),
('locale', locale.getpreferredencoding()),
]

for name, attr in system:
@ -689,9 +687,9 @@ def versions_information(include_salt_cloud=False):
lib_info = list(dependency_information(include_salt_cloud))
sys_info = list(system_information())

return {u'Salt Version': dict(salt_info),
u'Dependency Versions': dict(lib_info),
u'System Versions': dict(sys_info)}
return {'Salt Version': dict(salt_info),
'Dependency Versions': dict(lib_info),
'System Versions': dict(sys_info)}


def versions_report(include_salt_cloud=False):

@ -700,21 +698,21 @@ def versions_report(include_salt_cloud=False):
'''
ver_info = versions_information(include_salt_cloud)

lib_pad = max(len(name) for name in ver_info[u'Dependency Versions'])
sys_pad = max(len(name) for name in ver_info[u'System Versions'])
lib_pad = max(len(name) for name in ver_info['Dependency Versions'])
sys_pad = max(len(name) for name in ver_info['System Versions'])
padding = max(lib_pad, sys_pad) + 1

fmt = u'{0:>{pad}}: {1}'
fmt = '{0:>{pad}}: {1}'
info = []
for ver_type in (u'Salt Version', u'Dependency Versions', u'System Versions'):
info.append(u'{0}:'.format(ver_type))
for ver_type in ('Salt Version', 'Dependency Versions', 'System Versions'):
info.append('{0}:'.format(ver_type))
# List dependencies in alphabetical, case insensitive order
for name in sorted(ver_info[ver_type], key=lambda x: x.lower()):
ver = fmt.format(name,
ver_info[ver_type][name] or u'Not Installed',
ver_info[ver_type][name] or 'Not Installed',
pad=padding)
info.append(ver)
info.append(u' ')
info.append(' ')

for line in info:
yield line
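The report formatting above boils down to right-aligning every name to a shared width. A tiny self-contained illustration with made-up data:

deps = {'Jinja2': '2.10', 'PyYAML': None, 'Tornado': '4.5.3'}
padding = max(len(name) for name in deps) + 1
fmt = '{0:>{pad}}: {1}'
for name in sorted(deps, key=lambda x: x.lower()):
    # Each name is right-aligned so the colons line up in one column.
    print(fmt.format(name, deps[name] or 'Not Installed', pad=padding))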
@ -736,10 +734,10 @@ def msi_conformant_version():
month = __saltstack_version__.minor
minor = __saltstack_version__.bugfix
commi = __saltstack_version__.noc
return u'{0}.{1}.{2}.{3}'.format(year2, month, minor, commi)
return '{0}.{1}.{2}.{3}'.format(year2, month, minor, commi)

if __name__ == u'__main__':
if len(sys.argv) == 2 and sys.argv[1] == u'msi':
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[1] == 'msi':
# Building the msi requires an msi-conformant version
print(msi_conformant_version())
else:
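A hedged sketch of the MSI version scheme implied above, a four-field ProductVersion built from a two-digit year, month, bugfix and commit count; the hunk does not show how year2 is derived, so the year - 2000 step here is an assumption:

def msi_version(year, month, bugfix, commits):
    # year - 2000 is assumed; only the final four-field format is shown above.
    return '{0}.{1}.{2}.{3}'.format(year - 2000, month, bugfix, commits)

print(msi_version(2017, 7, 2, 156))  # 17.7.2.156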
@ -2053,6 +2053,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skipIf(six.PY3, 'This test will have a LOT of rewriting to support both Py2 and Py3')
# And I'm more comfortable with the author doing it - s0undt3ch
@skipIf(IS_WINDOWS, 'Don\'t know how to fix for Windows')
@skipIf(True, 'Skipped until unicode codebase conversion is completed')
def test_issue_8947_utf8_sls(self):
'''
Test some file operation with utf-8 characters on the sls
@ -428,6 +428,7 @@ class ForceImportErrorOn(object):
self.__module_names[modname] = set(entry[1:])
else:
self.__module_names[entry] = None
self.__original_import = builtins.__import__
self.patcher = patch.object(builtins, '__import__', self.__fake_import__)

def patch_import_function(self):
@ -68,7 +68,8 @@ except ImportError as exc:

class MagicMock(object):

__name__ = '{0}.fakemock'.format(__name__) # future lint: disable=non-unicode-string
# __name__ can't be assigned a unicode
__name__ = str('{0}.fakemock').format(__name__) # future lint: disable=blacklisted-function

def __init__(self, *args, **kwargs):
pass
@ -22,7 +22,7 @@
# pylint: disable=unused-import,blacklisted-module,deprecated-method

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import os
import sys
import logging
@ -92,7 +92,7 @@ class InitTestCase(TestCase, LoaderModuleMockMixin):
with self.assertRaises(salt.exceptions.InvalidConfigError) as \
excinfo:
esxcluster.init(self.opts_userpass)
self.assertEqual(excinfo.exception.strerror.message,
self.assertEqual(excinfo.exception.strerror,
'Validation Error')

def test_no_username(self):

@ -90,7 +90,7 @@ class InitTestCase(TestCase, LoaderModuleMockMixin):
with self.assertRaises(salt.exceptions.InvalidConfigError) as \
excinfo:
esxdatacenter.init(self.opts_userpass)
self.assertEqual(excinfo.exception.strerror.message,
self.assertEqual(excinfo.exception.strerror,
'Validation Error')

def test_no_username(self):
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
from jinja2 import Environment, DictLoader, exceptions
import ast
import copy

@ -374,9 +374,7 @@ class TestGetTemplate(TestCase):
salt=self.local_salt
)
with salt.utils.files.fopen(out['data']) as fp:
result = fp.read()
if six.PY2:
result = salt.utils.stringutils.to_unicode(result)
result = fp.read().decode(__salt_system_encoding__)
self.assertEqual(salt.utils.stringutils.to_unicode('Assunção' + os.linesep), result)

def test_get_context_has_enough_context(self):

@ -420,7 +418,7 @@ class TestGetTemplate(TestCase):
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)
)

@skipIf(six.PY3, 'Not applicable to Python 3: skipping.')
@skipIf(six.PY3, 'Not applicable to Python 3')
@skipIf(NO_MOCK, NO_MOCK_REASON)
def test_render_with_unicode_syntax_error(self):
with patch.object(builtins, '__salt_system_encoding__', 'utf-8'):

@ -437,8 +435,10 @@ class TestGetTemplate(TestCase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
def test_render_with_utf8_syntax_error(self):
with patch.object(builtins, '__salt_system_encoding__', 'utf-8'):
template = 'hello\n\n{{ bad\n\nfoo\xed\x95\x9c'
expected = r'.*---\nhello\n\n{{ bad\n\nfoo\xed\x95\x9c <======================\n---'
template = 'hello\n\n{{ bad\n\nfoo한'
expected = salt.utils.stringutils.to_str(
r'.*---\nhello\n\n{{ bad\n\nfoo한 <======================\n---'
)
self.assertRaisesRegex(
SaltRenderError,
expected,
@ -809,7 +809,7 @@ class TestCustomExtensions(TestCase):
def test_nested_structures(self):
env = Environment(extensions=[SerializerExtension])
rendered = env.from_string('{{ data }}').render(data="foo")
self.assertEqual(rendered, u"foo")
self.assertEqual(rendered, "foo")

data = OrderedDict([
('foo', OrderedDict([

@ -820,7 +820,7 @@ class TestCustomExtensions(TestCase):
])

rendered = env.from_string('{{ data }}').render(data=data)
self.assertEqual(rendered, u"{'foo': {'bar': 'baz', 'qux': 42}}")
self.assertEqual(rendered, u"{u'foo': {u'bar': u'baz', u'qux': 42}}")

rendered = env.from_string('{{ data }}').render(data=[
OrderedDict(

@ -830,7 +830,7 @@ class TestCustomExtensions(TestCase):
baz=42,
)
])
self.assertEqual(rendered, u"[{'foo': 'bar'}, {'baz': 42}]")
self.assertEqual(rendered, u"[{'foo': u'bar'}, {'baz': 42}]")

def test_sequence(self):
env = Environment()
@ -6,7 +6,7 @@ tests.unit.utils.test_configparser
Test the funcs in the custom parsers in salt.utils.configparser
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import copy
import errno
import logging

@ -25,7 +25,7 @@ import salt.utils.configparser

# The user.name param here is intentionally indented with spaces instead of a
# tab to test that we properly load a file with mixed indentation.
ORIG_CONFIG = u'''[user]
ORIG_CONFIG = '''[user]
name = Артём Анисимов
\temail = foo@bar.com
[remote "origin"]

@ -46,7 +46,7 @@ ORIG_CONFIG = u'''[user]
\tgraph = log --all --decorate --oneline --graph
\thist = log --pretty=format:\\"%h %ad | %s%d [%an]\\" --graph --date=short
[http]
\tsslverify = false'''.split(u'\n') # future lint: disable=non-unicode-string
\tsslverify = false'''.split('\n')


class TestGitConfigParser(TestCase):
@ -54,9 +54,9 @@ class TestGitConfigParser(TestCase):
Tests for salt.utils.configparser.GitConfigParser
'''
maxDiff = None
orig_config = os.path.join(TMP, u'test_gitconfig.orig')
new_config = os.path.join(TMP, u'test_gitconfig.new')
remote = u'remote "origin"'
orig_config = os.path.join(TMP, 'test_gitconfig.orig')
new_config = os.path.join(TMP, 'test_gitconfig.new')
remote = 'remote "origin"'

def tearDown(self):
del self.conf

@ -68,10 +68,10 @@ class TestGitConfigParser(TestCase):

def setUp(self):
if not os.path.exists(self.orig_config):
with salt.utils.files.fopen(self.orig_config, u'wb') as fp_:
with salt.utils.files.fopen(self.orig_config, 'wb') as fp_:
fp_.write(
salt.utils.stringutils.to_bytes(
u'\n'.join(ORIG_CONFIG)
'\n'.join(ORIG_CONFIG)
)
)
self.conf = salt.utils.configparser.GitConfigParser()

@ -94,12 +94,12 @@ class TestGitConfigParser(TestCase):
ret = copy.copy(lines)
for i, _ in enumerate(ret):
if ret[i].startswith(salt.utils.configparser.GitConfigParser.SPACEINDENT):
ret[i] = ret[i].replace(salt.utils.configparser.GitConfigParser.SPACEINDENT, u'\t')
ret[i] = ret[i].replace(salt.utils.configparser.GitConfigParser.SPACEINDENT, '\t')
return ret

@staticmethod
def get_lines(path):
with salt.utils.files.fopen(path, u'r') as fp_:
with salt.utils.files.fopen(path, 'r') as fp_:
return salt.utils.stringutils.to_unicode(fp_.read()).splitlines()

def _test_write(self, mode):
@ -115,16 +115,16 @@ class TestGitConfigParser(TestCase):
Test getting an option's value
'''
# Numeric values should be loaded as strings
self.assertEqual(self.conf.get(u'color "diff"', u'old'), u'196')
self.assertEqual(self.conf.get('color "diff"', 'old'), '196')
# Complex strings should be loaded with their literal quotes and
# slashes intact
self.assertEqual(
self.conf.get(u'alias', u'modified'),
u"""! git status --porcelain | awk 'match($1, "M"){print $2}'"""
self.conf.get('alias', 'modified'),
"""! git status --porcelain | awk 'match($1, "M"){print $2}'"""
)
# future lint: disable=non-unicode-string
self.assertEqual(
self.conf.get(u'alias', u'hist'),
self.conf.get('alias', 'hist'),
salt.utils.stringutils.to_unicode(
r"""log --pretty=format:\"%h %ad | %s%d [%an]\" --graph --date=short"""
)

@ -137,22 +137,22 @@ class TestGitConfigParser(TestCase):
using spaces instead of a tab. Additionally, this tests that the value
was loaded as a unicode type on PY2.
'''
self.assertEqual(self.conf.get(u'user', u'name'), u'Артём Анисимов')
self.assertEqual(self.conf.get('user', 'name'), u'Артём Анисимов')

def test_set_new_option(self):
'''
Test setting a new option in an existing section
'''
self.conf.set(u'http', u'useragent', u'myawesomeagent')
self.assertEqual(self.conf.get(u'http', u'useragent'), u'myawesomeagent')
self.conf.set('http', 'useragent', 'myawesomeagent')
self.assertEqual(self.conf.get('http', 'useragent'), 'myawesomeagent')

def test_add_section(self):
'''
Test adding a section and adding an item to that section
'''
self.conf.add_section(u'foo')
self.conf.set(u'foo', u'bar', u'baz')
self.assertEqual(self.conf.get(u'foo', u'bar'), u'baz')
self.conf.add_section('foo')
self.conf.set('foo', 'bar', 'baz')
self.assertEqual(self.conf.get('foo', 'bar'), 'baz')

def test_replace_option(self):
'''
@ -160,40 +160,40 @@ class TestGitConfigParser(TestCase):
'''
# We're also testing the normalization of key names, here. Setting
# "sslVerify" should actually set an "sslverify" option.
self.conf.set(u'http', u'sslVerify', u'true')
self.assertEqual(self.conf.get(u'http', u'sslverify'), u'true')
self.conf.set('http', 'sslVerify', 'true')
self.assertEqual(self.conf.get('http', 'sslverify'), 'true')

def test_set_multivar(self):
'''
Test setting a multivar and then writing the resulting file
'''
orig_refspec = u'+refs/heads/*:refs/remotes/origin/*'
new_refspec = u'+refs/tags/*:refs/tags/*'
orig_refspec = '+refs/heads/*:refs/remotes/origin/*'
new_refspec = '+refs/tags/*:refs/tags/*'
# Make sure that the original value is a string
self.assertEqual(
self.conf.get(self.remote, u'fetch'),
self.conf.get(self.remote, 'fetch'),
orig_refspec
)
# Add another refspec
self.conf.set_multivar(self.remote, u'fetch', new_refspec)
self.conf.set_multivar(self.remote, 'fetch', new_refspec)
# The value should now be a list
self.assertEqual(
self.conf.get(self.remote, u'fetch'),
self.conf.get(self.remote, 'fetch'),
[orig_refspec, new_refspec]
)
# Write the config object to a file
with salt.utils.files.fopen(self.new_config, u'w') as fp_:
with salt.utils.files.fopen(self.new_config, 'w') as fp_:
self.conf.write(fp_)
# Confirm that the new file was written correctly
expected = self.fix_indent(ORIG_CONFIG)
expected.insert(6, u'\tfetch = %s' % new_refspec) # pylint: disable=string-substitution-usage-error
expected.insert(6, '\tfetch = %s' % new_refspec) # pylint: disable=string-substitution-usage-error
self.assertEqual(self.get_lines(self.new_config), expected)

def test_remove_option(self):
'''
test removing an option, including all items from a multivar
'''
for item in (u'fetch', u'pushurl'):
for item in ('fetch', 'pushurl'):
self.conf.remove_option(self.remote, item)
# To confirm that the option is now gone, a get should raise an
# NoOptionError exception.
|
|||
'''
|
||||
test removing an option, including all items from a multivar
|
||||
'''
|
||||
orig_refspec = u'+refs/heads/*:refs/remotes/origin/*'
|
||||
new_refspec_1 = u'+refs/tags/*:refs/tags/*'
|
||||
new_refspec_2 = u'+refs/foo/*:refs/foo/*'
|
||||
orig_refspec = '+refs/heads/*:refs/remotes/origin/*'
|
||||
new_refspec_1 = '+refs/tags/*:refs/tags/*'
|
||||
new_refspec_2 = '+refs/foo/*:refs/foo/*'
|
||||
# First, add both refspecs
|
||||
self.conf.set_multivar(self.remote, u'fetch', new_refspec_1)
|
||||
self.conf.set_multivar(self.remote, u'fetch', new_refspec_2)
|
||||
self.conf.set_multivar(self.remote, 'fetch', new_refspec_1)
|
||||
self.conf.set_multivar(self.remote, 'fetch', new_refspec_2)
|
||||
# Make sure that all three values are there
|
||||
self.assertEqual(
|
||||
self.conf.get(self.remote, u'fetch'),
|
||||
self.conf.get(self.remote, 'fetch'),
|
||||
[orig_refspec, new_refspec_1, new_refspec_2]
|
||||
)
|
||||
# If the regex doesn't match, no items should be removed
|
||||
self.assertFalse(
|
||||
self.conf.remove_option_regexp(
|
||||
self.remote,
|
||||
u'fetch',
|
||||
'fetch',
|
||||
salt.utils.stringutils.to_unicode(r'\d{7,10}') # future lint: disable=non-unicode-string
|
||||
)
|
||||
)
|
||||
# Make sure that all three values are still there (since none should
|
||||
# have been removed)
|
||||
self.assertEqual(
|
||||
self.conf.get(self.remote, u'fetch'),
|
||||
self.conf.get(self.remote, 'fetch'),
|
||||
[orig_refspec, new_refspec_1, new_refspec_2]
|
||||
)
|
||||
# Remove one of the values
|
||||
self.assertTrue(
|
||||
self.conf.remove_option_regexp(self.remote, u'fetch', u'tags'))
|
||||
self.conf.remove_option_regexp(self.remote, 'fetch', 'tags'))
|
||||
# Confirm that the value is gone
|
||||
self.assertEqual(
|
||||
self.conf.get(self.remote, u'fetch'),
|
||||
self.conf.get(self.remote, 'fetch'),
|
||||
[orig_refspec, new_refspec_2]
|
||||
)
|
||||
# Remove the other one we added earlier
|
||||
self.assertTrue(
|
||||
self.conf.remove_option_regexp(self.remote, u'fetch', u'foo'))
|
||||
self.conf.remove_option_regexp(self.remote, 'fetch', 'foo'))
|
||||
# Since the option now only has one value, it should be a string
|
||||
self.assertEqual(self.conf.get(self.remote, u'fetch'), orig_refspec)
|
||||
self.assertEqual(self.conf.get(self.remote, 'fetch'), orig_refspec)
|
||||
# Remove the last remaining option
|
||||
self.assertTrue(
|
||||
self.conf.remove_option_regexp(self.remote, u'fetch', u'heads'))
|
||||
self.conf.remove_option_regexp(self.remote, 'fetch', 'heads'))
|
||||
# Trying to do a get now should raise an exception
|
||||
self.assertRaises(
|
||||
salt.utils.configparser.NoOptionError,
|
||||
self.conf.get,
|
||||
self.remote,
|
||||
u'fetch')
|
||||
'fetch')
|
||||
|
||||
def test_write(self):
|
||||
'''
|
||||
Test writing using non-binary filehandle
|
||||
'''
|
||||
self._test_write(mode=u'w')
|
||||
self._test_write(mode='w')
|
||||
|
||||
def test_write_binary(self):
|
||||
'''
|
||||
Test writing using binary filehandle
|
||||
'''
|
||||
self._test_write(mode=u'wb')
|
||||
self._test_write(mode='wb')
|
||||
|
|
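Based on the behaviour these tests exercise, a hypothetical usage sketch of the multivar API; the read() call and the config path below are assumptions, while get(), set_multivar() and remove_option_regexp() are the methods shown in the tests above:

import salt.utils.configparser

conf = salt.utils.configparser.GitConfigParser()
conf.read('/path/to/.git/config')  # assumed entry point; path is a placeholder

remote = 'remote "origin"'
# A single value comes back as a string...
print(conf.get(remote, 'fetch'))
# ...and a second set_multivar() turns it into a list of refspecs.
conf.set_multivar(remote, 'fetch', '+refs/tags/*:refs/tags/*')
print(conf.get(remote, 'fetch'))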