Merge branch '2014.7' into merge-forward

Conflicts:
	salt/modules/glusterfs.py
	salt/modules/macports.py
	salt/modules/makeconf.py
	salt/modules/mdadm.py
	salt/modules/smartos_vmadm.py
	salt/modules/zypper.py
	salt/utils/event.py
	tests/integration/netapi/rest_tornado/test_app.py
	tests/unit/netapi/rest_tornado/test_handlers.py
Colton Myers 2014-11-21 12:05:32 -07:00
commit dc43840cee
43 changed files with 450 additions and 281 deletions


@ -170,11 +170,11 @@ remotely.
When starting up, salt minions connect _back_ to a master defined in the minion
config file. The connect to two ports on the master:
* TCP:4505
* TCP: 4505
This is the connection to the master Publisher. It is on this port that
the minion receives jobs from the master.
* TCP:4506
* TCP: 4506
This is the connection to the master ReqServer. It is on this port that
the minion sends job results back to the master.
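The two ports described above can be spot-checked from a minion host with a few lines of Python. This is only an illustrative sketch; it assumes the master resolves as 'salt' and listens on the default ports.

import socket

for port in (4505, 4506):
    # 4505 = Publisher (jobs out to minions), 4506 = ReqServer (returns back in)
    sock = socket.create_connection(('salt', port), timeout=5)
    print('reached master port {0}'.format(port))
    sock.close()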


@ -310,7 +310,7 @@ the :command:`salt-api` script and init scripts remain intact.
``salt-api`` has always provided Yet Another Pluggable Interface to Salt (TM)
in the form of "netapi" modules. These are modules that bind to a port and
start a service. Like many of Salt's other module types, netapi modules often
have library and configuration dependencies. See the documentaiton for each
have library and configuration dependencies. See the documentation for each
module for instructions.
.. seealso:: :ref:`The full list of netapi modules. <all-netapi-modules>`
@ -552,8 +552,8 @@ New Salt-Cloud Providers
Deprecations
============
:mod:`salt.modules.virturalenv_mod`
-----------------------------------
:mod:`salt.modules.virtualenv_mod`
----------------------------------
- Removed deprecated ``memoize`` function from ``salt/utils/__init__.py`` (deprecated)
- Removed deprecated ``no_site_packages`` argument from ``create`` function (deprecated)


@ -1389,7 +1389,7 @@ class LocalClient(object):
master_uri=master_uri)
try:
payload = sreq.send(payload_kwargs)
payload = sreq.send(payload_kwargs, timeout=timeout)
except SaltReqTimeoutError:
log.error(
'Salt request timed out. If this error persists, '


@ -66,7 +66,7 @@ class AsyncClientMixin(object):
client = None
tag_prefix = None
def _proc_function(self, fun, low, user, tag, jid, fire_event=True):
def _proc_function(self, fun, low, user, tag, jid):
'''
Run this method in a multiprocess target to execute the function in a
multiprocess and fire the return data on the event bus
@ -76,14 +76,13 @@ class AsyncClientMixin(object):
'jid': jid,
'user': user,
}
if fire_event:
event = salt.utils.event.get_event(
'master',
self.opts['sock_dir'],
self.opts['transport'],
opts=self.opts,
listen=False)
event.fire_event(data, tagify('new', base=tag))
event = salt.utils.event.get_event(
'master',
self.opts['sock_dir'],
self.opts['transport'],
opts=self.opts,
listen=False)
event.fire_event(data, tagify('new', base=tag))
try:
data['return'] = self.low(fun, low)
@ -98,13 +97,12 @@ class AsyncClientMixin(object):
data['success'] = False
data['user'] = user
if fire_event:
event.fire_event(data, tagify('ret', base=tag))
# if we fired an event, make sure to delete the event object.
# This will ensure that we call destroy, which will do the 0MQ linger
del event
event.fire_event(data, tagify('ret', base=tag))
# if we fired an event, make sure to delete the event object.
# This will ensure that we call destroy, which will do the 0MQ linger
del event
def async(self, fun, low, user='UNKNOWN', fire_event=True):
def async(self, fun, low, user='UNKNOWN'):
'''
Execute the function in a multiprocess and return the event tag to use
to watch for the return
@ -114,7 +112,6 @@ class AsyncClientMixin(object):
proc = multiprocessing.Process(
target=self._proc_function,
args=(fun, low, user, tag, jid),
kwargs={'fire_event': fire_event})
args=(fun, low, user, tag, jid))
proc.start()
return {'tag': tag, 'jid': jid}
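A hedged sketch of how a caller might consume the {'tag': ..., 'jid': ...} dict that async() returns above, listening for the 'ret' event that _proc_function fires at the end of the run. The RunnerClient usage and the master config path are assumptions, not part of this hunk.

import salt.config
import salt.runner
import salt.utils.event
from salt.utils.event import tagify

opts = salt.config.master_config('/etc/salt/master')  # assumed config location
client = salt.runner.RunnerClient(opts)

job = client.async('jobs.list_jobs', {'fun': 'jobs.list_jobs'})
event = salt.utils.event.get_event(
    'master', opts['sock_dir'], opts['transport'], opts=opts, listen=True)
# block up to 30 seconds for the 'ret' event published under the returned tag
ret = event.get_event(wait=30, tag=tagify('ret', base=job['tag']))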


@ -216,6 +216,7 @@ class SSH(object):
self.serial = salt.payload.Serial(opts)
self.returners = salt.loader.returners(self.opts, {})
self.fsclient = salt.fileclient.FSClient(self.opts)
self.thin = salt.utils.thin.gen_thin(self.opts['cachedir'])
self.mods = mod_data(self.fsclient)
def get_pubkey(self):
@ -274,6 +275,7 @@ class SSH(object):
host,
mods=self.mods,
fsclient=self.fsclient,
thin=self.thin,
**target)
if salt.utils.which('ssh-copy-id'):
# we have ssh-copy-id, use it!
@ -288,6 +290,7 @@ class SSH(object):
host,
mods=self.mods,
fsclient=self.fsclient,
thin=self.thin,
**target)
stdout, stderr, retcode = single.cmd_block()
try:
@ -312,6 +315,7 @@ class SSH(object):
host,
mods=self.mods,
fsclient=self.fsclient,
thin=self.thin,
**target)
ret = {'id': single.id}
stdout, stderr, retcode = single.run()
@ -496,6 +500,7 @@ class Single(object):
tty=False,
mods=None,
fsclient=None,
thin=None,
**kwargs):
self.opts = opts
if kwargs.get('wipe'):
@ -548,6 +553,7 @@ class Single(object):
self.serial = salt.payload.Serial(opts)
self.wfuncs = salt.loader.ssh_wrapper(opts, None, self.context)
self.shell = salt.client.ssh.shell.Shell(opts, **args)
self.thin = thin if thin else salt.utils.thin.thin_path(opts['cachedir'])
def __arg_comps(self):
'''
@ -587,13 +593,8 @@ class Single(object):
'''
Deploy salt-thin
'''
if self.opts.get('_caller_cachedir'):
cachedir = self.opts.get('_caller_cachedir')
else:
cachedir = self.opts['cachedir']
thin = salt.utils.thin.gen_thin(cachedir)
self.shell.send(
thin,
self.thin,
os.path.join(self.thin_dir, 'salt-thin.tgz'),
)
self.deploy_ext()


@ -97,7 +97,6 @@ def lowstate_file_refs(chunks, extras=''):
for env in refs:
for x in extra_refs:
refs[env].append([x])
return refs
@ -160,10 +159,13 @@ def prep_trans_tar(file_client, chunks, file_refs, pillar=None):
files = file_client.cache_dir(name, saltenv)
if files:
for filename in files:
fn = filename[filename.find(short) + len(short):]
if fn.startswith('/'):
fn.strip('/')
tgt = os.path.join(
env_root,
short,
filename[filename.find(short) + len(short) + 1:],
fn,
)
tgt_dir = os.path.dirname(tgt)
if not os.path.isdir(tgt_dir):


@ -218,6 +218,8 @@ VALID_OPTS = {
'publish_session': int,
'reactor': list,
'reactor_refresh_interval': int,
'reactor_worker_threads': int,
'reactor_worker_hwm': int,
'serial': str,
'search': str,
'search_index_interval': int,
@ -537,6 +539,8 @@ DEFAULT_MASTER_OPTS = {
'range_server': 'range:80',
'reactor': [],
'reactor_refresh_interval': 60,
'reactor_worker_threads': 10,
'reactor_worker_hwm': 10000,
'serial': 'msgpack',
'state_verbose': True,
'state_output': 'full',
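For reference, a short sketch of what the two new options added above control once the reactor wires them into its thread pool later in this commit; the values shown are simply the shipped defaults.

master_overrides = {
    # number of worker threads servicing runner/wheel reactions
    'reactor_worker_threads': 10,
    # bound on the reaction queue; when it is full, new reactions are dropped
    # (ThreadPool.fire_async returns False) until a worker frees up
    'reactor_worker_hwm': 10000,
}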


@ -114,6 +114,10 @@ def gen_keys(keydir, keyname, keysize, user=None):
pub = '{0}.pub'.format(base)
gen = RSA.gen_key(keysize, 65537, callback=lambda x, y, z: None)
if os.path.isfile(priv):
# Between first checking and the generation another process has made
# a key! Use the winner's key
return priv
cumask = os.umask(191)
gen.save_key(priv, None)
os.umask(cumask)


@ -408,7 +408,7 @@ IPV4_ATTR_MAP = {
'broadcast': __ipv4_quad,
'metric': __int,
'gateway': __ipv4_quad, # supports a colon-delimited list
'pointtopoint': __ipv4_quad,
'pointopoint': __ipv4_quad,
'hwaddress': __mac,
'mtu': __int,
'scope': __within(['global', 'link', 'host'], dtype=str),


@ -57,7 +57,7 @@ def available():
salt '*' kmod.available
'''
ret = []
for path in __salt__['cmd.run']('ls /boot/kernel | grep .ko$').splitlines():
for path in __salt__['file.find']('/boot/kernel', name='*.ko$'):
bpath = os.path.basename(path)
comps = bpath.split('.')
if 'ko' in comps:


@ -456,7 +456,7 @@ def _rehash():
Recomputes internal hash table for the PATH variable. Use whenever a new
command is created during the current session.
'''
shell = __salt__['cmd.run']('echo $SHELL', output_loglevel='trace')
shell = __salt__['environ.get']('SHELL', output_loglevel='trace')
if shell.split('/')[-1] in ('csh', 'tcsh'):
__salt__['cmd.run']('rehash', output_loglevel='trace')


@ -9,6 +9,10 @@ import logging
# Import 3rd-party libs
from salt.ext.six.moves import range, shlex_quote as _cmd_quote # pylint: disable=import-error,redefined-builtin
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote
# Import salt libs
import salt.utils


@ -455,8 +455,9 @@ def save(filename=None, family='ipv4'):
parent_dir = os.path.dirname(filename)
if not os.path.isdir(parent_dir):
os.makedirs(parent_dir)
cmd = '{0}-save > {1}'.format(_iptables_cmd(family), filename)
out = __salt__['cmd.run'](cmd)
cmd = '{0}-save'.format(_iptables_cmd(family))
ipt = __salt__['cmd.run'](cmd)
out = __salt__['file.write'](filename, ipt)
return out


@ -19,6 +19,11 @@ import salt.utils
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.ext.six import string_types
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote
log = logging.getLogger(__name__)
# Define the module's virtual name
@ -48,9 +53,11 @@ def _dscl(cmd, ctype='create'):
source, noderoot = '.', ''
else:
source, noderoot = 'localhost', '/Local/Default'
# Note, it's OK that cmd is not quoted here, we clean it up below
return __salt__['cmd.run_all'](
'dscl {0} -{1} {2}{3}'.format(source, ctype, noderoot, cmd),
output_loglevel='quiet' if ctype == 'passwd' else False
output_loglevel='quiet' if ctype == 'passwd' else 'warning'
)
@ -102,18 +109,23 @@ def add(name,
if not isinstance(gid, int):
raise SaltInvocationError('gid must be an integer')
_dscl('/Users/{0} UniqueID {1!r}'.format(name, uid))
_dscl('/Users/{0} PrimaryGroupID {1!r}'.format(name, gid))
_dscl('/Users/{0} UserShell {1!r}'.format(name, shell))
_dscl('/Users/{0} NFSHomeDirectory {1!r}'.format(name, home))
_dscl('/Users/{0} RealName {1!r}'.format(name, fullname))
_dscl('/Users/{0} UniqueID {1!r}'.format(_cmd_quote(name), _cmd_quote(uid)))
_dscl('/Users/{0} PrimaryGroupID {1!r}'.format(_cmd_quote(name),
_cmd_quote(gid)))
_dscl('/Users/{0} UserShell {1!r}'.format(_cmd_quote(name),
_cmd_quote(shell)))
_dscl('/Users/{0} NFSHomeDirectory {1!r}'.format(_cmd_quote(name),
_cmd_quote(home)))
_dscl('/Users/{0} RealName {1!r}'.format(_cmd_quote(name),
_cmd_quote(fullname)))
# Set random password, since without a password the account will not be
# available. TODO: add shadow module
randpass = ''.join(
random.SystemRandom().choice(string.letters + string.digits) for x in range(20)
)
_dscl('/Users/{0} {1!r}'.format(name, randpass), ctype='passwd')
_dscl('/Users/{0} {1!r}'.format(_cmd_quote(name),
_cmd_quote(randpass)), ctype='passwd')
# dscl buffers changes, sleep before setting group membership
time.sleep(1)
@ -142,7 +154,7 @@ def delete(name, *args):
# group membership is managed separately from users and an entry for the
# user will persist even after the user is removed.
chgroups(name, ())
return _dscl('/Users/{0}'.format(name), ctype='delete')['retcode'] == 0
return _dscl('/Users/{0}'.format(_cmd_quote(name)), ctype='delete')['retcode'] == 0
def getent(refresh=False):
@ -183,7 +195,9 @@ def chuid(name, uid):
if uid == pre_info['uid']:
return True
_dscl(
'/Users/{0} UniqueID {1!r} {2!r}'.format(name, pre_info['uid'], uid),
'/Users/{0} UniqueID {1!r} {2!r}'.format(_cmd_quote(name),
_cmd_quote(pre_info['uid']),
uid),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
@ -211,8 +225,8 @@ def chgid(name, gid):
return True
_dscl(
'/Users/{0} PrimaryGroupID {1!r} {2!r}'.format(
name, pre_info['gid'], gid
),
_cmd_quote(name), _cmd_quote(pre_info['gid']),
_cmd_quote(gid)),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
@ -238,8 +252,9 @@ def chshell(name, shell):
return True
_dscl(
'/Users/{0} UserShell {1!r} {2!r}'.format(
name, pre_info['shell'], shell
),
_cmd_quote(name),
_cmd_quote(pre_info['shell']),
_cmd_quote(shell)),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
@ -265,8 +280,9 @@ def chhome(name, home):
return True
_dscl(
'/Users/{0} NFSHomeDirectory {1!r} {2!r}'.format(
name, pre_info['home'], home
),
_cmd_quote(name),
_cmd_quote(pre_info['home']),
_cmd_quote(home)),
ctype='change'
)
# dscl buffers changes, sleep 1 second before checking if new value
@ -292,7 +308,8 @@ def chfullname(name, fullname):
if fullname == pre_info['fullname']:
return True
_dscl(
'/Users/{0} RealName {1!r}'.format(name, fullname),
'/Users/{0} RealName {1!r}'.format(_cmd_quote(name),
_cmd_quote(fullname)),
# use a "create" command, because a "change" command would fail if
# current fullname is an empty string. The "create" will just overwrite
# this field.
@ -347,14 +364,16 @@ def chgroups(name, groups, append=False):
# Add groups from which user is missing
for group in desired - ugrps:
_dscl(
'/Groups/{0} GroupMembership {1}'.format(group, name),
'/Groups/{0} GroupMembership {1}'.format(_cmd_quote(group),
_cmd_quote(name)),
ctype='append'
)
if not append:
# Remove from extra groups
for group in ugrps - desired:
_dscl(
'/Groups/{0} GroupMembership {1}'.format(group, name),
'/Groups/{0} GroupMembership {1}'.format(_cmd_quote(group),
_cmd_quote(name)),
ctype='delete'
)
time.sleep(1)
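Every user-supplied value in the _dscl calls above is passed through _cmd_quote before the string reaches cmd.run_all. A minimal illustration of what that guards against, using the same shlex/pipes fallback import added at the top of the module:

try:
    from shlex import quote as _cmd_quote   # Python 3
except ImportError:
    from pipes import quote as _cmd_quote   # Python 2

# a hostile value collapses to one harmless shell word
print(_cmd_quote("bob'; rm -rf /tmp/x"))
# output: 'bob'"'"'; rm -rf /tmp/x'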


@ -113,8 +113,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
return ret
ret = {}
cmd = 'port installed'
out = __salt__['cmd.run'](cmd, output_loglevel='trace')
cmd = ['port', 'installed']
out = __salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False)
for line in out.splitlines():
try:
name, version_num, active = re.split(r'\s+', line.lstrip())[0:3]
@ -223,8 +223,8 @@ def remove(name=None, pkgs=None, **kwargs):
targets = [x for x in pkg_params if x in old]
if not targets:
return {}
cmd = 'port uninstall {0}'.format(' '.join(targets))
__salt__['cmd.run_all'](cmd, output_loglevel='trace')
cmd = ['port', 'uninstall'].append(targets)
__salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return __salt__['saltutil.compare_dicts'](old, new)
@ -318,12 +318,10 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
for pname, pparams in pkg_params.items():
formulas_array.append(pname + (pparams or ''))
formulas = ' '.join(formulas_array)
old = list_pkgs()
cmd = 'port install {0}'.format(formulas)
cmd = ['port', 'install'].append(formulas_array)
__salt__['cmd.run'](cmd, output_loglevel='trace')
__salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
@ -403,9 +401,13 @@ def upgrade(refresh=True): # pylint: disable=W0613
'comment': '',
}
old = list_pkgs()
if refresh:
refresh_db()
__salt__['cmd.run_all']('port upgrade outdated', output_loglevel='trace')
old = list_pkgs()
cmd = ['port', 'upgrade', 'outdated']
__salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)


@ -43,10 +43,8 @@ def _add_var(var, value):
fullvar = '{0}="{1}"'.format(var, value)
if __salt__['file.contains'](makeconf, layman):
# TODO perhaps make this a function in the file module?
cmd = r"sed -i '/{0}/ i\{1}' {2}".format(
layman.replace("/", "\\/"),
fullvar,
makeconf)
cmd = ['sed', '-i', '/{0}/'.format(layman.replace('/', '\\/')),
fullvar, makeconf]
__salt__['cmd.run'](cmd)
else:
__salt__['file.append'](makeconf, fullvar)


@ -49,7 +49,7 @@ def list_():
'''
ret = {}
for line in (__salt__['cmd.run_stdout']
('mdadm --detail --scan').splitlines()):
('mdadm --detail --scan', python_shell=False).splitlines()):
if ' ' not in line:
continue
comps = line.split()
@ -80,8 +80,8 @@ def detail(device='/dev/md0'):
msg = "Device {0} doesn't exist!"
raise CommandExecutionError(msg.format(device))
cmd = 'mdadm --detail {0}'.format(device)
for line in __salt__['cmd.run_stdout'](cmd).splitlines():
cmd = ['mdadm', '--detail', device]
for line in __salt__['cmd.run_stdout'](cmd, python_shell=False).splitlines():
if line.startswith(device):
continue
if ' ' not in line:
@ -259,8 +259,8 @@ def save_config():
salt '*' raid.save_config
'''
scan = __salt__['cmd.run']('mdadm --detail --scan').splitlines()
# Issue with mdadm and ubuntu requires removal of name and metadata tags
scan = __salt__['cmd.run']('mdadm --detail --scan', python_shell=False).split()
# Issue with mdadm and ubuntu
# REF: http://askubuntu.com/questions/209702/why-is-my-raid-dev-md1-showing-up-as-dev-md126-is-mdadm-conf-being-ignored
if __grains__['os'] == 'Ubuntu':
buggy_ubuntu_tags = ['name', 'metadata']


@ -420,6 +420,9 @@ def _grant_to_tokens(grant):
except IndexError:
break
elif phrase == 'tables':
database += token
elif phrase == 'user':
if dict_mode:
break


@ -8,6 +8,11 @@ from __future__ import absolute_import
import re
import logging
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote
# Import salt libs
import salt.utils
@ -30,7 +35,7 @@ def _pecl(command, defaults=False):
if salt.utils.is_true(defaults):
cmdline = "printf '\n' | " + cmdline
ret = __salt__['cmd.run_all'](cmdline)
ret = __salt__['cmd.run_all'](cmdline, python_shell=True)
if ret['retcode'] == 0:
return ret['stdout']
@ -62,12 +67,12 @@ def install(pecls, defaults=False, force=False, preferred_state='stable'):
salt '*' pecl.install fuse
'''
preferred_state = '-d preferred_state={0}'.format(preferred_state)
preferred_state = '-d preferred_state={0}'.format(_cmd_quote(preferred_state))
if force:
return _pecl('{0} install -f {1}'.format(preferred_state, pecls),
return _pecl('{0} install -f {1}'.format(preferred_state, _cmd_quote(pecls)),
defaults=defaults)
else:
_pecl('{0} install {1}'.format(preferred_state, pecls),
_pecl('{0} install {1}'.format(preferred_state, _cmd_quote(pecls)),
defaults=defaults)
if not isinstance(pecls, list):
pecls = [pecls]
@ -103,7 +108,7 @@ def uninstall(pecls):
salt '*' pecl.uninstall fuse
'''
return _pecl('uninstall {0}'.format(pecls))
return _pecl('uninstall {0}'.format(_cmd_quote(pecls)))
def update(pecls):
@ -119,7 +124,7 @@ def update(pecls):
salt '*' pecl.update fuse
'''
return _pecl('install -U {0}'.format(pecls))
return _pecl('install -U {0}'.format(_cmd_quote(pecls)))
def list_(channel=None):
@ -136,7 +141,7 @@ def list_(channel=None):
pecls = {}
command = 'list'
if channel:
command = '{0} -c {1}'.format(command, channel)
command = '{0} -c {1}'.format(command, _cmd_quote(channel))
lines = _pecl(command).splitlines()
lines = (l for l in lines if pecl_channel_pat.match(l))


@ -88,10 +88,7 @@ def make_pkgng_aware(jname):
cdir)
# Added args to file
cmd = 'echo "WITH_PKGNG=yes" > {0}-make.conf'.format(
os.path.join(cdir, jname))
__salt__['cmd.run'](cmd)
__salt__['file.write']('{0}-make.conf'.format(os.path.join(cdir, jname)), 'WITH_PKGNG=yes')
if os.path.isfile(os.path.join(cdir, jname) + '-make.conf'):
ret['changes'] = 'Created {0}'.format(
@ -253,8 +250,7 @@ def delete_jail(name):
except (IOError, OSError):
return ('Deleted jail "{0}" but was unable to remove jail make '
'file').format(name)
cmd = 'rm -f {0}'.format(make_file)
__salt__['cmd.run'](cmd)
__salt__['file.remove'](make_file)
return 'Deleted jail {0}'.format(name)


@ -11,6 +11,12 @@ import os
import re
import logging
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote
# Set up logger
log = logging.getLogger(__name__)
@ -80,7 +86,7 @@ def _update_pyenv(path, runas=None):
return False
return 0 == __salt__['cmd.retcode'](
'cd {0} && git pull'.format(path), runas=runas)
'cd {0} && git pull'.format(_cmd_quote(path)), runas=runas)
def _update_python_build(path, runas=None):
@ -89,7 +95,7 @@ def _update_python_build(path, runas=None):
return False
return 0 == __salt__['cmd.retcode'](
'cd {0} && git pull'.format(path), runas=runas)
'cd {0} && git pull'.format(_cmd_quote(path)), runas=runas)
def install(runas=None, path=None):
@ -280,9 +286,14 @@ def do(cmdline=None, runas=None):
salt '*' pyenv.do 'gem list bundler' deploy
'''
path = _pyenv_path(runas)
cmd_split = cmdline.split()
quoted_line = ''
for cmd in cmd_split:
quoted_line = quoted_line + ' ' + _cmd_quote(cmd)
result = __salt__['cmd.run_all'](
'env PATH={0}/shims:$PATH {1}'.format(path, cmdline),
runas=runas
'env PATH={0}/shims:$PATH {1}'.format(_cmd_quote(path), quoted_line),
runas=runas,
python_shell=True
)
if result['retcode'] == 0:


@ -847,7 +847,7 @@ def build_bond(iface, **settings):
__salt__['cmd.run'](
'sed -i -e "/^options\\s{0}.*/d" /etc/modprobe.conf'.format(iface)
)
__salt__['cmd.run']('cat {0} >> /etc/modprobe.conf'.format(path))
__salt__['file.append']('/etc/modprobe.conf', path)
__salt__['kmod.load']('bonding')
if settings['test']:


@ -8,6 +8,10 @@ from __future__ import absolute_import
import re
import os
import logging
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote
log = logging.getLogger(__name__)
@ -23,8 +27,10 @@ __opts__ = {
def _get_rvm_location(runas=None):
if runas:
rvmpath = '~{0}/.rvm/bin/rvm'.format(runas)
return os.path.expanduser(rvmpath)
runas_home = os.path.expanduser('~{0}'.format(runas))
rvmpath = '{0}/.rvm/bin/rvm'.format(runas_home)
if os.path.isdir(rvmpath):
return rvmpath
return '/usr/local/rvm/bin/rvm'
@ -34,9 +40,9 @@ def _rvm(command, arguments=None, runas=None, cwd=None):
if not is_installed(runas):
return False
cmd = [_get_rvm_location(runas), command]
cmd = [_get_rvm_location(runas), _cmd_quote(command)]
if arguments:
cmd.append(arguments)
cmd.extend([_cmd_quote(arg) for arg in arguments.split()])
ret = __salt__['cmd.run_all'](' '.join(cmd), runas=runas, cwd=cwd)
@ -81,7 +87,8 @@ def install(runas=None):
# the RVM installer automatically does a multi-user install when it is
# invoked with root privileges
'curl -Ls {installer} | bash -s stable'.format(installer=installer),
runas=runas
runas=runas,
python_shell=True
)
if ret['retcode'] > 0:
log.debug(


@ -224,7 +224,7 @@ def install_agent(agent_key):
account_url = get_sd_auth('account_url')
__salt__['cmd.run'](
cmd='curl https://www.serverdensity.com/downloads/agent-install.sh > install.sh',
cmd='curl https://www.serverdensity.com/downloads/agent-install.sh -o install.sh',
cwd=work_dir
)
__salt__['cmd.run'](cmd='chmod +x install.sh', cwd=work_dir)


@ -12,6 +12,11 @@ from salt.exceptions import CommandExecutionError
import salt.utils
import salt.utils.decorators as decorators
import salt.ext.six as six
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote
# Define the module's virtual name
__virtualname__ = 'virt'
@ -119,14 +124,14 @@ def init(**kwargs):
if all(key in kwargs for key in check_zone_args):
ret = _gen_zone_json(**kwargs)
# validation first
cmd = 'echo \'{0}\' | {1} validate create'.format(ret, vmadm)
res = __salt__['cmd.run_all'](cmd)
cmd = 'echo {0} | {1} validate create'.format(_cmd_quote(ret), _cmd_quote(vmadm))
res = __salt__['cmd.run_all'](cmd, python_shell=True)
retcode = res['retcode']
if retcode != 0:
return CommandExecutionError(_exit_status(retcode))
# if succeeded, proceed to the VM creation
cmd = 'echo \'{0}\' | {1} create'.format(ret, vmadm)
res = __salt__['cmd.run_all'](cmd)
# if succedeed, proceed to the VM creation
cmd = 'echo {0} | {1} create'.format(_cmd_quote(ret), _cmd_quote(vmadm))
res = __salt__['cmd.run_all'](cmd, python_shell=True)
retcode = res['retcode']
if retcode != 0:
return CommandExecutionError(_exit_status(retcode))


@ -511,7 +511,9 @@ def all_status():
def pid(sig):
'''
Return the PID or an empty string if the process is running or not.
Pass a signature to use to find the process via ps.
Pass a signature to use to find the process via ps. Note you can pass
a Python-compatible regular expression to return all pids of
processes matching the regexp.
CLI Example:
@ -519,15 +521,20 @@ def pid(sig):
salt '*' status.pid <sig>
'''
# Check whether the sig is already quoted (we check at the end in case they
# send a sig like `-E 'someregex'` to use egrep) and doesn't begin with a
# dash (again, like `-E someregex`). Quote sigs that qualify.
if (not sig.endswith('"') and not sig.endswith("'") and
not sig.startswith('-')):
sig = "'" + sig + "'"
cmd = ("{0[ps]} | grep {1} | grep -v grep | fgrep -v status.pid | "
"awk '{{print $2}}'".format(__grains__, sig))
return __salt__['cmd.run_stdout'](cmd) or ''
cmd = __grains__['ps']
output = __salt__['cmd.run_stdout'](cmd)
pids = ''
for line in output.splitlines():
if 'status.pid' in line:
continue
if re.search(sig, line):
if pids:
pids += '\n'
pids += line.split()[1]
return pids
def version():
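A self-contained sketch of the filtering the new pid() performs above: the old grep/awk pipeline becomes a re.search over raw ps output, with the status.pid invocation itself skipped. The sample output is invented for illustration only.

import re

output = (
    'root        1  /sbin/init\n'
    'salt      842  /usr/bin/python /usr/bin/salt-minion\n'
    'salt      901  python -c "status.pid salt-minion"\n'
)
sig = 'salt-minion'
pids = '\n'.join(
    line.split()[1]
    for line in output.splitlines()
    if 'status.pid' not in line and re.search(sig, line)
)
print(pids)   # -> 842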


@ -20,6 +20,7 @@ from salt.ext.six.moves.urllib.parse import urlparse
# pylint: enable=import-error,redefined-builtin,no-name-in-module
from xml.dom import minidom as dom
from contextlib import contextmanager as _contextmanager
# Import salt libs
import salt.utils
@ -129,11 +130,7 @@ def latest_version(*names, **kwargs):
# Split call to zypper into batches of 500 packages
while restpackages:
cmd = 'zypper info -t package {0}'.format(' '.join(restpackages[:500]))
output = __salt__['cmd.run_stdout'](
cmd,
output_loglevel='trace',
python_shell=True
)
output = __salt__['cmd.run_stdout'](cmd, output_loglevel='trace')
outputs.extend(re.split('Information for package \\S+:\n', output))
restpackages = restpackages[500:]
for package in outputs:
@ -220,13 +217,12 @@ def list_pkgs(versions_as_list=False, **kwargs):
__salt__['pkg_resource.stringify'](ret)
return ret
pkg_fmt = '%{NAME}_|-%{VERSION}_|-%{RELEASE}\\n'
cmd = 'rpm -qa --queryformat {0}'.format(_cmd_quote(pkg_fmt))
cmd = ('rpm', '-qa', '--queryformat', '%{NAME}_|-%{VERSION}_|-%{RELEASE}\\n')
ret = {}
out = __salt__['cmd.run'](
cmd,
output_loglevel='trace',
python_shell=True
python_shell=False
)
for line in out.splitlines():
name, pkgver, rel = line.split('_|-')
@ -608,26 +604,24 @@ def install(name=None,
old = list_pkgs()
downgrades = []
if fromrepo:
fromrepoopt = '--force --force-resolution --from {0} '.format(fromrepo)
fromrepoopt = ('--force', '--force-resolution', '--from', fromrepo)
log.info('Targeting repo {0!r}'.format(fromrepo))
else:
fromrepoopt = ''
# Split the targets into batches of 500 packages each, so that
# the maximal length of the command line is not broken
while targets:
# Quotes needed around package targets because of the possibility of
# output redirection characters "<" or ">" in zypper command.
quoted_targets = [_cmd_quote(target) for target in targets[:500]]
cmd = (
'zypper --non-interactive install --name '
'--auto-agree-with-licenses {0}{1}'
.format(fromrepoopt, ' '.join(quoted_targets))
)
cmd = ['zypper', '--non-interactive', 'install', '--name',
'--auto-agree-with-licenses']
if fromrepo:
cmd.extend(fromrepoopt)
cmd.extend(targets[:500])
targets = targets[500:]
out = __salt__['cmd.run'](
cmd,
output_loglevel='trace',
python_shell=True
python_shell=False
)
for line in out.splitlines():
match = re.match(
@ -638,13 +632,14 @@ def install(name=None,
downgrades.append(match.group(1))
while downgrades:
cmd = (
'zypper --non-interactive install --name '
'--auto-agree-with-licenses --force {0}{1}'
.format(fromrepoopt, ' '.join(downgrades[:500]))
)
__salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=True)
cmd = ['zypper', '--non-interactive', 'install', '--name',
'--auto-agree-with-licenses', '--force']
if fromrepo:
cmd.extend(fromrepoopt)
cmd.extend(downgrades[:500])
downgrades = downgrades[500:]
__salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=False)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
return salt.utils.compare_dicts(old, new)
@ -708,7 +703,7 @@ def _uninstall(action='remove', name=None, pkgs=None):
'zypper --non-interactive remove {0} {1}'
.format(purge_arg, ' '.join(targets[:500]))
)
__salt__['cmd.run'](cmd, output_loglevel='trace', python_shell=True)
__salt__['cmd.run'](cmd, output_loglevel='trace')
targets = targets[500:]
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
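A standalone sketch of the 500-package batching used in install() above, which keeps each zypper command line under the platform's argument-length limit; the package names are invented.

targets = ['package{0}'.format(i) for i in range(1234)]
while targets:
    batch, targets = targets[:500], targets[500:]
    cmd = ['zypper', '--non-interactive', 'install', '--name',
           '--auto-agree-with-licenses'] + batch
    # in the module this becomes __salt__['cmd.run'](cmd, python_shell=False)
    print('would install a batch of {0} packages'.format(len(batch)))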

View file

@ -267,8 +267,9 @@ def get_jid(jid):
if os.path.isfile(outp):
ret[fn_]['out'] = serial.load(
salt.utils.fopen(outp, 'rb'))
except Exception:
pass
except Exception as exc:
if 'Permission denied:' in str(exc):
raise
return ret


@ -194,7 +194,7 @@ def extracted(name,
else:
log.debug('Untar {0} in {1}'.format(filename, name))
results = __salt__['cmd.run_all']('tar {0} -f {1!r}'.format(
results = __salt__['cmd.run_all']('tar x{0} -f {1!r}'.format(
tar_options, filename), cwd=name)
if results['retcode'] != 0:
ret['result'] = False


@ -215,7 +215,6 @@ def managed(name, type, enabled=True, **kwargs):
# to enhance the user experience. This does not look like
# it will cause a problem. Just giving a heads up in case
# it does create a problem.
ret = {
'name': name,
'changes': {},
@ -297,7 +296,16 @@ def managed(name, type, enabled=True, **kwargs):
# Bring up/shutdown interface
try:
# Get Interface current status
interface_status = salt.utils.network.interfaces()[name].get('up')
interfaces = salt.utils.network.interfaces()
interface_status = False
if name in interfaces:
interface_status = interfaces[name].get('up')
else:
for iface in interfaces:
if 'secondary' in interfaces[iface]:
for second in interfaces[iface]['secondary']:
if second.get('label', '') == 'name':
interface_status = True
if enabled:
if interface_status:
if ret['changes']:


@ -305,10 +305,22 @@ def present(name,
Date that account expires, represented in days since epoch (January 1,
1970).
'''
fullname = str(fullname) if fullname is not None else fullname
roomnumber = str(roomnumber) if roomnumber is not None else roomnumber
workphone = str(workphone) if workphone is not None else workphone
homephone = str(homephone) if homephone is not None else homephone
try:
fullname = str(fullname) if fullname is not None else fullname
except UnicodeEncodeError:
fullname = fullname.encode('utf-8') if fullname is not None else fullname
try:
roomnumber = str(roomnumber) if roomnumber is not None else roomnumber
except UnicodeEncodeError:
roomnumber = roomnumber.encode('utf-8') if roomnumber is not None else roomnumber
try:
workphone = str(workphone) if workphone is not None else workphone
except UnicodeEncodeError:
workphone = workphone.encode('utf-8') if workphone is not None else workphone
try:
homephone = str(homephone) if homephone is not None else homephone
except UnicodeEncodeError:
homephone = homephone.encode('utf-8') if homephone is not None else homephone
ret = {'name': name,
'changes': {},


@ -22,8 +22,7 @@ try:
from raet import raeting, nacling
from raet.lane.stacking import LaneStack
from raet.lane.yarding import RemoteYard
except ImportError:
except (ImportError, OSError):
# Don't die on missing transport libs since only one transport is required
pass


@ -76,6 +76,8 @@ import salt.state
import salt.utils
import salt.utils.cache
from salt.ext.six import string_types
import salt.utils.process
from salt._compat import string_types
log = logging.getLogger(__name__)
# The SUB_EVENT set is for functions that require events fired based on
@ -718,6 +720,11 @@ class ReactWrap(object):
if ReactWrap.client_cache is None:
ReactWrap.client_cache = salt.utils.cache.CacheDict(opts['reactor_refresh_interval'])
self.pool = salt.utils.process. ThreadPool(
self.opts['reactor_worker_threads'], # number of workers for runner/wheel
queue_size=self.opts['reactor_worker_hwm'] # queue size for those workers
)
def run(self, low):
'''
Execute the specified function in the specified state by passing the
@ -726,14 +733,12 @@ class ReactWrap(object):
l_fun = getattr(self, low['state'])
try:
f_call = salt.utils.format_call(l_fun, low)
ret = l_fun(*f_call.get('args', ()), **f_call.get('kwargs', {}))
l_fun(*f_call.get('args', ()), **f_call.get('kwargs', {}))
except Exception:
log.error(
'Failed to execute {0}: {1}\n'.format(low['state'], l_fun),
exc_info=True
)
return False
return ret
def local(self, *args, **kwargs):
'''
@ -741,25 +746,25 @@ class ReactWrap(object):
'''
if 'local' not in self.client_cache:
self.client_cache['local'] = salt.client.LocalClient(self.opts['conf_file'])
return self.client_cache['local'].cmd_async(*args, **kwargs)
self.client_cache['local'].cmd_async(*args, **kwargs)
cmd = local
def runner(self, fun, **kwargs):
def runner(self, _, **kwargs):
'''
Wrap RunnerClient for executing :ref:`runner modules <all-salt.runners>`
'''
if 'runner' not in self.client_cache:
self.client_cache['runner'] = salt.runner.RunnerClient(self.opts)
return self.client_cache['runner'].async(fun, kwargs, fire_event=False)
self.pool.fire_async(self.client_cache['runner'].low, kwargs)
def wheel(self, fun, **kwargs):
def wheel(self, _, **kwargs):
'''
Wrap Wheel to enable executing :ref:`wheel modules <all-salt.wheel>`
'''
if 'wheel' not in self.client_cache:
self.client_cache['wheel'] = salt.wheel.Wheel(self.opts)
return self.client_cache['wheel'].async(fun, kwargs, fire_event=False)
self.pool.fire_async(self.client_cache['wheel'].low, kwargs)
class StateFire(object):


@ -206,7 +206,7 @@ class NonBlockingPopen(subprocess.Popen):
if not conn.closed:
fcntl.fcntl(conn, fcntl.F_SETFL, flags)
def poll_and_read_until_finish(self):
def poll_and_read_until_finish(self, interval=0.01):
silent_iterations = 0
while self.poll() is None:
if self.stdout is not None:
@ -226,7 +226,7 @@ class NonBlockingPopen(subprocess.Popen):
log.debug(stdoutdata)
if stderrdata:
log.error(stderrdata)
time.sleep(0.01)
time.sleep(interval)
def communicate(self, input=None):
super(NonBlockingPopen, self).communicate(input)


@ -10,6 +10,9 @@ import sys
import multiprocessing
import signal
import threading
import Queue
# Import salt libs
import salt.utils
import salt.ext.six as six
@ -122,6 +125,66 @@ def os_is_running(pid):
return False
class ThreadPool(object):
'''
This is a very VERY basic threadpool implementation
This was made instead of using multiprocessing ThreadPool because
we want to set max queue size and we want to daemonize threads (neither
is exposed in the stdlib version).
Since there isn't much use for this class as of right now this implementation
Only supports daemonized threads and will *not* return results
TODO: if this is found to be more generally useful it would be nice to pull
in the majority of code from upstream or from http://bit.ly/1wTeJtM
'''
def __init__(self,
num_threads=None,
queue_size=0):
# if no count passed, default to number of CPUs
if num_threads is None:
num_threads = multiprocessing.cpu_count()
self.num_threads = num_threads
# create a task queue of queue_size
self._job_queue = Queue.Queue(queue_size)
self._workers = []
# create worker threads
for idx in xrange(num_threads):
thread = threading.Thread(target=self._thread_target)
thread.daemon = True
thread.start()
self._workers.append(thread)
# intentionally not called "apply_async" since we aren't keeping track of
# the return at all, if we want to make this API compatible with multiprocessing
# threadpool we can in the future, and we won't have to worry about name collision
def fire_async(self, func, args=None, kwargs=None):
if args is None:
args = []
if kwargs is None:
kwargs = {}
try:
self._job_queue.put((func, args, kwargs), False)
return True
except Queue.Full:
return False
def _thread_target(self):
while True:
# 1s timeout so that if the parent dies this thread will die after 1s
try:
func, args, kwargs = self._job_queue.get(timeout=1)
except Queue.Empty:
continue
try:
func(*args, **kwargs)
except Exception:
pass
class ProcessManager(object):
'''
A class which will manage processes that should be running
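A minimal usage sketch of the ThreadPool defined above, mirroring how the reactor consumes it elsewhere in this commit: jobs are fire-and-forget, workers are daemon threads, and fire_async() returns False instead of blocking once the bounded queue is full.

import time
from salt.utils.process import ThreadPool

def react(idx):
    print('handling reaction {0}'.format(idx))

pool = ThreadPool(num_threads=2, queue_size=10)
for idx in range(25):
    if not pool.fire_async(react, args=(idx,)):
        print('queue full, dropping reaction {0}'.format(idx))

time.sleep(2)   # workers are daemonized; give them a moment before exiting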


@ -24,6 +24,11 @@ try:
HAS_MSGPACK = True
except ImportError:
HAS_MSGPACK = False
try:
import certifi
HAS_CERTIFI = True
except ImportError:
HAS_CERTIFI = False
try:
import urllib3
HAS_URLLIB3 = True
@ -63,6 +68,13 @@ if __name__ == '__main__':
'''
def thin_path(cachedir):
'''
Return the path to the thin tarball
'''
return os.path.join(cachedir, 'thin', 'thin.tgz')
def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
'''
Generate the salt-thin tarball and print the location of the tarball
@ -88,7 +100,10 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
fp_.write(SALTCALL)
if os.path.isfile(thintar):
if overwrite or not os.path.isfile(thinver):
os.remove(thintar)
try:
os.remove(thintar)
except OSError:
pass
elif open(thinver).read() == salt.__version__:
return thintar
tops = [
@ -108,6 +123,9 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
if HAS_CHARDET:
tops.append(os.path.dirname(chardet.__file__))
if HAS_CERTIFI:
tops.append(os.path.dirname(certifi.__file__))
for mod in [m for m in extra_mods.split(',') if m]:
if mod not in locals() and mod not in globals():
try:


@ -725,7 +725,7 @@ class SaltDistribution(distutils.dist.Distribution):
return {'console_scripts': scripts}
if IS_WINDOWS_PLATFORM:
scripts.extend(['salt-cp = salt.scripts:salt_cp'
scripts.extend(['salt-cp = salt.scripts:salt_cp',
'salt-minion = salt.scripts:salt_minion',
'salt-unity = salt.scripts:salt_unity'])
return {'console_scripts': scripts}


@ -885,7 +885,7 @@ class TestDaemon(object):
jid_info = self.client.run_job(
list(targets), 'saltutil.sync_{0}'.format(modules_kind),
expr_form='list',
timeout=9999999999999999,
timeout=999999999999999,
)
if self.wait_for_jid(targets, jid_info['jid'], timeout) is False:


@ -34,13 +34,15 @@ class TestSaltAPIHandler(SaltnadoTestCase):
Test the root path which returns the list of clients we support
'''
response = self.fetch('/')
assert response.code == 200
self.assertEqual(response.code, 200)
response_obj = json.loads(response.body)
assert response_obj['clients'] == ['runner',
'local_async',
'local',
'local_batch']
assert response_obj['return'] == 'Welcome'
self.assertEqual(response_obj['clients'],
['runner',
'local_async',
'local',
'local_batch']
)
self.assertEqual(response_obj['return'], 'Welcome')
def test_post_no_auth(self):
'''
@ -57,8 +59,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
headers={'Content-Type': self.content_type_map['json']},
follow_redirects=False
)
assert response.code == 302
assert response.headers['Location'] == '/login'
self.assertEqual(response.code, 302)
self.assertEqual(response.headers['Location'], '/login')
# Local client tests
def test_simple_local_post(self):
@ -76,7 +78,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [{'minion': True, 'sub_minion': True}]
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
def test_simple_local_post_no_tgt(self):
'''
@ -93,7 +95,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == ["No minions matched the target. No command was sent, no jid was assigned."]
self.assertEqual(response_obj['return'], ["No minions matched the target. No command was sent, no jid was assigned."])
# local_batch tests
def test_simple_local_batch_post(self):
@ -111,7 +113,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [{'minion': True, 'sub_minion': True}]
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
# local_batch tests
def test_full_local_batch_post(self):
@ -130,7 +132,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [{'minion': True, 'sub_minion': True}]
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
def test_simple_local_batch_post_no_tgt(self):
'''
@ -147,7 +149,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [{}]
self.assertEqual(response_obj['return'], [{}])
# local_async tests
def test_simple_local_async_post(self):
@ -163,9 +165,9 @@ class TestSaltAPIHandler(SaltnadoTestCase):
)
response_obj = json.loads(response.body)
# TODO: verify pub function? Maybe look at how we test the publisher
assert len(response_obj['return']) == 1
assert 'jid' in response_obj['return'][0]
assert response_obj['return'][0]['minions'] == ['minion', 'sub_minion']
self.assertEqual(len(response_obj['return']), 1)
self.assertIn('jid', response_obj['return'][0])
self.assertEqual(response_obj['return'][0]['minions'], ['minion', 'sub_minion'])
def test_multi_local_async_post(self):
low = [{'client': 'local_async',
@ -183,11 +185,11 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert len(response_obj['return']) == 2
assert 'jid' in response_obj['return'][0]
assert 'jid' in response_obj['return'][1]
assert response_obj['return'][0]['minions'] == ['minion', 'sub_minion']
assert response_obj['return'][1]['minions'] == ['minion', 'sub_minion']
self.assertEqual(len(response_obj['return']), 2)
self.assertIn('jid', response_obj['return'][0])
self.assertIn('jid', response_obj['return'][1])
self.assertEqual(response_obj['return'][0]['minions'], ['minion', 'sub_minion'])
self.assertEqual(response_obj['return'][1]['minions'], ['minion', 'sub_minion'])
def test_multi_local_async_post_multitoken(self):
low = [{'client': 'local_async',
@ -212,12 +214,12 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert len(response_obj['return']) == 3 # make sure we got 3 responses
assert 'jid' in response_obj['return'][0] # the first 2 are regular returns
assert 'jid' in response_obj['return'][1]
assert 'Failed to authenticate' in response_obj['return'][2] # bad auth
assert response_obj['return'][0]['minions'] == ['minion', 'sub_minion']
assert response_obj['return'][1]['minions'] == ['minion', 'sub_minion']
self.assertEqual(len(response_obj['return']), 3) # make sure we got 3 responses
self.assertIn('jid', response_obj['return'][0]) # the first 2 are regular returns
self.assertIn('jid', response_obj['return'][1])
self.assertIn('Failed to authenticate', response_obj['return'][2]) # bad auth
self.assertEqual(response_obj['return'][0]['minions'], ['minion', 'sub_minion'])
self.assertEqual(response_obj['return'][1]['minions'], ['minion', 'sub_minion'])
def test_simple_local_async_post_no_tgt(self):
low = [{'client': 'local_async',
@ -231,7 +233,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [{}]
self.assertEqual(response_obj['return'], [{}])
# runner tests
def test_simple_local_runner_post(self):
@ -245,7 +247,7 @@ class TestSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [['minion', 'sub_minion']]
self.assertEqual(response_obj['return'], [['minion', 'sub_minion']])
class TestMinionSaltAPIHandler(SaltnadoTestCase):
@ -267,12 +269,12 @@ class TestMinionSaltAPIHandler(SaltnadoTestCase):
follow_redirects=False,
)
response_obj = json.loads(response.body)
assert len(response_obj['return']) == 1
self.assertEqual(len(response_obj['return']), 1)
# one per minion
assert len(response_obj['return'][0]) == 2
self.assertEqual(len(response_obj['return'][0]), 2)
# check a single grain
for minion_id, grains in response_obj['return'][0].iteritems():
assert minion_id == grains['id']
self.assertEqual(minion_id, grains['id'])
def test_get(self):
response = self.fetch('/minions/minion',
@ -281,10 +283,10 @@ class TestMinionSaltAPIHandler(SaltnadoTestCase):
follow_redirects=False,
)
response_obj = json.loads(response.body)
assert len(response_obj['return']) == 1
assert len(response_obj['return'][0]) == 1
self.assertEqual(len(response_obj['return']), 1)
self.assertEqual(len(response_obj['return'][0]), 1)
# check a single grain
assert response_obj['return'][0]['minion']['id'] == 'minion'
self.assertEqual(response_obj['return'][0]['minion']['id'], 'minion')
def test_post(self):
low = [{'tgt': '*',
@ -298,9 +300,9 @@ class TestMinionSaltAPIHandler(SaltnadoTestCase):
)
response_obj = json.loads(response.body)
# TODO: verify pub function? Maybe look at how we test the publisher
assert len(response_obj['return']) == 1
assert 'jid' in response_obj['return'][0]
assert response_obj['return'][0]['minions'] == ['minion', 'sub_minion']
self.assertEqual(len(response_obj['return']), 1)
self.assertIn('jid', response_obj['return'][0])
self.assertEqual(response_obj['return'][0]['minions'], ['minion', 'sub_minion'])
def test_post_with_client(self):
# get a token for this test
@ -316,9 +318,9 @@ class TestMinionSaltAPIHandler(SaltnadoTestCase):
)
response_obj = json.loads(response.body)
# TODO: verify pub function? Maybe look at how we test the publisher
assert len(response_obj['return']) == 1
assert 'jid' in response_obj['return'][0]
assert response_obj['return'][0]['minions'] == ['minion', 'sub_minion']
self.assertEqual(len(response_obj['return']), 1)
self.assertIn('jid', response_obj['return'][0])
self.assertEqual(response_obj['return'][0]['minions'], ['minion', 'sub_minion'])
def test_post_with_incorrect_client(self):
'''
@ -336,7 +338,7 @@ class TestMinionSaltAPIHandler(SaltnadoTestCase):
headers={'Content-Type': self.content_type_map['json'],
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
assert response.code == 400
self.assertEqual(response.code, 400)
class TestJobsSaltAPIHandler(SaltnadoTestCase):
@ -363,12 +365,12 @@ class TestJobsSaltAPIHandler(SaltnadoTestCase):
response = self.wait(timeout=10)
response_obj = json.loads(response.body)['return'][0]
for jid, ret in response_obj.iteritems():
assert 'Function' in ret
assert 'Target' in ret
assert 'Target-type' in ret
assert 'User' in ret
assert 'StartTime' in ret
assert 'Arguments' in ret
self.assertIn('Function', ret)
self.assertIn('Target', ret)
self.assertIn('Target-type', ret)
self.assertIn('User', ret)
self.assertIn('StartTime', ret)
self.assertIn('Arguments', ret)
# test with a specific JID passed in
jid = response_obj.iterkeys().next()
@ -381,13 +383,13 @@ class TestJobsSaltAPIHandler(SaltnadoTestCase):
)
response = self.wait(timeout=10)
response_obj = json.loads(response.body)['return'][0]
assert 'Function' in response_obj
assert 'Target' in response_obj
assert 'Target-type' in response_obj
assert 'User' in response_obj
assert 'StartTime' in response_obj
assert 'Arguments' in response_obj
assert 'Result' in response_obj
self.assertIn('Function', response_obj)
self.assertIn('Target', response_obj)
self.assertIn('Target-type', response_obj)
self.assertIn('User', response_obj)
self.assertIn('StartTime', response_obj)
self.assertIn('Arguments', response_obj)
self.assertIn('Result', response_obj)
# TODO: run all the same tests from the root handler, but for now since they are
@ -415,7 +417,7 @@ class TestRunSaltAPIHandler(SaltnadoTestCase):
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['return'] == [{'minion': True, 'sub_minion': True}]
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
class TestEventsSaltAPIHandler(SaltnadoTestCase):
@ -459,8 +461,8 @@ class TestEventsSaltAPIHandler(SaltnadoTestCase):
# if we got a retry, just continue
if event != 'retry: 400':
tag, data = event.splitlines()
assert tag.startswith('tag: ')
assert data.startswith('data: ')
self.assertTrue(tag.startswith('tag: '))
self.assertTrue(data.startswith('data: '))
class TestWebhookSaltAPIHandler(SaltnadoTestCase):
@ -481,9 +483,9 @@ class TestWebhookSaltAPIHandler(SaltnadoTestCase):
'''
Verify that the event fired on the master matches what we sent
'''
assert event['tag'] == 'salt/netapi/hook'
assert 'headers' in event['data']
assert event['data']['post'] == {'foo': 'bar'}
self.assertEqual(event['tag'], 'salt/netapi/hook')
self.assertIn('headers', event['data'])
self.assertEqual(event['data']['post'], {'foo': 'bar'})
# get an event future
event = self.application.event_listener.get_event(self,
tag='salt/netapi/hook',
@ -496,7 +498,7 @@ class TestWebhookSaltAPIHandler(SaltnadoTestCase):
headers={saltnado.AUTH_TOKEN_HEADER: self.token['token']},
)
response_obj = json.loads(response.body)
assert response_obj['success'] is True
self.assertTrue(response_obj['success'])
if __name__ == '__main__':
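The bulk of this test diff converts bare assert statements to unittest assertion methods. The gain is purely diagnostic, as the minimal sketch below shows: assertEqual and assertIn report both operands when they fail, whereas a bare assert raises an anonymous AssertionError.

import unittest

class AssertionDemo(unittest.TestCase):
    def test_messages(self):
        response_code = 200
        # on failure this reports e.g. "AssertionError: 302 != 200"
        self.assertEqual(response_code, 200)
        self.assertIn('jid', {'jid': '20141121120532'})

if __name__ == '__main__':
    unittest.main()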


@ -244,7 +244,7 @@ def sync_minion(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
@ -320,7 +320,7 @@ def delete_vm(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
@ -371,7 +371,7 @@ def download_unittest_reports(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -425,7 +425,7 @@ def download_coverage_report(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -504,7 +504,7 @@ def download_remote_logs(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -572,7 +572,7 @@ def prepare_ssh_access(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -894,7 +894,7 @@ def main():
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
retcode = proc.returncode
@ -1017,7 +1017,7 @@ def main():
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print('Failed to execute the preparation SLS file. Exit code: {0}'.format(proc.returncode))
@ -1060,7 +1060,7 @@ def main():
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print('Failed to execute the 2nd preparation SLS file. Exit code: {0}'.format(proc.returncode))


@ -113,7 +113,7 @@ def delete_vm(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
@ -207,7 +207,7 @@ def download_unittest_reports(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -247,7 +247,7 @@ def download_coverage_report(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -303,7 +303,7 @@ def download_remote_logs(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -349,7 +349,7 @@ def download_packages(options):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
if proc.returncode != 0:
print(
@ -411,7 +411,7 @@ def run(opts):
stderr=subprocess.PIPE,
stream_stds=True
)
proc.poll_and_read_until_finish()
proc.poll_and_read_until_finish(interval=0.5)
proc.communicate()
retcode = proc.returncode


@ -101,41 +101,41 @@ class TestBaseSaltAPIHandler(SaltnadoTestCase):
# send NO accept header, should come back with json
response = self.fetch('/')
assert response.headers['Content-Type'] == self.content_type_map['json']
assert type(json.loads(response.body)) == dict
self.assertEqual(response.headers['Content-Type'], self.content_type_map['json'])
self.assertEqual(type(json.loads(response.body)), dict)
# send application/json
response = self.fetch('/', headers={'Accept': self.content_type_map['json']})
assert response.headers['Content-Type'] == self.content_type_map['json']
assert type(json.loads(response.body)) == dict
self.assertEqual(response.headers['Content-Type'], self.content_type_map['json'])
self.assertEqual(type(json.loads(response.body)), dict)
# send application/x-yaml
response = self.fetch('/', headers={'Accept': self.content_type_map['yaml']})
assert response.headers['Content-Type'] == self.content_type_map['yaml']
assert type(yaml.load(response.body)) == dict
self.assertEqual(response.headers['Content-Type'], self.content_type_map['yaml'])
self.assertEqual(type(yaml.load(response.body)), dict)
def test_token(self):
'''
Test that the token is returned correctly
'''
token = json.loads(self.fetch('/').body)['token']
assert token is None
self.assertIs(token, None)
# send a token as a header
response = self.fetch('/', headers={saltnado.AUTH_TOKEN_HEADER: 'foo'})
token = json.loads(response.body)['token']
assert token == 'foo'
self.assertEqual(token, 'foo')
# send a token as a cookie
response = self.fetch('/', headers={'Cookie': '{0}=foo'.format(saltnado.AUTH_COOKIE_NAME)})
token = json.loads(response.body)['token']
assert token == 'foo'
self.assertEqual(token, 'foo')
# send both, make sure its the header
response = self.fetch('/', headers={saltnado.AUTH_TOKEN_HEADER: 'foo',
'Cookie': '{0}=bar'.format(saltnado.AUTH_COOKIE_NAME)})
token = json.loads(response.body)['token']
assert token == 'foo'
self.assertEqual(token, 'foo')
def test_deserialize(self):
'''
@ -160,35 +160,35 @@ class TestBaseSaltAPIHandler(SaltnadoTestCase):
body=json.dumps(valid_lowstate),
headers={'Content-Type': self.content_type_map['json']})
assert valid_lowstate == json.loads(response.body)['lowstate']
self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])
# send yaml as json (should break)
response = self.fetch('/',
method='POST',
body=yaml.dump(valid_lowstate),
headers={'Content-Type': self.content_type_map['json']})
assert response.code == 400
self.assertEqual(response.code, 400)
# send as yaml
response = self.fetch('/',
method='POST',
body=yaml.dump(valid_lowstate),
headers={'Content-Type': self.content_type_map['yaml']})
assert valid_lowstate == json.loads(response.body)['lowstate']
self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])
# send json as yaml (works since yaml is a superset of json)
response = self.fetch('/',
method='POST',
body=json.dumps(valid_lowstate),
headers={'Content-Type': self.content_type_map['yaml']})
assert valid_lowstate == json.loads(response.body)['lowstate']
self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])
# send json as text/plain
response = self.fetch('/',
method='POST',
body=json.dumps(valid_lowstate),
headers={'Content-Type': self.content_type_map['text']})
assert valid_lowstate == json.loads(response.body)['lowstate']
self.assertEqual(valid_lowstate, json.loads(response.body)['lowstate'])
# send form-urlencoded
form_lowstate = (
@ -203,13 +203,13 @@ class TestBaseSaltAPIHandler(SaltnadoTestCase):
body=urlencode(form_lowstate),
headers={'Content-Type': self.content_type_map['form']})
returned_lowstate = json.loads(response.body)['lowstate']
assert len(returned_lowstate) == 1
self.assertEqual(len(returned_lowstate), 1)
returned_lowstate = returned_lowstate[0]
assert returned_lowstate['client'] == 'local'
assert returned_lowstate['tgt'] == '*'
assert returned_lowstate['fun'] == 'test.fib'
assert returned_lowstate['arg'] == ['10', 'foo']
self.assertEqual(returned_lowstate['client'], 'local')
self.assertEqual(returned_lowstate['tgt'], '*')
self.assertEqual(returned_lowstate['fun'], 'test.fib')
self.assertEqual(returned_lowstate['arg'], ['10', 'foo'])
class TestSaltAuthHandler(SaltnadoTestCase):
@ -227,7 +227,7 @@ class TestSaltAuthHandler(SaltnadoTestCase):
We don't allow gets, so assert we get 401s
'''
response = self.fetch('/login')
assert response.code == 401
self.assertEqual(response.code, 401)
def test_login(self):
'''
@ -239,10 +239,10 @@ class TestSaltAuthHandler(SaltnadoTestCase):
headers={'Content-Type': self.content_type_map['form']})
response_obj = json.loads(response.body)['return'][0]
assert response_obj['perms'] == self.opts['external_auth']['auto'][self.auth_creds_dict['username']]
assert 'token' in response_obj # TODO: verify that its valid?
assert response_obj['user'] == self.auth_creds_dict['username']
assert response_obj['eauth'] == self.auth_creds_dict['eauth']
self.assertEqual(response_obj['perms'], self.opts['external_auth']['auto'][self.auth_creds_dict['username']])
self.assertIn('token', response_obj) # TODO: verify that its valid?
self.assertEqual(response_obj['user'], self.auth_creds_dict['username'])
self.assertEqual(response_obj['eauth'], self.auth_creds_dict['eauth'])
def test_login_missing_password(self):
'''
@ -258,7 +258,7 @@ class TestSaltAuthHandler(SaltnadoTestCase):
body=urlencode(bad_creds),
headers={'Content-Type': self.content_type_map['form']})
assert response.code == 400
self.assertEqual(response.code, 400)
def test_login_bad_creds(self):
'''
@ -274,7 +274,7 @@ class TestSaltAuthHandler(SaltnadoTestCase):
body=urlencode(bad_creds),
headers={'Content-Type': self.content_type_map['form']})
assert response.code == 401
self.assertEqual(response.code, 401)
if __name__ == '__main__':


@ -36,12 +36,12 @@ from unit.utils.event_test import eventpublisher_process, event, SOCK_DIR # pyl
@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
class TestUtils(TestCase):
def test_batching(self):
assert 1 == saltnado.get_batch_size('1', 10)
assert 2 == saltnado.get_batch_size('2', 10)
self.assertEqual(1, saltnado.get_batch_size('1', 10))
self.assertEqual(2, saltnado.get_batch_size('2', 10))
assert 1 == saltnado.get_batch_size('10%', 10)
self.assertEqual(1, saltnado.get_batch_size('10%', 10))
# TODO: exception in this case? The core doesn't so we shouldn't
assert 11 == saltnado.get_batch_size('110%', 10)
self.assertEqual(11, saltnado.get_batch_size('110%', 10))
@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
@ -59,28 +59,28 @@ class TestSaltnadoUtils(AsyncTestCase):
# create an any future, make sure it isn't immediately done
any_ = saltnado.Any(futures)
assert any_.done() is False
self.assertIs(any_.done(), False)
# finish one, lets see who finishes
futures[0].set_result('foo')
self.wait()
assert any_.done() is True
assert futures[0].done() is True
assert futures[1].done() is False
assert futures[2].done() is False
self.assertIs(any_.done(), True)
self.assertIs(futures[0].done(), True)
self.assertIs(futures[1].done(), False)
self.assertIs(futures[2].done(), False)
# make sure it returned the one that finished
assert any_.result() == futures[0]
self.assertEqual(any_.result(), futures[0])
futures = futures[1:]
# re-wait on some other futures
any_ = saltnado.Any(futures)
futures[0].set_result('foo')
self.wait()
assert any_.done() is True
assert futures[0].done() is True
assert futures[1].done() is False
self.assertIs(any_.done(), True)
self.assertIs(futures[0].done(), True)
self.assertIs(futures[1].done(), False)
@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
@ -105,9 +105,9 @@ class TestEventListener(AsyncTestCase):
self.wait() # wait for the future
# check that we got the event we wanted
assert event_future.done()
assert event_future.result()['tag'] == 'evt1'
assert event_future.result()['data']['data'] == 'foo1'
self.assertTrue(event_future.done())
self.assertEqual(event_future.result()['tag'], 'evt1')
self.assertEqual(event_future.result()['data']['data'], 'foo1')
if __name__ == '__main__':