Merge branch '2017.7' into win_fix_dacl
Commit b31e08946a
25 changed files with 696 additions and 53 deletions

.gitignore (vendored, 8 changes)
@@ -92,7 +92,9 @@ tests/integration/cloud/providers/pki/minions
# Ignore tox virtualenvs
/.tox/
# Ignore kitchen stuff
.kitchen
.bundle
# Kitchen tests files
.kitchen.local.yml
.kitchen/
.bundle/
Gemfile.lock
/artifacts/

.kitchen.yml (31 changes)
@@ -3,6 +3,7 @@
<% version = '2017.7.1' %>
<% platformsfile = ENV['SALT_KITCHEN_PLATFORMS'] || '.kitchen/platforms.yml' %>
<% driverfile = ENV['SALT_KITCHEN_DRIVER'] || '.kitchen/driver.yml' %>
<% verifierfile = ENV['SALT_KITCHEN_VERIFIER'] || '.kitchen/verifier.yml' %>
<% if File.exists?(driverfile) %>
<%= ERB.new(File.read(driverfile)).result %>
@@ -52,7 +53,6 @@ provisioner:
base:
"*":
- git.salt
- kitchen
<% if File.exists?(platformsfile) %>
<%= ERB.new(File.read(platformsfile)).result %>
<% else %>
@@ -156,6 +156,8 @@ platforms:
<% end %>
suites:
- name: py2
verifier:
python_bin: python2.7
provisioner:
pillars:
top.sls:
@@ -170,6 +172,8 @@ suites:
excludes:
- centos-6
- ubuntu-14.04
verifier:
python_bin: python3
provisioner:
pillars:
top.sls:
@@ -181,12 +185,23 @@ suites:
clone_repo: false
py3: true
salttesting_namespec: salttesting==2017.6.1
verifier:
name: shell
remote_exec: true
live_stream: {}
<% if ENV['TESTOPTS'].nil? %>
command: 'sudo -E $(kitchen) /tmp/kitchen/testing/tests/runtests.py -v --run-destructive --sysinfo --transport=zeromq --output-columns=80 --ssh --coverage-xml=/tmp/coverage.xml --xml=/tmp/xml-unittests-output'
<% if File.exists?(verifierfile) %>
<%= ERB.new(File.read(verifierfile)).result %>
<% else %>
command: 'sudo -E $(kitchen) /tmp/kitchen/testing/tests/runtests.py -v --run-destructive --output-columns 80 <%= ENV["TESTOPTS"] %>'
verifier:
name: runtests
sudo: true
verbose: true
run_destructive: true
transport: zeromq
types:
- ssh
xml: /tmp/xml-unittests-output/
coverage_xml: /tmp/coverage.xml
save:
/tmp/xml-unittests-output: artifacts/
/tmp/coverage.xml: artifacts/coverage/coverage.xml
/var/log/salt/minion: artifacts/logs/minion
/tmp/salt-runtests.log: artifacts/logs/salt-runtests.log
<% end %>

Gemfile (2 changes)
@@ -2,7 +2,7 @@
source 'https://rubygems.org'
gem 'test-kitchen'
gem 'test-kitchen', :git => 'https://github.com/test-kitchen/test-kitchen.git'
gem 'kitchen-salt', :git => 'https://github.com/saltstack/kitchen-salt.git'
gem 'kitchen-sync'
gem 'git'

@@ -12,6 +12,7 @@ idna==2.5
ioloop==0.1a0
ipaddress==1.0.18
Jinja2==2.9.6
libnacl==1.6.1
Mako==1.0.6
MarkupSafe==1.0
msgpack-python==0.4.8

@@ -48,7 +48,6 @@ from salt.ext.six.moves import range  # pylint: disable=import-error,redefined-b
# Import 3rd-party libs
import salt.ext.six as six
LIBPAM = CDLL(find_library('pam'))
LIBC = CDLL(find_library('c'))
CALLOC = LIBC.calloc
@@ -121,6 +120,7 @@ class PamConv(Structure):
try:
LIBPAM = CDLL(find_library('pam'))
PAM_START = LIBPAM.pam_start
PAM_START.restype = c_int
PAM_START.argtypes = [c_char_p, c_char_p, POINTER(PamConv),

@@ -2146,6 +2146,9 @@ def script(source,
if not salt.utils.is_windows():
os.chmod(path, 320)
os.chown(path, __salt__['file.user_to_uid'](runas), -1)
path = _cmd_quote(path)
ret = _run(path + ' ' + str(args) if args else path,
cwd=cwd,
stdin=stdin,

@@ -36,7 +36,7 @@ def _table_attrs(table):
'''
Helper function to find valid table attributes
'''
cmd = ['osqueryi'] + ['--json'] + ['pragma table_info{0}'.format(table)]
cmd = ['osqueryi'] + ['--json'] + ['pragma table_info({0})'.format(table)]
res = __salt__['cmd.run_all'](cmd)
if res['retcode'] == 0:
attrs = []
@@ -62,6 +62,7 @@ def _osquery(sql, format='json'):
else:
ret['result'] = False
ret['error'] = res['stderr']
log.debug('== {} =='.format(ret))
return ret
@@ -119,9 +120,13 @@ def version():
salt '*' osquery.version
'''
_false_return = {'result': False,
'comment': 'OSQuery version unavailable.'}
res = _osquery_cmd(table='osquery_info', attrs=['version'])
if res and isinstance(res, list):
return res[0].get('version', '') or False
if 'result' in res and res['result']:
if 'data' in res and isinstance(res['data'], list):
return res['data'][0].get('version', '') or _false_return
return _false_return
def rpm_packages(attrs=None, where=None):
@@ -136,7 +141,8 @@ def rpm_packages(attrs=None, where=None):
'''
if __grains__['os_family'] == 'RedHat':
return _osquery_cmd(table='rpm_packages', attrs=attrs, where=where)
return {'result': False, 'comment': 'Only available on Red Hat based systems.'}
return {'result': False,
'comment': 'Only available on Red Hat based systems.'}
def kernel_integrity(attrs=None, where=None):
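
The version() hunk above reflects that _osquery_cmd now wraps its rows as {'result': bool, 'data': [rows]} rather than returning a bare list, so the function keeps the old list handling and adds a lookup into the data key. A minimal standalone sketch of that unpacking, with a made-up payload:

    _false_return = {'result': False, 'comment': 'OSQuery version unavailable.'}

    def extract_version(res):
        # Old shape: a bare list of result rows.
        if res and isinstance(res, list):
            return res[0].get('version', '') or False
        # New shape: {'result': True, 'data': [...]} as produced by _osquery_cmd.
        if 'result' in res and res['result']:
            if 'data' in res and isinstance(res['data'], list):
                return res['data'][0].get('version', '') or _false_return
        return _false_return

    print(extract_version({'result': True, 'data': [{'version': '2.6.1'}]}))  # 2.6.1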

@@ -82,6 +82,7 @@ import re
import shutil
import logging
import sys
from pkg_resources import parse_version
# Import salt libs
import salt.utils
@@ -1216,3 +1217,79 @@ def upgrade(bin_env=None,
ret['changes'] = salt.utils.compare_dicts(old, new)
return ret
def list_all_versions(pkg,
bin_env=None,
include_alpha=False,
include_beta=False,
include_rc=False,
user=None,
cwd=None):
'''
.. versionadded:: 2017.7.3
List all available versions of a pip package
pkg
The package to check
bin_env
Path to pip bin or path to virtualenv. If doing a system install,
and want to use a specific pip bin (pip-2.7, pip-2.6, etc..) just
specify the pip bin you want.
include_alpha
Include alpha versions in the list
include_beta
Include beta versions in the list
include_rc
Include release candidates versions in the list
user
The user under which to run pip
cwd
Current working directory to run pip from
CLI Example:
.. code-block:: bash
salt '*' pip.list_all_versions <package name>
'''
pip_bin = _get_pip_bin(bin_env)
cmd = [pip_bin, 'install', '{0}==versions'.format(pkg)]
cmd_kwargs = dict(cwd=cwd, runas=user, output_loglevel='quiet', redirect_stderr=True)
if bin_env and os.path.isdir(bin_env):
cmd_kwargs['env'] = {'VIRTUAL_ENV': bin_env}
result = __salt__['cmd.run_all'](cmd, **cmd_kwargs)
filtered = []
if not include_alpha:
filtered.append('a')
if not include_beta:
filtered.append('b')
if not include_rc:
filtered.append('rc')
if filtered:
excludes = re.compile(r'^((?!{0}).)*$'.format('|'.join(filtered)))
else:
excludes = re.compile(r'')
versions = []
for line in result['stdout'].splitlines():
match = re.search(r'\s*Could not find a version.* \(from versions: (.*)\)', line)
if match:
versions = [v for v in match.group(1).split(', ') if v and excludes.match(v)]
versions.sort(key=parse_version)
break
if not versions:
return None
return versions
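
The new pip.list_all_versions helper above works by asking pip to install the impossible requirement '<pkg>==versions', scraping the "Could not find a version ... (from versions: ...)" line from the output, filtering out pre-releases with a regex, and sorting with pkg_resources.parse_version. A standalone sketch of the filter-and-sort step, with made-up version strings:

    import re
    from pkg_resources import parse_version

    def filter_versions(versions, include_alpha=False, include_beta=False, include_rc=False):
        # Same exclusion idea as the hunk above: drop versions containing
        # 'a', 'b' or 'rc' unless the caller opted in.
        markers = []
        if not include_alpha:
            markers.append('a')
        if not include_beta:
            markers.append('b')
        if not include_rc:
            markers.append('rc')
        excludes = re.compile(r'^((?!{0}).)*$'.format('|'.join(markers))) if markers else re.compile(r'')
        kept = [v for v in versions if v and excludes.match(v)]
        kept.sort(key=parse_version)  # PEP 440 ordering, not lexicographic
        return kept

    print(filter_versions(['2.0.0rc1', '1.10.0', '2.0.0a1', '1.9.0']))  # ['1.9.0', '1.10.0']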

@@ -234,13 +234,13 @@ def list_keys(hive, key=None, use_32bit_registry=False):
for i in range(win32api.RegQueryInfoKey(handle)[0]):
subkey = win32api.RegEnumKey(handle, i)
if PY2:
subkeys.append(_to_unicode(subkey))
subkeys.append(_to_mbcs(subkey))
else:
subkeys.append(subkey)
handle.Close()
except WindowsError as exc:  # pylint: disable=E0602
except pywintypes.error as exc:  # pylint: disable=E0602
log.debug(exc)
log.debug('Cannot find key: {0}\\{1}'.format(hive, key))
return False, 'Cannot find key: {0}\\{1}'.format(hive, key)
@@ -300,7 +300,7 @@ def list_values(hive, key=None, use_32bit_registry=False, include_default=True):
'vtype': registry.vtype_reverse[vtype],
'success': True}
values.append(value)
except WindowsError as exc:  # pylint: disable=E0602
except pywintypes.error as exc:  # pylint: disable=E0602
log.debug(exc)
log.debug(r'Cannot find key: {0}\{1}'.format(hive, key))
return False, r'Cannot find key: {0}\{1}'.format(hive, key)

@@ -97,7 +97,10 @@ def _find_utmp():
result[os.stat(utmp).st_mtime] = utmp
except Exception:
pass
return result[sorted(result).pop()]
if result > 0:
return result[sorted(result).pop()]
else:
return False
def _default_runlevel():
@@ -148,12 +151,14 @@ def _runlevel():
'''
if 'upstart._runlevel' in __context__:
return __context__['upstart._runlevel']
out = __salt__['cmd.run'](['runlevel', '{0}'.format(_find_utmp())], python_shell=False)
try:
ret = out.split()[1]
except IndexError:
# The runlevel is unknown, return the default
ret = _default_runlevel()
ret = _default_runlevel()
utmp = _find_utmp()
if utmp:
out = __salt__['cmd.run'](['runlevel', '{0}'.format(utmp)], python_shell=False)
try:
ret = out.split()[1]
except IndexError:
pass
__context__['upstart._runlevel'] = ret
return ret
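
The upstart hunks above change _find_utmp() to return False when no utmp file could be stat'ed, and _runlevel() now starts from the default runlevel and only shells out to the runlevel command when a utmp file was actually found. A compact sketch of that control flow with the Salt-specific pieces stubbed out (the paths and the fake runlevel output are illustrative only):

    import os

    def _default_runlevel():
        return '2'  # stand-in for parsing the init configuration

    def _find_utmp(candidates=('/var/run/utmp', '/run/utmp')):
        result = {}
        for utmp in candidates:
            try:
                result[os.stat(utmp).st_mtime] = utmp
            except Exception:
                pass
        if result:  # empty dict means no utmp file was found
            return result[sorted(result).pop()]
        return False

    def _runlevel(run_cmd):
        ret = _default_runlevel()
        utmp = _find_utmp()
        if utmp:
            out = run_cmd(['runlevel', '{0}'.format(utmp)])
            try:
                ret = out.split()[1]
            except IndexError:
                pass
        return ret

    print(_runlevel(lambda cmd: 'N 5'))  # '5' when a utmp file exists, otherwise '2'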

@@ -155,9 +155,9 @@ def latest_version(*names, **kwargs):
# check, whether latest available version
# is newer than latest installed version
if compare_versions(ver1=str(latest_available),
oper='>',
ver2=str(latest_installed)):
if compare_versions(ver1=six.text_type(latest_available),
oper=six.text_type('>'),
ver2=six.text_type(latest_installed)):
log.debug('Upgrade of {0} from {1} to {2} '
'is available'.format(name,
latest_installed,
@@ -467,7 +467,7 @@ def _get_reg_software():
if d_name not in ignore_list:
# some MS Office updates don't register a product name which means
# their information is useless
reg_software.update({d_name: str(d_vers)})
reg_software.update({d_name: six.text_type(d_vers)})
for reg_key in __salt__['reg.list_keys'](hive, key):
update(hive, key, reg_key, False)
@@ -876,7 +876,7 @@ def _get_source_sum(source_hash, file_path, saltenv):
schemes = ('salt', 'http', 'https', 'ftp', 'swift', 's3', 'file')
invalid_hash_msg = ("Source hash '{0}' format is invalid. It must be in "
"the format <hash type>=<hash>").format(source_hash)
source_hash = str(source_hash)
source_hash = six.text_type(source_hash)
source_hash_scheme = _urlparse(source_hash).scheme
if source_hash_scheme in schemes:
@@ -1128,7 +1128,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
# as a float it must be converted to a string in order for
# string matching to work.
if not isinstance(version_num, six.string_types) and version_num is not None:
version_num = str(version_num)
version_num = six.text_type(version_num)
if not version_num:
# following can be version number or latest
@@ -1347,6 +1347,10 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
__salt__['system.set_reboot_required_witnessed']()
ret[pkg_name] = {'install status': 'success, reboot required'}
changed.append(pkg_name)
elif result['retcode'] == 1641:
# 1641 is ERROR_SUCCESS_REBOOT_INITIATED
ret[pkg_name] = {'install status': 'success, reboot initiated'}
changed.append(pkg_name)
else:
log.error('Failed to install {0}'.format(pkg_name))
log.error('retcode {0}'.format(result['retcode']))
@@ -1485,7 +1489,7 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
# as a float it must be converted to a string in order for
# string matching to work.
if not isinstance(version_num, six.string_types) and version_num is not None:
version_num = str(version_num)
version_num = six.text_type(version_num)
if version_num not in pkginfo and 'latest' in pkginfo:
version_num = 'latest'
elif 'latest' in pkginfo:
@@ -1639,6 +1643,18 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
if not result['retcode']:
ret[pkgname] = {'uninstall status': 'success'}
changed.append(pkgname)
elif result['retcode'] == 3010:
# 3010 is ERROR_SUCCESS_REBOOT_REQUIRED
report_reboot_exit_codes = kwargs.pop(
'report_reboot_exit_codes', True)
if report_reboot_exit_codes:
__salt__['system.set_reboot_required_witnessed']()
ret[pkgname] = {'uninstall status': 'success, reboot required'}
changed.append(pkgname)
elif result['retcode'] == 1641:
# 1641 is ERROR_SUCCESS_REBOOT_INITIATED
ret[pkgname] = {'uninstall status': 'success, reboot initiated'}
changed.append(pkgname)
else:
log.error('Failed to remove %s', pkgname)
log.error('retcode %s', result['retcode'])
@@ -1826,9 +1842,9 @@ def compare_versions(ver1='', oper='==', ver2=''):
# Support version being the special meaning of 'latest'
if ver1 == 'latest':
ver1 = str(sys.maxsize)
ver1 = six.text_type(sys.maxsize)
if ver2 == 'latest':
ver2 = str(sys.maxsize)
ver2 = six.text_type(sys.maxsize)
# Support version being the special meaning of 'Not Found'
if ver1 == 'Not Found':
ver1 = '0.0.0.0.0'

@@ -274,7 +274,7 @@ def _get_extra_options(**kwargs):
ret = []
kwargs = salt.utils.clean_kwargs(**kwargs)
for key, value in six.iteritems(kwargs):
if isinstance(key, six.string_types):
if isinstance(value, six.string_types):
ret.append('--{0}=\'{1}\''.format(key, value))
elif value is True:
ret.append('--{0}'.format(key))

@@ -56,6 +56,9 @@ def generate_token(minion_id, signature, impersonated_by_master=False):
'metadata': audit_data
}
if payload['policies'] == []:
return {'error': 'No policies matched minion'}
log.trace('Sending token creation request to Vault')
response = requests.post(url, headers=headers, json=payload)

@@ -23,6 +23,7 @@ requisite to a pkg.installed state for the package which provides pip
from __future__ import absolute_import
import re
import logging
from pkg_resources import parse_version
# Import salt libs
import salt.utils
@@ -94,7 +95,7 @@ def _fulfills_version_spec(version, version_spec):
for oper, spec in version_spec:
if oper is None:
continue
if not salt.utils.compare_versions(ver1=version, oper=oper, ver2=spec):
if not salt.utils.compare_versions(ver1=version, oper=oper, ver2=spec, cmp_func=_pep440_version_cmp):
return False
return True
@@ -212,10 +213,70 @@ def _check_if_installed(prefix, state_pkg_name, version_spec,
ret['comment'] = ('Python package {0} was already '
'installed'.format(state_pkg_name))
return ret
if force_reinstall is False and upgrade:
# Check desired version (if any) against currently-installed
include_alpha = False
include_beta = False
include_rc = False
if any(version_spec):
for spec in version_spec:
if 'a' in spec[1]:
include_alpha = True
if 'b' in spec[1]:
include_beta = True
if 'rc' in spec[1]:
include_rc = True
available_versions = __salt__['pip.list_all_versions'](
prefix_realname, bin_env=bin_env, include_alpha=include_alpha,
include_beta=include_beta, include_rc=include_rc, user=user,
cwd=cwd)
desired_version = ''
if any(version_spec):
for version in reversed(available_versions):
if _fulfills_version_spec(version, version_spec):
desired_version = version
break
else:
desired_version = available_versions[-1]
if not desired_version:
ret['result'] = True
ret['comment'] = ('Python package {0} was already '
'installed and\nthe available upgrade '
'doesn\'t fulfills the version '
'requirements'.format(prefix_realname))
return ret
if _pep440_version_cmp(pip_list[prefix_realname], desired_version) == 0:
ret['result'] = True
ret['comment'] = ('Python package {0} was already '
'installed'.format(state_pkg_name))
return ret
return ret
def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False):
'''
Compares two version strings using pkg_resources.parse_version.
Return -1 if version1 < version2, 0 if version1 ==version2,
and 1 if version1 > version2. Return None if there was a problem
making the comparison.
'''
normalize = lambda x: str(x).split('!', 1)[-1] if ignore_epoch else str(x)
pkg1 = normalize(pkg1)
pkg2 = normalize(pkg2)
try:
if parse_version(pkg1) < parse_version(pkg2):
return -1
if parse_version(pkg1) == parse_version(pkg2):
return 0
if parse_version(pkg1) > parse_version(pkg2):
return 1
except Exception as exc:
logger.exception(exc)
return None
def installed(name,
pkgs=None,
pip_bin=None,
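
_fulfills_version_spec above now delegates comparisons to the new _pep440_version_cmp, which uses pkg_resources.parse_version instead of plain string comparison, so PEP 440 semantics (numeric components, pre-releases, epochs) are respected. A short illustration of why that matters, using only the comparison the helper relies on; the version strings are arbitrary examples:

    from pkg_resources import parse_version

    # Lexicographic comparison gets multi-digit components wrong:
    print('1.10.0' > '1.9.0')                                  # False
    print(parse_version('1.10.0') > parse_version('1.9.0'))    # True

    # Pre-releases sort before the final release:
    print(parse_version('2.0.0rc1') < parse_version('2.0.0'))  # True

    # Epochs can be stripped first, as the ignore_epoch flag in the hunk does:
    normalize = lambda x: str(x).split('!', 1)[-1]
    print(parse_version(normalize('1!1.0')) == parse_version('1.0'))  # True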

@@ -92,7 +92,7 @@ from salt.exceptions import (
from salt.modules.pkg_resource import _repack_pkgs
# Import 3rd-party libs
import salt.ext.six as six
from salt.ext import six
# pylint: disable=invalid-name
_repack_pkgs = _namespaced_function(_repack_pkgs, globals())
@@ -2845,8 +2845,11 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
if isinstance(refresh, bool):
try:
packages = __salt__['pkg.list_upgrades'](refresh=refresh, **kwargs)
expected = {pkgname: {'new': pkgver, 'old': __salt__['pkg.version'](pkgname)}
for pkgname, pkgver in six.iteritems(packages)}
if isinstance(pkgs, list):
packages = [pkg for pkg in packages if pkg in pkgs]
expected = {pkgname: pkgver for pkgname, pkgver in six.iteritems(expected) if pkgname in pkgs}
except Exception as exc:
ret['comment'] = str(exc)
return ret
@@ -2860,6 +2863,7 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
return ret
elif __opts__['test']:
ret['comment'] = 'System update will be performed'
ret['changes'] = expected
ret['result'] = None
return ret
@@ -2876,8 +2880,17 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
'packages: {0}'.format(exc))
return ret
ret['comment'] = 'Upgrade ran successfully'
ret['result'] = True
# If a package list was provided, ensure those packages were updated
missing = []
if isinstance(pkgs, list):
missing = [pkg for pkg in six.iterkeys(expected) if pkg not in ret['changes']]
if missing:
ret['comment'] = 'The following package(s) failed to update: {0}'.format(', '.join(missing))
ret['result'] = False
else:
ret['comment'] = 'Upgrade ran successfully'
ret['result'] = True
return ret
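
With the pkg.uptodate hunks above, the state builds an expected mapping from pkg.list_upgrades (narrowed to the requested pkgs when a list is given) and, after pkg.upgrade runs, reports any expected package that does not appear in ret['changes'] as failed. A minimal sketch of that bookkeeping, with made-up package data:

    # Hypothetical stand-ins for pkg.list_upgrades and pkg.version output.
    upgrades = {'pkga': '2.0.1', 'pkgb': '2.0.2', 'pkgc': '2.0.3'}
    installed = {'pkga': '1.0.1', 'pkgb': '1.0.2', 'pkgc': '1.0.3'}
    requested = ['pkga', 'pkgb']  # the state's pkgs argument

    expected = {name: {'new': new, 'old': installed[name]}
                for name, new in upgrades.items() if name in requested}

    changes = {'pkga': {'old': '1.0.1', 'new': '2.0.1'}}  # pretend only pkga was upgraded

    missing = [name for name in expected if name not in changes]
    if missing:
        comment = 'The following package(s) failed to update: {0}'.format(', '.join(missing))
    else:
        comment = 'Upgrade ran successfully'
    print(comment)  # The following package(s) failed to update: pkgb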

@@ -66,7 +66,7 @@ def process(name):
'data': {}}  # Data field for monitoring state
data = __salt__['status.pid'](name)
if name not in data:
if not data:
ret['result'] = False
ret['comment'] += 'Process signature "{0}" not found '.format(
name

@@ -429,6 +429,8 @@ def query(url,
'charset' in res_params and \
not isinstance(result_text, six.text_type):
result_text = result_text.decode(res_params['charset'])
if six.PY3 and isinstance(result_text, bytes):
result_text = result.body.decode('utf-8')
ret['body'] = result_text
else:
# Tornado

@@ -148,15 +148,25 @@ def nodegroup_comp(nodegroup, nodegroups, skip=None, first_call=True):
# No compound operators found in nodegroup definition. Check for
# group type specifiers
group_type_re = re.compile('^[A-Z]@')
regex_chars = ['(', '[', '{', '\\', '?''}])']
if not [x for x in ret if '*' in x or group_type_re.match(x)]:
# No group type specifiers and no wildcards. Treat this as a
# list of nodenames.
joined = 'L@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as list of nodenames. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
# No group type specifiers and no wildcards.
# Treat this as an expression.
if [x for x in ret if x in [x for y in regex_chars if y in x]]:
joined = 'E@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as an expression. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
else:
# Treat this as a list of nodenames.
joined = 'L@' + ','.join(ret)
log.debug(
'Nodegroup \'%s\' (%s) detected as list of nodenames. '
'Assuming compound matching syntax of \'%s\'',
nodegroup, ret, joined
)
# Return data must be a list of compound matching components
# to be fed into compound matcher. Enclose return data in list.
return [joined]
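
The nodegroup_comp hunk above splits the old catch-all "treat it as a list of nodenames" branch in two: entries containing regex-style characters are now joined into an E@ (PCRE) compound match, while plain hostnames still become an L@ list match; the new unit test at the end of this diff exercises exactly that. A rough standalone sketch of the decision, under the simplifying assumption that the input is already a flat list of tokens:

    import re

    group_type_re = re.compile('^[A-Z]@')
    # Approximation of the characters the hunk treats as regex markers.
    regex_chars = ['(', '[', '{', '\\', '?', '}', ')', ']']

    def classify(tokens):
        # Matcher prefixes (G@, E@, L@, ...) and wildcards keep their existing handling.
        if [x for x in tokens if '*' in x or group_type_re.match(x)]:
            return tokens
        # Regex-looking entries become a PCRE compound match.
        if any(c in tok for tok in tokens for c in regex_chars):
            return ['E@' + ','.join(tokens)]
        # Otherwise: a plain list of nodenames.
        return ['L@' + ','.join(tokens)]

    print(classify(['foo.domain.com', 'bar.domain.com']))  # ['L@foo.domain.com,bar.domain.com']
    print(classify(['[foo,bar,baz].domain.com']))          # ['E@[foo,bar,baz].domain.com']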

@@ -26,7 +26,7 @@ else:
# Psuedo "from psutil import *"
_globals = globals()
for attr in psutil.__all__:
_temp = __import__('psutil', globals(), locals(), [attr], -1)
_temp = __import__('psutil', globals(), locals(), [attr], -1 if six.PY2 else 0)
try:
_globals[attr] = getattr(_temp, attr)
except AttributeError:

@@ -54,4 +54,4 @@ AcceptEnv LANG LC_*
Subsystem sftp /usr/lib/openssh/sftp-server
#UsePAM yes
UsePAM yes

@@ -67,6 +67,7 @@ def get_invalid_docs():
'nspawn.stop',
'pkg.expand_repo_def',
'pip.iteritems',
'pip.parse_version',
'runtests_decorators.depends',
'runtests_decorators.depends_will_fallback',
'runtests_decorators.missing_depends',

@@ -4,6 +4,7 @@
from __future__ import absolute_import
import os
import sys
import tempfile
import textwrap
# Import Salt Testing libs
@@ -13,6 +14,7 @@ from tests.support.helpers import (
skip_if_binaries_missing,
skip_if_not_root
)
from tests.support.paths import TMP
# Import salt libs
import salt.utils
@@ -20,7 +22,6 @@ import salt.utils
# Import 3rd-party libs
import salt.ext.six as six
AVAILABLE_PYTHON_EXECUTABLE = salt.utils.which_bin([
'python',
'python2',
@@ -139,6 +140,28 @@ class CMDModuleTest(ModuleCase):
ret = self.run_function('cmd.script_retcode', [script])
self.assertEqual(ret, 0)
def test_script_cwd(self):
'''
cmd.script with cwd
'''
tmp_cwd = tempfile.mkdtemp(dir=TMP)
args = 'saltines crackers biscuits=yes'
script = 'salt://script.py'
ret = self.run_function('cmd.script', [script, args], cwd=tmp_cwd)
self.assertEqual(ret['stdout'], args)
def test_script_cwd_with_space(self):
'''
cmd.script with cwd
'''
tmp_cwd = "{0}{1}test 2".format(tempfile.mkdtemp(dir=TMP), os.path.sep)
os.mkdir(tmp_cwd)
args = 'saltines crackers biscuits=yes'
script = 'salt://script.py'
ret = self.run_function('cmd.script', [script, args], cwd=tmp_cwd)
self.assertEqual(ret['stdout'], args)
@destructiveTest
def test_tty(self):
'''

tests/unit/modules/test_osquery.py (new file, 217 lines)
@@ -0,0 +1,217 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Gareth J. Greenaway <gareth@saltstack.com>`
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.modules.osquery as osquery
@skipIf(NO_MOCK, NO_MOCK_REASON)
class OSQueryTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.iptables
'''
def setup_loader_modules(self):
return {osquery: {}}
def test_version(self):
'''
Test the version returned from OSQuery
'''
_table_attrs_results = [u'pid',
u'uuid',
u'instance_id',
u'version',
u'config_hash',
u'config_valid',
u'extensions',
u'build_platform',
u'build_distro',
u'start_time',
u'watcher']
_os_query_results = {'data': [{u'version': u'2.6.1'}], 'result': True}
with patch.object(osquery, '_table_attrs',
MagicMock(return_value=_table_attrs_results)):
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
self.assertEqual(osquery.version(), u'2.6.1')
def test_deb_packages(self):
'''
Test the results returned from the deb_packages function
'''
_os_query_results = {'data': [
{'arch': 'amd64', 'name': 'accountsservice', 'revision': '1',
'size': '451', 'source': '', 'version': '0.6.45-1'},
{'arch': 'amd64', 'name': 'acetoneiso', 'revision': '2+b2',
'size': '1820', 'source': 'acetoneiso (2.4-2)',
'version': '2.4-2+b2'},
{'arch': 'amd64', 'name': 'acl', 'revision': '3+b1',
'size': '200', 'source': 'acl (2.2.52-3)',
'version': '2.2.52-3+b1'},
{'arch': 'amd64', 'name': 'adb', 'revision': '2', 'size': '189',
'source': 'android-platform-system-core',
'version': '1: 7.0.0+r33-2'}],
'result': True
}
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
with patch.dict(osquery.__grains__, {'os_family': 'Debian'}):
self.assertEqual(osquery.deb_packages(), _os_query_results)
def test_deb_packages_with_attrs(self):
'''
Test the results returned from the deb_packages function
with attributes
'''
_table_attrs_results = [u'name',
u'version',
u'source',
u'size',
u'arch',
u'revision']
_os_query_results = {'data': [
{'name': 'accountsservice', 'version': '0.6.45-1'},
{'name': 'acetoneiso', 'version': '2.4-2+b2'},
{'name': 'acl', 'version': '2.2.52-3+b1'},
{'name': 'adb', 'version': '1: 7.0.0+r33-2'}],
'result': True}
with patch.object(osquery, '_table_attrs',
MagicMock(return_value=_table_attrs_results)):
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
with patch.dict(osquery.__grains__, {'os_family': 'Debian'}):
self.assertEqual(osquery.deb_packages(attrs=['name',
'version']),
_os_query_results)
def test_kernel_modules(self):
'''
Test the results returned from the kernel_modules function
'''
_os_query_results = {'data': [
{'address': '0xffffffffc14f2000', 'name': 'nls_utf8',
'size': '16384', 'status': 'Live', 'used_by': '-'},
{'address': '0xffffffffc1599000', 'name': 'udf',
'size': '90112', 'status': 'Live', 'used_by': '-'},
{'address': '0xffffffffc14b5000', 'name': 'crc_itu_t',
'size': '16384', 'status': 'Live', 'used_by': 'udf'}],
'result': True
}
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
with patch.dict(osquery.__grains__, {'os_family': 'Debian'}):
self.assertEqual(osquery.kernel_modules(),
_os_query_results)
def test_kernel_modules_with_attrs(self):
'''
Test the results returned from the kernel_modules function
with attributes
'''
_table_attrs_results = [u'address',
u'name',
u'size',
u'status',
u'used_by']
_os_query_results = {'data': [
{'name': 'nls_utf8', 'status': 'Live'},
{'name': 'udf', 'status': 'Live'},
{'name': 'crc_itu_t', 'status': 'Live'}],
'result': True
}
with patch.object(osquery, '_table_attrs',
MagicMock(return_value=_table_attrs_results)):
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
with patch.dict(osquery.__grains__, {'os_family': 'Debian'}):
self.assertEqual(osquery.kernel_modules(attrs=['name',
'status']),
_os_query_results)
def test_osquery_info(self):
'''
Test the results returned from the kernel_modules function
with attributes
'''
_table_attrs_results = [u'pid',
u'uuid',
u'instance_id',
u'version',
u'config_hash',
u'config_valid',
u'extensions',
u'build_platform',
u'build_distro',
u'start_time',
u'watcher']
_os_query_results = {'data': [
{u'build_platform': u'ubuntu', u'start_time': u'1514484833',
u'uuid': u'D31FD400-7277-11E3-ABA6-B8AEED7E173B',
u'build_distro': u'xenial',
u'pid': u'24288',
u'watcher': u'-1',
u'instance_id': u'dff196b0-5c91-4105-962b-28660d7aa282',
u'version': u'2.6.1',
u'extensions': u'inactive',
u'config_valid': u'0',
u'config_hash': u''}],
'result': True}
with patch.object(osquery, '_table_attrs',
MagicMock(return_value=_table_attrs_results)):
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
with patch.dict(osquery.__grains__, {'os_family': 'Debian'}):
self.assertEqual(osquery.osquery_info(),
_os_query_results)
def test_osquery_info_with_attrs(self):
'''
Test the results returned from the kernel_modules function
with attributes
'''
_table_attrs_results = [u'pid',
u'uuid',
u'instance_id',
u'version',
u'config_hash',
u'config_valid',
u'extensions',
u'build_platform',
u'build_distro',
u'start_time',
u'watcher']
_os_query_results = {'data': [
{u'build_platform': u'ubuntu', u'start_time': u'1514484833'}],
'result': True}
with patch.object(osquery, '_table_attrs',
MagicMock(return_value=_table_attrs_results)):
with patch.object(osquery, '_osquery',
MagicMock(return_value=_os_query_results)):
with patch.dict(osquery.__grains__, {'os_family': 'Debian'}):
self.assertEqual(osquery.osquery_info(attrs=['build_platform',
'start_time']),
_os_query_results)

tests/unit/states/test_pkg.py (new file, 178 lines)
@@ -0,0 +1,178 @@
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
# Import Salt Libs
from salt.ext import six
import salt.states.pkg as pkg
@skipIf(NO_MOCK, NO_MOCK_REASON)
class PkgTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.states.pkg
'''
pkgs = {
'pkga': {'old': '1.0.1', 'new': '2.0.1'},
'pkgb': {'old': '1.0.2', 'new': '2.0.2'},
'pkgc': {'old': '1.0.3', 'new': '2.0.3'}
}
def setup_loader_modules(self):
return {
pkg: {
'__grains__': {
'os': 'CentOS'
}
}
}
def test_uptodate_with_changes(self):
'''
Test pkg.uptodate with simulated changes
'''
list_upgrades = MagicMock(return_value={
pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs)
})
upgrade = MagicMock(return_value=self.pkgs)
version = MagicMock(side_effect=lambda pkgname: self.pkgs[pkgname]['old'])
with patch.dict(pkg.__salt__,
{'pkg.list_upgrades': list_upgrades,
'pkg.upgrade': upgrade,
'pkg.version': version}):
# Run state with test=false
with patch.dict(pkg.__opts__, {'test': False}):
ret = pkg.uptodate('dummy', test=True)
self.assertTrue(ret['result'])
self.assertDictEqual(ret['changes'], self.pkgs)
# Run state with test=true
with patch.dict(pkg.__opts__, {'test': True}):
ret = pkg.uptodate('dummy', test=True)
self.assertIsNone(ret['result'])
self.assertDictEqual(ret['changes'], self.pkgs)
def test_uptodate_with_pkgs_with_changes(self):
'''
Test pkg.uptodate with simulated changes
'''
pkgs = {
'pkga': {'old': '1.0.1', 'new': '2.0.1'},
'pkgb': {'old': '1.0.2', 'new': '2.0.2'},
'pkgc': {'old': '1.0.3', 'new': '2.0.3'}
}
list_upgrades = MagicMock(return_value={
pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs)
})
upgrade = MagicMock(return_value=self.pkgs)
version = MagicMock(side_effect=lambda pkgname: pkgs[pkgname]['old'])
with patch.dict(pkg.__salt__,
{'pkg.list_upgrades': list_upgrades,
'pkg.upgrade': upgrade,
'pkg.version': version}):
# Run state with test=false
with patch.dict(pkg.__opts__, {'test': False}):
ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
self.assertTrue(ret['result'])
self.assertDictEqual(ret['changes'], pkgs)
# Run state with test=true
with patch.dict(pkg.__opts__, {'test': True}):
ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
self.assertIsNone(ret['result'])
self.assertDictEqual(ret['changes'], pkgs)
def test_uptodate_no_changes(self):
'''
Test pkg.uptodate with no changes
'''
list_upgrades = MagicMock(return_value={})
upgrade = MagicMock(return_value={})
with patch.dict(pkg.__salt__,
{'pkg.list_upgrades': list_upgrades,
'pkg.upgrade': upgrade}):
# Run state with test=false
with patch.dict(pkg.__opts__, {'test': False}):
ret = pkg.uptodate('dummy', test=True)
self.assertTrue(ret['result'])
self.assertDictEqual(ret['changes'], {})
# Run state with test=true
with patch.dict(pkg.__opts__, {'test': True}):
ret = pkg.uptodate('dummy', test=True)
self.assertTrue(ret['result'])
self.assertDictEqual(ret['changes'], {})
def test_uptodate_with_pkgs_no_changes(self):
'''
Test pkg.uptodate with no changes
'''
list_upgrades = MagicMock(return_value={})
upgrade = MagicMock(return_value={})
with patch.dict(pkg.__salt__,
{'pkg.list_upgrades': list_upgrades,
'pkg.upgrade': upgrade}):
# Run state with test=false
with patch.dict(pkg.__opts__, {'test': False}):
ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
self.assertTrue(ret['result'])
self.assertDictEqual(ret['changes'], {})
# Run state with test=true
with patch.dict(pkg.__opts__, {'test': True}):
ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
self.assertTrue(ret['result'])
self.assertDictEqual(ret['changes'], {})
def test_uptodate_with_failed_changes(self):
'''
Test pkg.uptodate with simulated failed changes
'''
pkgs = {
'pkga': {'old': '1.0.1', 'new': '2.0.1'},
'pkgb': {'old': '1.0.2', 'new': '2.0.2'},
'pkgc': {'old': '1.0.3', 'new': '2.0.3'}
}
list_upgrades = MagicMock(return_value={
pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs)
})
upgrade = MagicMock(return_value={})
version = MagicMock(side_effect=lambda pkgname: pkgs[pkgname]['old'])
with patch.dict(pkg.__salt__,
{'pkg.list_upgrades': list_upgrades,
'pkg.upgrade': upgrade,
'pkg.version': version}):
# Run state with test=false
with patch.dict(pkg.__opts__, {'test': False}):
ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
self.assertFalse(ret['result'])
self.assertDictEqual(ret['changes'], {})
# Run state with test=true
with patch.dict(pkg.__opts__, {'test': True}):
ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
self.assertIsNone(ret['result'])
self.assertDictEqual(ret['changes'], pkgs)

@@ -127,6 +127,16 @@ class NodegroupCompTest(TestCase):
expected_ret = ['L@foo.domain.com,bar.domain.com,baz.domain.com', 'or', 'bl*.domain.com']
self.assertListEqual(ret, expected_ret)
def test_simple_expression_nodegroup(self):
'''
Smoke test a nodegroup with a simple expression. No recursion.
'''
simple_nodegroup = {'group1': '[foo,bar,baz].domain.com'}
ret = salt.utils.minions.nodegroup_comp('group1', simple_nodegroup)
expected_ret = ['E@[foo,bar,baz].domain.com']
self.assertListEqual(ret, expected_ret)
def test_simple_recurse(self):
'''
Test a case where one nodegroup contains a second nodegroup