Merge branch '2017.7' into fix-add-saltenv-pillarenv-to-pillar-item

commit b7d39caa86
Daniel Ferenci, 2018-03-21 08:45:18 +01:00, committed by GitHub
35 changed files with 10426 additions and 656 deletions


@@ -31,7 +31,7 @@ provisioner:
salt_version: latest
salt_bootstrap_url: https://bootstrap.saltstack.com
salt_bootstrap_options: -X -p rsync stable <%= version %>
log_level: info
log_level: debug
sudo: true
require_chef: false
retry_on_exit_code:
@@ -213,7 +213,6 @@ suites:
verifier:
name: runtests
sudo: true
verbose: true
run_destructive: true
transport: zeromq
types:


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-API" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-API" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-api \- salt-api Command
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CALL" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-CALL" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-call \- salt-call Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CLOUD" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-CLOUD" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-cloud \- Salt Cloud Command
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CP" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-CP" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-cp \- salt-cp Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-KEY" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-KEY" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-key \- salt-key Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-MASTER" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-MASTER" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-master \- salt-master Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-MINION" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-MINION" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-minion \- salt-minion Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-PROXY" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-PROXY" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-proxy \- salt-proxy Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-RUN" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-RUN" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-run \- salt-run Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-SSH" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-SSH" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-ssh \- salt-ssh Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-SYNDIC" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-SYNDIC" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-syndic \- salt-syndic Documentation
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-UNITY" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT-UNITY" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt-unity \- salt-unity Command
.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SALT" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
salt \- salt
.

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SPM" "1" "Jan 24, 2018" "2017.7.3" "Salt"
.TH "SPM" "1" "Mar 13, 2018" "2017.7.5" "Salt"
.SH NAME
spm \- Salt Package Manager Command
.

File diff suppressed because it is too large.


@@ -587,7 +587,7 @@ FunctionEnd
# Push "this is some string"
# Push "some"
# Call StrStr
# Pop $0 ; "some string"
# Pop $0 # "some string"
#------------------------------------------------------------------------------
!macro StrStr un
Function ${un}StrStr
@@ -693,7 +693,7 @@ Function AddToPath
# Make sure the new length isn't over the NSIS_MAX_STRLEN
IntCmp $2 ${NSIS_MAX_STRLEN} +4 +4 0
DetailPrint "AddToPath: new length $2 > ${NSIS_MAX_STRLEN}"
DetailPrint "AddToPath Failed: new length $2 > ${NSIS_MAX_STRLEN}"
MessageBox MB_OK \
"You may add C:\salt to the %PATH% for convenience when issuing local salt commands from the command line." \
/SD IDOK


@@ -2275,6 +2275,7 @@ class Minion(MinionBase):
self.opts,
self.functions,
self.returners,
utils=self.utils,
cleanup=[master_event(type='alive')])
try:


@@ -420,6 +420,9 @@ def build(runas,
# use default /var/cache/pbuilder/result
results_dir = '/var/cache/pbuilder/result'
## ensure clean
__salt__['cmd.run']('rm -fR {0}'.format(results_dir))
# dscs should only contain salt orig and debian tarballs and dsc file
for dsc in dscs:
afile = os.path.basename(dsc)
@@ -430,10 +433,10 @@
try:
__salt__['cmd.run']('chown {0} -R {1}'.format(runas, dbase))
cmd = 'pbuilder --update --override-config'
cmd = 'pbuilder update --override-config'
__salt__['cmd.run'](cmd, runas=runas, python_shell=True)
cmd = 'pbuilder --build {0}'.format(dsc)
cmd = 'pbuilder build --debbuildopts "-sa" {0}'.format(dsc)
__salt__['cmd.run'](cmd, runas=runas, python_shell=True)
# ignore local deps generated package file
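
The hunk above cleans the pbuilder result directory before building and switches from the old flag style to pbuilder's subcommand style, adding --debbuildopts "-sa" so dpkg-buildpackage is forced to include the original source. A rough standalone sketch of that sequence (the .dsc name is illustrative; the real module routes these commands through __salt__['cmd.run'] as the runas user):

    import subprocess

    results_dir = '/var/cache/pbuilder/result'   # pbuilder's default result dir
    dsc = 'example_0.1-1.dsc'                    # illustrative .dsc file name

    # ensure a clean result directory, refresh the chroot, then build
    subprocess.run(['rm', '-fR', results_dir], check=True)
    subprocess.run('pbuilder update --override-config', shell=True, check=True)
    subprocess.run('pbuilder build --debbuildopts "-sa" {0}'.format(dsc),
                   shell=True, check=True)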


@@ -157,7 +157,7 @@ def install(pkg=None,
if runas:
uid = salt.utils.get_uid(runas)
if uid:
env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''})
env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
cmd = ' '.join(cmd)
result = __salt__['cmd.run_all'](cmd, python_shell=True, cwd=dir, runas=runas, env=env)
@@ -236,7 +236,7 @@ def uninstall(pkg, dir=None, runas=None, env=None):
if runas:
uid = salt.utils.get_uid(runas)
if uid:
env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''})
env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
cmd = ['npm', 'uninstall', '"{0}"'.format(pkg)]
if not dir:
@@ -295,7 +295,7 @@ def list_(pkg=None, dir=None, runas=None, env=None, depth=None):
if runas:
uid = salt.utils.get_uid(runas)
if uid:
env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''})
env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
cmd = ['npm', 'list', '--json', '--silent']
@@ -358,7 +358,7 @@ def cache_clean(path=None, runas=None, env=None, force=False):
if runas:
uid = salt.utils.get_uid(runas)
if uid:
env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''})
env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
cmd = ['npm', 'cache', 'clean']
if path:
@@ -405,7 +405,7 @@ def cache_list(path=None, runas=None, env=None):
if runas:
uid = salt.utils.get_uid(runas)
if uid:
env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''})
env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
cmd = ['npm', 'cache', 'ls']
if path:
@@ -445,7 +445,7 @@ def cache_path(runas=None, env=None):
if runas:
uid = salt.utils.get_uid(runas)
if uid:
env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''})
env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
cmd = 'npm config get cache'
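
The repeated change in this file drops the byte-string formatting around the SUDO_UID/SUDO_USER values. A small sketch of the reasoning (assumed; plain Python, uid value is illustrative):

    uid = 1000   # illustrative; the module gets this from salt.utils.get_uid(runas)
    env = {}

    # On Python 2, b'{0}' is an ordinary str, so b'{0}'.format(uid) works; on
    # Python 3, bytes has no .format() and the old code raised AttributeError.
    # Passing the plain values, as the new code does, works on both versions.
    env.update({'SUDO_UID': uid, 'SUDO_USER': ''})
    print(env)   # e.g. {'SUDO_UID': 1000, 'SUDO_USER': ''} on Python 3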


@@ -3430,7 +3430,7 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
this_element_value = b''.join([this_element_value.encode('utf-16-le'),
encoded_null])
elif etree.QName(element).localname == 'multiText':
this_vtype = 'REG_MULTI_SZ'
this_vtype = 'REG_MULTI_SZ' if not check_deleted else 'REG_SZ'
if this_element_value is not None:
this_element_value = '{0}{1}{1}'.format(chr(0).join(this_element_value), chr(0))
elif etree.QName(element).localname == 'list':


@@ -2663,20 +2663,18 @@ def mod_repo(repo, basedir=None, **kwargs):
filerepos[repo].update(repo_opts)
content = header
for stanza in six.iterkeys(filerepos):
comments = ''
if 'comments' in six.iterkeys(filerepos[stanza]):
comments = salt.utils.pkg.rpm.combine_comments(
filerepos[stanza]['comments'])
del filerepos[stanza]['comments']
content += '\n[{0}]'.format(stanza)
comments = salt.utils.pkg.rpm.combine_comments(
filerepos[stanza].pop('comments', [])
)
content += '[{0}]\n'.format(stanza)
for line in six.iterkeys(filerepos[stanza]):
content += '\n{0}={1}'.format(
content += '{0}={1}\n'.format(
line,
filerepos[stanza][line]
if not isinstance(filerepos[stanza][line], bool)
else _bool_to_str(filerepos[stanza][line])
)
content += '\n{0}\n'.format(comments)
content += comments + '\n'
with salt.utils.fopen(repofile, 'w') as fileout:
fileout.write(content)
@@ -2704,14 +2702,29 @@ def _parse_repo_file(filename):
section_dict.pop('__name__', None)
config[section] = section_dict
# Try to extract leading comments
# Try to extract header comments, as well as comments for each repo. Read
# from the beginning of the file and assume any leading comments are
# header comments. Continue to read each section header and then find the
# comments for each repo.
headers = ''
with salt.utils.fopen(filename, 'r') as rawfile:
for line in rawfile:
if line.strip().startswith('#'):
headers += '{0}\n'.format(line.strip())
else:
break
section = None
with salt.utils.fopen(filename, 'r') as repofile:
for line in repofile:
line = line.strip()
if line.startswith('#'):
if section is None:
headers += line + '\n'
else:
try:
comments = config[section].setdefault('comments', [])
comments.append(line[1:].lstrip())
except KeyError:
log.debug(
'Found comment in %s which does not appear to '
'belong to any repo section: %s', filename, line
)
elif line.startswith('[') and line.endswith(']'):
section = line[1:-1]
return (headers, config)
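
A minimal standalone sketch of the comment-splitting logic added above (not the real salt.modules.yumpkg function; file contents are illustrative): leading comments become header comments, while comments after a [section] header are attached to that repo.

    sample = [
        '# header comment',
        '[base]',
        'baseurl=http://example.com/repo',
        '# belongs to [base]',
    ]

    headers = ''
    config = {'base': {'baseurl': 'http://example.com/repo'}}  # as parsed earlier
    section = None
    for line in sample:
        line = line.strip()
        if line.startswith('#'):
            if section is None:
                headers += line + '\n'
            else:
                config[section].setdefault('comments', []).append(line[1:].lstrip())
        elif line.startswith('[') and line.endswith(']'):
            section = line[1:-1]

    print(headers)          # '# header comment\n'
    print(config['base'])   # {'baseurl': ..., 'comments': ['belongs to [base]']}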


@@ -706,6 +706,123 @@ def list_pkgs(versions_as_list=False, **kwargs):
return ret
def list_repo_pkgs(*args, **kwargs):
'''
.. versionadded:: 2017.7.5,2018.3.1
Returns all available packages. Optionally, package names (and name globs)
can be passed and the results will be filtered to packages matching those
names. This is recommended as it speeds up the function considerably.
This function can be helpful in discovering the version or repo to specify
in a :mod:`pkg.installed <salt.states.pkg.installed>` state.
The return data will be a dictionary mapping package names to a list of
version numbers, ordered from newest to oldest. If ``byrepo`` is set to
``True``, then the return dictionary will contain repository names at the
top level, and each repository will map packages to lists of version
numbers. For example:
.. code-block:: python
# With byrepo=False (default)
{
'bash': ['4.3-83.3.1',
'4.3-82.6'],
'vim': ['7.4.326-12.1']
}
{
'OSS': {
'bash': ['4.3-82.6'],
'vim': ['7.4.326-12.1']
},
'OSS Update': {
'bash': ['4.3-83.3.1']
}
}
fromrepo : None
Only include results from the specified repo(s). Multiple repos can be
specified, comma-separated.
byrepo : False
When ``True``, the return data for each package will be organized by
repository.
CLI Examples:
.. code-block:: bash
salt '*' pkg.list_repo_pkgs
salt '*' pkg.list_repo_pkgs foo bar baz
salt '*' pkg.list_repo_pkgs 'python2-*' byrepo=True
salt '*' pkg.list_repo_pkgs 'python2-*' fromrepo='OSS Updates'
'''
byrepo = kwargs.pop('byrepo', False)
fromrepo = kwargs.pop('fromrepo', '') or ''
ret = {}
targets = [
arg if isinstance(arg, six.string_types) else six.text_type(arg)
for arg in args
]
def _is_match(pkgname):
'''
When package names are passed to a zypper search, they will be matched
anywhere in the package name. This makes sure that only exact or
fnmatch matches are identified.
'''
if not args:
# No package names passed, everyone's a winner!
return True
for target in targets:
if fnmatch.fnmatch(pkgname, target):
return True
return False
for node in __zypper__.xml.call('se', '-s', *targets).getElementsByTagName('solvable'):
pkginfo = dict(node.attributes.items())
try:
if pkginfo['kind'] != 'package':
continue
reponame = pkginfo['repository']
if fromrepo and reponame != fromrepo:
continue
pkgname = pkginfo['name']
pkgversion = pkginfo['edition']
except KeyError:
continue
else:
if _is_match(pkgname):
repo_dict = ret.setdefault(reponame, {})
version_list = repo_dict.setdefault(pkgname, set())
version_list.add(pkgversion)
if byrepo:
for reponame in ret:
# Sort versions newest to oldest
for pkgname in ret[reponame]:
sorted_versions = sorted(
[LooseVersion(x) for x in ret[reponame][pkgname]],
reverse=True
)
ret[reponame][pkgname] = [x.vstring for x in sorted_versions]
return ret
else:
byrepo_ret = {}
for reponame in ret:
for pkgname in ret[reponame]:
byrepo_ret.setdefault(pkgname, []).extend(ret[reponame][pkgname])
for pkgname in byrepo_ret:
sorted_versions = sorted(
[LooseVersion(x) for x in byrepo_ret[pkgname]],
reverse=True
)
byrepo_ret[pkgname] = [x.vstring for x in sorted_versions]
return byrepo_ret
def _get_configured_repos():
'''
Get all the info about repositories from the configurations.
@@ -1088,6 +1205,15 @@ def install(name=None,
return {}
version_num = Wildcard(__zypper__)(name, version)
if version_num:
if pkgs is None and sources is None:
# Allow "version" to work for single package target
pkg_params = {name: version_num}
else:
log.warning('"version" parameter will be ignored for multiple '
'package targets')
if pkg_type == 'repository':
targets = []
problems = []
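
The newest-to-oldest ordering in list_repo_pkgs above relies on LooseVersion comparisons; a tiny sketch using distutils' LooseVersion (assumed to be the LooseVersion imported by this module), with the versions from the docstring example:

    from distutils.version import LooseVersion

    versions = {'4.3-82.6', '4.3-83.3.1'}
    sorted_versions = sorted([LooseVersion(x) for x in versions], reverse=True)
    print([x.vstring for x in sorted_versions])  # ['4.3-83.3.1', '4.3-82.6']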


@@ -2207,7 +2207,8 @@ class State(object):
if r_state == 'prereq' and not run_dict[tag]['result'] is None:
fun_stats.add('pre')
else:
fun_stats.add('met')
if run_dict[tag].get('__state_ran__', True):
fun_stats.add('met')
if 'unmet' in fun_stats:
status = 'unmet'
@@ -2462,6 +2463,7 @@ class State(object):
'duration': duration,
'start_time': start_time,
'comment': 'State was not run because onfail req did not change',
'__state_ran__': False,
'__run_num__': self.__run_num,
'__sls__': low['__sls__']}
self.__run_num += 1
@@ -2472,6 +2474,7 @@ class State(object):
'duration': duration,
'start_time': start_time,
'comment': 'State was not run because none of the onchanges reqs changed',
'__state_ran__': False,
'__run_num__': self.__run_num,
'__sls__': low['__sls__']}
self.__run_num += 1
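
A small sketch of what the new flag changes in the requisite check above (simplified, with an illustrative result dict): a state that was skipped because its onfail target did not fail no longer counts as 'met' for states that require it.

    run_dict = {
        # b was skipped because its onfail target succeeded
        'cmd_|-b_|-echo b_|-run': {
            'result': True,
            'comment': 'State was not run because onfail req did not change',
            '__state_ran__': False,
        },
    }

    tag = 'cmd_|-b_|-echo b_|-run'
    fun_stats = set()
    if run_dict[tag].get('__state_ran__', True):
        fun_stats.add('met')

    print(fun_stats)  # set() -- a state requiring b is therefore not satisfied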


@@ -11,7 +11,6 @@ import subprocess
# Import 3rd-party libs
from salt.ext import six
from salt.ext.six.moves import range # pylint: disable=redefined-builtin
log = logging.getLogger(__name__)
@@ -112,10 +111,10 @@ def combine_comments(comments):
'''
if not isinstance(comments, list):
comments = [comments]
for idx in range(len(comments)):
if not isinstance(comments[idx], six.string_types):
comments[idx] = str(comments[idx])
comments[idx] = comments[idx].strip()
if not comments[idx].startswith('#'):
comments[idx] = '#' + comments[idx]
return '\n'.join(comments)
ret = []
for comment in comments:
if not isinstance(comment, six.string_types):
comment = str(comment)
# Normalize for any spaces (or lack thereof) after the #
ret.append('# {0}\n'.format(comment.lstrip('#').lstrip()))
return ''.join(ret)
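
A standalone sketch of the rewritten helper above (same logic, without the six shim), plus a usage example:

    def combine_comments(comments):
        if not isinstance(comments, list):
            comments = [comments]
        ret = []
        for comment in comments:
            if not isinstance(comment, str):
                comment = str(comment)
            # Normalize for any spaces (or lack thereof) after the #
            ret.append('# {0}\n'.format(comment.lstrip('#').lstrip()))
        return ''.join(ret)

    print(combine_comments(['foo', '#bar', '#  baz']))
    # # foo
    # # bar
    # # baz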


@@ -382,7 +382,7 @@ class Schedule(object):
'''
instance = None
def __new__(cls, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None):
def __new__(cls, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None, utils=None):
'''
Only create one instance of Schedule
'''
@@ -392,20 +392,21 @@ class Schedule(object):
# it in a WeakValueDictionary-- which will remove the item if no one
# references it-- this forces a reference while we return to the caller
cls.instance = object.__new__(cls)
cls.instance.__singleton_init__(opts, functions, returners, intervals, cleanup, proxy)
cls.instance.__singleton_init__(opts, functions, returners, intervals, cleanup, proxy, utils)
else:
log.debug('Re-using Schedule')
return cls.instance
# has to remain empty for singletons, since __init__ will *always* be called
def __init__(self, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None):
def __init__(self, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None, utils=None):
pass
# an init for the singleton instance to call
def __singleton_init__(self, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None):
def __singleton_init__(self, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None, utils=None):
self.opts = opts
self.proxy = proxy
self.functions = functions
self.utils = utils
if isinstance(intervals, dict):
self.intervals = intervals
else:
@@ -751,10 +752,11 @@ class Schedule(object):
# This also needed for ZeroMQ transport to reset all functions
# context data that could keep paretns connections. ZeroMQ will
# hang on polling parents connections from the child process.
utils = self.utils or salt.loader.utils(self.opts)
if self.opts['__role'] == 'master':
self.functions = salt.loader.runner(self.opts)
self.functions = salt.loader.runner(self.opts, utils=utils)
else:
self.functions = salt.loader.minion_mods(self.opts, proxy=self.proxy)
self.functions = salt.loader.minion_mods(self.opts, proxy=self.proxy, utils=utils)
self.returners = salt.loader.returners(self.opts, self.functions, proxy=self.proxy)
ret = {'id': self.opts.get('id', 'master'),
'fun': func,
@@ -1393,6 +1395,8 @@ class Schedule(object):
self.functions = {}
returners = self.returners
self.returners = {}
utils = self.utils
self.utils = {}
try:
if multiprocessing_enabled:
thread_cls = SignalHandlingMultiprocessingProcess
@@ -1418,6 +1422,7 @@ class Schedule(object):
# Restore our function references.
self.functions = functions
self.returners = returners
self.utils = utils
def clean_proc_dir(opts):
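
A sketch of how the new utils plumbing is meant to be wired (assumed typical usage; the config path is illustrative and salt must be importable): the minion loads its utils once, hands them to Schedule, and the scheduled-job subprocess reuses them instead of rebuilding the loader.

    import salt.config
    import salt.loader
    import salt.utils.schedule

    opts = salt.config.minion_config('/etc/salt/minion')   # illustrative path
    utils = salt.loader.utils(opts)
    functions = salt.loader.minion_mods(opts, utils=utils)
    returners = salt.loader.returners(opts, functions)

    # Schedule is a singleton; passing utils lets handle_func reuse them rather
    # than calling salt.loader.utils() again when it reloads the function modules.
    schedule = salt.utils.schedule.Schedule(opts, functions,
                                            returners=returners, utils=utils)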


@@ -0,0 +1,25 @@
a:
cmd.run:
- name: exit 1
b:
cmd.run:
- name: echo b
- onfail:
- cmd: a
c:
cmd.run:
- name: echo c
- onfail:
- cmd: a
- require:
- cmd: b
d:
cmd.run:
- name: echo d
- onfail:
- cmd: a
- require:
- cmd: c


@@ -0,0 +1,25 @@
a:
cmd.run:
- name: exit 0
b:
cmd.run:
- name: echo b
- onfail:
- cmd: a
c:
cmd.run:
- name: echo c
- onfail:
- cmd: a
- require:
- cmd: b
d:
cmd.run:
- name: echo d
- onfail:
- cmd: a
- require:
- cmd: c


@@ -272,38 +272,53 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
self.run_function('pkg.refresh_db')
if os_family == 'Suse':
# pkg.latest version returns empty if the latest version is already installed
vim_version_dict = self.run_function('pkg.latest_version', ['vim'])
vim_info = self.run_function('pkg.info_available', ['vim'])['vim']
if vim_version_dict == {}:
# Latest version is installed, get its version and construct
# a version selector so the immediately previous version is selected
vim_version = 'version=<'+vim_info['version']
else:
# Vim was not installed, so pkg.latest_version returns the latest one.
# Construct a version selector so immediately previous version is selected
vim_version = 'version=<'+vim_version_dict
# This test assumes that there are multiple possible versions of a
# package available. That makes it brittle if you pick just one
# target, as changes in the available packages will break the test.
# Therefore, we'll choose from several packages to make sure we get
# one that is suitable for this test.
packages = ('hwinfo', 'avrdude', 'diffoscope', 'vim')
# Only install a new version of vim if vim is up-to-date, otherwise we don't
# need this check. (And the test will fail when we check for the empty dict
# since vim gets upgraded in the install step.)
if 'out-of-date' not in vim_info['status']:
# Install a version of vim that should need upgrading
ret = self.run_function('pkg.install', ['vim', vim_version])
if not isinstance(ret, dict):
if ret.startswith('ERROR'):
self.skipTest('Could not install earlier vim to complete test.')
available = self.run_function('pkg.list_repo_pkgs', packages)
versions = self.run_function('pkg.version', packages)
for package in packages:
try:
new, old = available[package][:2]
except (KeyError, ValueError):
# Package not available, or less than 2 versions
# available. This is not a suitable target.
continue
else:
self.assertNotEqual(ret, {})
target = package
current = versions[target]
break
else:
# None of the packages have more than one version available, so
# we need to find new package(s). pkg.list_repo_pkgs can be
# used to get an overview of the available packages. We should
# try to find packages with few dependencies and small download
# sizes, to keep this test from taking longer than necessary.
self.fail('No suitable package found for this test')
# Run a system upgrade, which should catch the fact that Vim needs upgrading, and upgrade it.
# Make sure we have the 2nd-oldest available version installed
ret = self.run_function('pkg.install', [target], version=old)
if not isinstance(ret, dict):
if ret.startswith('ERROR'):
self.skipTest(
'Could not install older {0} to complete '
'test.'.format(target)
)
# Run a system upgrade, which should catch the fact that the
# targeted package needs upgrading, and upgrade it.
ret = self.run_function(func)
# The changes dictionary should not be empty.
if 'changes' in ret:
self.assertIn('vim', ret['changes'])
self.assertIn(target, ret['changes'])
else:
self.assertIn('vim', ret)
self.assertIn(target, ret)
else:
ret = self.run_function('pkg.list_upgrades')
if ret == '' or ret == {}:
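
The selection loop above expects pkg.list_repo_pkgs-style data with versions listed newest first; a small sketch with made-up version numbers showing how the first package with at least two available versions becomes the upgrade target:

    # illustrative data in the shape returned by pkg.list_repo_pkgs (byrepo=False)
    available = {
        'hwinfo': ['21.23-1.2'],                  # one version only: not suitable
        'vim': ['8.0.1568-1.1', '7.4.326-12.1'],  # two versions: suitable target
    }
    for package in ('hwinfo', 'avrdude', 'diffoscope', 'vim'):
        try:
            new, old = available[package][:2]
        except (KeyError, ValueError):
            # package not available, or fewer than 2 versions available
            continue
        target = package
        break
    else:
        raise RuntimeError('No suitable package found for this test')

    print(target, old, new)  # vim 7.4.326-12.1 8.0.1568-1.1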


@@ -1095,6 +1095,56 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
test_data = state_run['cmd_|-test_non_failing_state_|-echo "Should not run"_|-run']
self.assertIn('duration', test_data)
def test_multiple_onfail_requisite_with_required(self):
'''
test to ensure multiple states are run
when specified as onfails for a single state.
This is a test for the issue:
https://github.com/saltstack/salt/issues/46552
'''
state_run = self.run_function('state.sls', mods='requisites.onfail_multiple_required')
retcode = state_run['cmd_|-b_|-echo b_|-run']['changes']['retcode']
self.assertEqual(retcode, 0)
retcode = state_run['cmd_|-c_|-echo c_|-run']['changes']['retcode']
self.assertEqual(retcode, 0)
retcode = state_run['cmd_|-d_|-echo d_|-run']['changes']['retcode']
self.assertEqual(retcode, 0)
stdout = state_run['cmd_|-b_|-echo b_|-run']['changes']['stdout']
self.assertEqual(stdout, 'b')
stdout = state_run['cmd_|-c_|-echo c_|-run']['changes']['stdout']
self.assertEqual(stdout, 'c')
stdout = state_run['cmd_|-d_|-echo d_|-run']['changes']['stdout']
self.assertEqual(stdout, 'd')
def test_multiple_onfail_requisite_with_required_no_run(self):
'''
test to ensure multiple states are not run
when specified as onfails for a single state
which fails.
This is a test for the issue:
https://github.com/saltstack/salt/issues/46552
'''
state_run = self.run_function('state.sls', mods='requisites.onfail_multiple_required_no_run')
expected = 'State was not run because onfail req did not change'
stdout = state_run['cmd_|-b_|-echo b_|-run']['comment']
self.assertEqual(stdout, expected)
stdout = state_run['cmd_|-c_|-echo c_|-run']['comment']
self.assertEqual(stdout, expected)
stdout = state_run['cmd_|-d_|-echo d_|-run']['comment']
self.assertEqual(stdout, expected)
# listen tests
def test_listen_requisite(self):


@@ -6,12 +6,14 @@
'''
# Import Python libs
from __future__ import absolute_import
import os
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest, requires_network
from tests.support.mixins import SaltReturnAssertsMixin
from tests.support.runtests import RUNTIME_VARS
# Import salt libs
import salt.utils
@@ -42,10 +44,14 @@ class NpmStateTest(ModuleCase, SaltReturnAssertsMixin):
'''
Determine if URL-referenced NPM module can be successfully installed.
'''
ret = self.run_state('npm.installed', name='request/request#v2.81.1')
user = os.environ.get('SUDO_USER', 'root')
npm_dir = os.path.join(RUNTIME_VARS.TMP, 'git-install-npm')
self.run_state('file.directory', name=npm_dir, user=user, dir_mode='755')
ret = self.run_state('npm.installed', name='request/request#v2.81.1', runas=user, dir=npm_dir)
self.assertSaltTrueReturn(ret)
ret = self.run_state('npm.removed', name='git://github.com/request/request')
ret = self.run_state('npm.removed', name='git://github.com/request/request', runas=user, dir=npm_dir)
self.assertSaltTrueReturn(ret)
self.run_state('file.absent', name=npm_dir)
@requires_network()
@destructiveTest


@@ -14,6 +14,7 @@ import os
import pwd
import glob
import shutil
import sys
# Import Salt Testing libs
from tests.support.mixins import SaltReturnAssertsMixin
@@ -523,6 +524,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
if os.path.isdir(venv_dir):
shutil.rmtree(venv_dir)
@skipIf(sys.version_info[:2] >= (3, 6), 'Old version of virtualenv too old for python3.6')
def test_46127_pip_env_vars(self):
'''
Test that checks if env_vars passed to pip.installed are also passed


@@ -22,16 +22,16 @@ import salt.utils
import salt.ext.six as six
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
class PkgrepoTest(ModuleCase, SaltReturnAssertsMixin):
'''
pkgrepo state tests
'''
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
@requires_system_grains
def test_pkgrepo_01_managed(self, grains):
'''
This is a destructive test as it adds a repository.
Test adding a repo
'''
os_grain = self.run_function('grains.item', ['os'])['os']
os_release_info = tuple(self.run_function('grains.item', ['osrelease_info'])['osrelease_info'])
@@ -56,12 +56,9 @@ class PkgrepoTest(ModuleCase, SaltReturnAssertsMixin):
for state_id, state_result in six.iteritems(ret):
self.assertSaltTrueReturn(dict([(state_id, state_result)]))
@destructiveTest
@skipIf(salt.utils.is_windows(), 'minion is windows')
def test_pkgrepo_02_absent(self):
'''
This is a destructive test as it removes the repository added in the
above test.
Test removing the repo from the above test
'''
os_grain = self.run_function('grains.item', ['os'])['os']
os_release_info = tuple(self.run_function('grains.item', ['osrelease_info'])['osrelease_info'])
@@ -78,3 +75,56 @@ class PkgrepoTest(ModuleCase, SaltReturnAssertsMixin):
self.assertReturnNonEmptySaltType(ret)
for state_id, state_result in six.iteritems(ret):
self.assertSaltTrueReturn(dict([(state_id, state_result)]))
@requires_system_grains
def test_pkgrepo_03_with_comments(self, grains):
'''
Test adding a repo with comments
'''
os_family = grains['os_family'].lower()
if os_family in ('redhat', 'suse'):
kwargs = {
'name': 'examplerepo',
'baseurl': 'http://example.com/repo',
'enabled': False,
'comments': ['This is a comment']
}
elif os_family in ('debian',):
self.skipTest('Debian/Ubuntu test case needed')
else:
self.skipTest("No test case for os_family '{0}'".format(os_family))
try:
# Run the state to add the repo
ret = self.run_state('pkgrepo.managed', **kwargs)
self.assertSaltTrueReturn(ret)
# Run again with modified comments
kwargs['comments'].append('This is another comment')
ret = self.run_state('pkgrepo.managed', **kwargs)
self.assertSaltTrueReturn(ret)
ret = ret[next(iter(ret))]
self.assertEqual(
ret['changes'],
{
'comments': {
'old': ['This is a comment'],
'new': ['This is a comment',
'This is another comment']
}
}
)
# Run a third time, no changes should be made
ret = self.run_state('pkgrepo.managed', **kwargs)
self.assertSaltTrueReturn(ret)
ret = ret[next(iter(ret))]
self.assertFalse(ret['changes'])
self.assertEqual(
ret['comment'],
"Package repo '{0}' already configured".format(kwargs['name'])
)
finally:
# Clean up
self.run_state('pkgrepo.absent', name=kwargs['name'])


@@ -694,6 +694,7 @@ class SaltTestsuiteParser(SaltCoverageTestingParser):
continue
results = self.run_suite('', name, suffix='test_*.py', load_from_name=True)
status.append(results)
return status
for suite in TEST_SUITES:
if suite != 'unit' and getattr(self.options, suite):
status.append(self.run_integration_suite(**TEST_SUITES[suite]))