Merge branch '2017.7' into test_directory

Commit: e6178fe6d4
23 changed files with 339 additions and 53 deletions
.kitchen.yml (16 changes)

@@ -13,7 +13,7 @@ driver:
 use_sudo: false
 hostname: salt
 privileged: true
-username: root
+username: kitchen
 volume:
 - /var/run/docker.sock:/docker.sock
 cap_add:

@@ -22,17 +22,17 @@ driver:
 provision_command:
 - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
 transport:
-name: sftp
+name: rsync
 <% end %>

 sudo: true
 provisioner:
 name: salt_solo
 salt_install: bootstrap
 salt_version: latest
 salt_bootstrap_url: https://bootstrap.saltstack.com
-salt_bootstrap_options: -X stable <%= version %>
+salt_bootstrap_options: -X -p rsync stable <%= version %>
 log_level: info
 sudo: true
 require_chef: false
 remote_states:
 name: git://github.com/saltstack/salt-jenkins.git

@@ -62,7 +62,7 @@ platforms:
 image: fedora:latest
 run_command: /usr/lib/systemd/systemd
 provisioner:
-salt_bootstrap_options: -X git v<%= version %> >/dev/null
+salt_bootstrap_options: -X -p rsync git v<%= version %> >/dev/null
 - name: centos-7
 driver_config:
 run_command: /usr/lib/systemd/systemd

@@ -72,7 +72,7 @@ platforms:
 provision_command:
 - yum install -y upstart
 provisioner:
-salt_bootstrap_options: -P -y -x python2.7 -X git v<%= version %> >/dev/null
+salt_bootstrap_options: -P -p rsync -y -x python2.7 -X git v<%= version %> >/dev/null
 - name: ubuntu-rolling
 driver_config:
 image: ubuntu:rolling

@@ -106,7 +106,7 @@ platforms:
 - systemctl enable sshd
 - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
 provisioner:
-salt_bootstrap_options: -X git v<%= version %> >/dev/null
+salt_bootstrap_options: -X -p rsync git v<%= version %> >/dev/null
 - name: opensuse
 driver_config:
 run_command: /usr/lib/systemd/systemd

@@ -114,7 +114,7 @@ platforms:
 - systemctl enable sshd.service
 - echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
 provisioner:
-salt_bootstrap_options: -X git v<%= version %> >/dev/null
+salt_bootstrap_options: -X -p rsync git v<%= version %> >/dev/null
 <% if vagrant != false %>
 - name: windows-2012r2
 driver:
@@ -86,7 +86,7 @@ Dependencies

 Salt should run on any Unix-like platform so long as the dependencies are met.

-* `Python 2.6`_ >= 2.6 <3.0
+* `Python 2.7`_ >= 2.7 <3.0
 * `msgpack-python`_ - High-performance message interchange format
 * `YAML`_ - Python YAML bindings
 * `Jinja2`_ - parsing Salt States (configurable in the master settings)
@@ -4,6 +4,7 @@ After=network.target salt-master.service

 [Service]
 Type=notify
 KillMode=process
+NotifyAccess=all
 LimitNOFILE=8192
 ExecStart=/usr/bin/salt-minion
@@ -4,6 +4,7 @@ Documentation=man:salt-minion(1) file:///usr/share/doc/salt/html/contents.html h
 After=network.target salt-master.service

 [Service]
 KillMode=process
 Type=notify
+NotifyAccess=all
 LimitNOFILE=8192
@@ -2,7 +2,7 @@
 from __future__ import print_function

 import sys
-import os.path
+import os
 import getopt


@@ -16,7 +16,9 @@ def display_help():
 print('# Parameters: #')
 print('# -f, --file : target file #')
 print('# -s, --search : term to search for #')
-print('# default is "C:\Python" #')
+print('# Default is the base path for the python #')
+print('# executable that is running this script. #')
+print('# In Py2 that would be C:\\Python27 #')
 print('# -r, --replace : replace with this #')
 print('# default is ".." #')
 print('# #')

@@ -29,16 +31,12 @@ def display_help():

 def main(argv):
 target = ''
-python_dir = 'Python{0}{1}'.format(sys.version_info[0], sys.version_info[1])
-if sys.version_info >= (3, 5):
-from win32com.shell import shellcon, shell
-search = shell.SHGetFolderPath(0, shellcon.CSIDL_PROGRAM_FILES, 0, 0)
-search = os.path.join(search, python_dir)
-else:
-search = os.path.join('C:\\', python_dir)
+search = os.path.dirname(sys.executable)
 replace = '..'
 try:
-opts, args = getopt.getopt(argv,"hf:s:r:",["file=","search=", "replace="])
+opts, args = getopt.getopt(argv,
+"hf:s:r:",
+["file=", "search=", "replace="])
 except getopt.GetoptError:
 display_help()
 for opt, arg in opts:

@@ -56,10 +54,10 @@ def main(argv):
 if sys.version_info >= (3, 0):
 search = search.encode('utf-8')
 replace = replace.encode('utf-8')
-f = open( target, 'rb' ).read()
-f = f.replace( search, replace )
-f = f.replace( search.lower(), replace )
-open( target, 'wb' ).write(f)
+f = open(target, 'rb').read()
+f = f.replace(search, replace)
+f = f.replace(search.lower(), replace)
+open(target, 'wb').write(f)


 if __name__ == "__main__":
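The hunks above simplify how this helper picks the path it scrubs out of files: rather than guessing C:\PythonXY (or querying the shell-folder API on Python 3.5+), the default search term is now the directory of the interpreter that is running the script. A minimal standalone sketch of that idea; make_portable() and its defaults are illustrative, not the repository script itself:

# Sketch only: same search/replace idea as the patched script, assumed names.
import os
import sys


def make_portable(target, search=None, replace=b'..'):
    # Default search term: base directory of the running Python executable,
    # e.g. C:\Python27 or a virtualenv path.
    if search is None:
        search = os.path.dirname(sys.executable)
    if not isinstance(search, bytes):
        search = search.encode('utf-8')
    data = open(target, 'rb').read()
    data = data.replace(search, replace)
    data = data.replace(search.lower(), replace)
    open(target, 'wb').write(data)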
@@ -72,6 +72,12 @@ class SSHHighState(salt.state.BaseHighState):
 self.matcher = salt.minion.Matcher(self.opts)
 self.tops = salt.loader.tops(self.opts)

+self._pydsl_all_decls = {}
+self._pydsl_render_stack = []
+
+def push_active(self):
+salt.state.HighState.stack.append(self)
+
 def load_dynamic(self, matches):
 '''
 Stub out load_dynamic
@@ -87,6 +87,28 @@ def _merge_extra_filerefs(*args):
 return ','.join(ret)


+def _cleanup_slsmod_low_data(low_data):
+'''
+Set "slsmod" keys to None to make
+low_data JSON serializable
+'''
+for i in low_data:
+if 'slsmod' in i:
+i['slsmod'] = None
+
+
+def _cleanup_slsmod_high_data(high_data):
+'''
+Set "slsmod" keys to None to make
+high_data JSON serializable
+'''
+for i in six.itervalues(high_data):
+if 'stateconf' in i:
+stateconf_data = i['stateconf'][1]
+if 'slsmod' in stateconf_data:
+stateconf_data['slsmod'] = None
+
+
 def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
 '''
 Create the seed file for a state.sls run
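The two helpers added above exist because rendered low/high data can carry an 'slsmod' entry holding a live Python module object (attached by some renderers), and such objects cannot be JSON-serialized when the data is shipped to the salt-ssh target. A standalone sketch of the low-data case; the sample chunks below are made up:

import json
import types

# Fake low-state chunks; the 'slsmod' value stands in for the module object
# a renderer can attach to rendered states.
low_data = [
    {'state': 'pkg', 'fun': 'installed', 'name': 'vim', 'slsmod': types.ModuleType('mod')},
    {'state': 'service', 'fun': 'running', 'name': 'sshd'},
]

for chunk in low_data:
    if 'slsmod' in chunk:
        chunk['slsmod'] = None          # same strategy as _cleanup_slsmod_low_data

print(json.dumps(low_data))             # succeeds now; with the module object it raises TypeError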
@@ -99,6 +121,7 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
 __pillar__,
 __salt__,
 __context__['fileclient'])
+st_.push_active()
 if isinstance(mods, str):
 mods = mods.split(',')
 high_data, errors = st_.render_highstate({saltenv: mods})

@@ -130,6 +153,7 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
 )
 )
 # Create the tar containing the state pkg and relevant files.
+_cleanup_slsmod_low_data(chunks)
 trans_tar = salt.client.ssh.state.prep_trans_tar(
 __opts__,
 __context__['fileclient'],

@@ -371,6 +395,7 @@ def high(data, **kwargs):
 __pillar__,
 __salt__,
 __context__['fileclient'])
+st_.push_active()
 chunks = st_.state.compile_high_data(data)
 file_refs = salt.client.ssh.state.lowstate_file_refs(
 chunks,

@@ -380,6 +405,7 @@ def high(data, **kwargs):
 )
 )
 # Create the tar containing the state pkg and relevant files.
+_cleanup_slsmod_low_data(chunks)
 trans_tar = salt.client.ssh.state.prep_trans_tar(
 __opts__,
 __context__['fileclient'],

@@ -600,6 +626,7 @@ def highstate(test=None, **kwargs):
 __pillar__,
 __salt__,
 __context__['fileclient'])
+st_.push_active()
 chunks = st_.compile_low_chunks()
 file_refs = salt.client.ssh.state.lowstate_file_refs(
 chunks,

@@ -614,6 +641,7 @@ def highstate(test=None, **kwargs):
 __context__['retcode'] = 1
 return chunks
 # Create the tar containing the state pkg and relevant files.
+_cleanup_slsmod_low_data(chunks)
 trans_tar = salt.client.ssh.state.prep_trans_tar(
 __opts__,
 __context__['fileclient'],

@@ -680,6 +708,7 @@ def top(topfn, test=None, **kwargs):
 __salt__,
 __context__['fileclient'])
 st_.opts['state_top'] = os.path.join('salt://', topfn)
+st_.push_active()
 chunks = st_.compile_low_chunks()
 file_refs = salt.client.ssh.state.lowstate_file_refs(
 chunks,

@@ -689,6 +718,7 @@ def top(topfn, test=None, **kwargs):
 )
 )
 # Create the tar containing the state pkg and relevant files.
+_cleanup_slsmod_low_data(chunks)
 trans_tar = salt.client.ssh.state.prep_trans_tar(
 __opts__,
 __context__['fileclient'],

@@ -746,7 +776,10 @@ def show_highstate():
 __pillar__,
 __salt__,
 __context__['fileclient'])
-return st_.compile_highstate()
+st_.push_active()
+chunks = st_.compile_highstate()
+_cleanup_slsmod_high_data(chunks)
+return chunks


 def show_lowstate():

@@ -765,7 +798,10 @@ def show_lowstate():
 __pillar__,
 __salt__,
 __context__['fileclient'])
-return st_.compile_low_chunks()
+st_.push_active()
+chunks = st_.compile_low_chunks()
+_cleanup_slsmod_low_data(chunks)
+return chunks


 def sls_id(id_, mods, test=None, queue=False, **kwargs):

@@ -884,6 +920,7 @@ def show_sls(mods, saltenv='base', test=None, **kwargs):
 __pillar__,
 __salt__,
 __context__['fileclient'])
+st_.push_active()
 if isinstance(mods, string_types):
 mods = mods.split(',')
 high_data, errors = st_.render_highstate({saltenv: mods})

@@ -898,6 +935,7 @@ def show_sls(mods, saltenv='base', test=None, **kwargs):
 # Verify that the high data is structurally sound
 if errors:
 return errors
+_cleanup_slsmod_high_data(high_data)
 return high_data


@@ -927,6 +965,7 @@ def show_low_sls(mods, saltenv='base', test=None, **kwargs):
 __pillar__,
 __salt__,
 __context__['fileclient'])
+st_.push_active()
 if isinstance(mods, string_types):
 mods = mods.split(',')
 high_data, errors = st_.render_highstate({saltenv: mods})

@@ -942,6 +981,7 @@ def show_low_sls(mods, saltenv='base', test=None, **kwargs):
 if errors:
 return errors
 ret = st_.state.compile_high_data(high_data)
+_cleanup_slsmod_low_data(ret)
 return ret

@@ -2155,7 +2155,8 @@ def minion_config(path,
 defaults=None,
 cache_minion_id=False,
 ignore_config_errors=True,
-minion_id=None):
+minion_id=None,
+role='minion'):
 '''
 Reads in the minion configuration file and sets up special options

@@ -2195,6 +2196,7 @@ def minion_config(path,
 opts = apply_minion_config(overrides, defaults,
 cache_minion_id=cache_minion_id,
 minion_id=minion_id)
+opts['__role'] = role
 apply_sdb(opts)
 _validate_opts(opts)
 return opts
@@ -739,7 +739,11 @@ class MasterMinion(object):
 matcher=True,
 whitelist=None,
 ignore_config_errors=True):
-self.opts = salt.config.minion_config(opts['conf_file'], ignore_config_errors=ignore_config_errors)
+self.opts = salt.config.minion_config(
+opts['conf_file'],
+ignore_config_errors=ignore_config_errors,
+role='master'
+)
 self.opts.update(opts)
 self.whitelist = whitelist
 self.opts['grains'] = salt.loader.grains(opts)
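Taken together, the two files patched above thread a role keyword through minion_config() so that opts['__role'] reflects whether the options belong to a real minion or to the master-side MasterMinion. Assuming an installation that includes this change, the visible effect is roughly this (the config path is an example):

import salt.config

minion_opts = salt.config.minion_config('/etc/salt/minion')
master_side = salt.config.minion_config('/etc/salt/minion', role='master')

print(minion_opts['__role'])   # 'minion' (the default)
print(master_side['__role'])   # 'master', as MasterMinion now requests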
@@ -421,7 +421,7 @@ def status(name, sig=None, runas=None):
 for line in output.splitlines():
 if 'PID' in line:
 continue
-if re.search(name, line):
+if re.search(name, line.split()[-1]):
 if line.split()[0].isdigit():
 if pids:
 pids += '\n'
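The one-line change above narrows the regex from the whole service line to its last column (the service label), so a name that happens to appear in another field, such as the PID column, no longer produces a false positive. A standalone illustration with made-up PID/Status/Label output:

import re

# Made-up output in the PID / Status / Label shape the patched loop walks.
output = '''PID	Status	Label
456	0	com.example.agent
123	0	com.example.456service
'''

name = '456'
for line in output.splitlines():
    if 'PID' in line:
        continue
    # The old check, re.search(name, line), also matched the first entry
    # because its PID is 456; matching only the last column checks the label.
    if re.search(name, line.split()[-1]) and line.split()[0].isdigit():
        print('match:', line.split()[-1])   # prints only com.example.456service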
@@ -274,7 +274,7 @@ def _get_extra_options(**kwargs):
 ret = []
 kwargs = salt.utils.clean_kwargs(**kwargs)
 for key, value in six.iteritems(kwargs):
-if isinstance(key, six.string_types):
+if isinstance(value, six.string_types):
 ret.append('--{0}=\'{1}\''.format(key, value))
 elif value is True:
 ret.append('--{0}'.format(key))
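The fix above tests the type of the value, not the key, before quoting: string values become --key='value' flags, while a bare True still becomes a flag with no value. A self-contained sketch of that formatting rule, with plain Python standing in for salt.utils.clean_kwargs and six:

def build_extra_options(**kwargs):
    # Roughly what clean_kwargs does: drop Salt-internal dunder kwargs.
    kwargs = {k: v for k, v in kwargs.items() if not k.startswith('__')}
    ret = []
    for key, value in kwargs.items():
        if isinstance(value, str):
            ret.append("--{0}='{1}'".format(key, value))
        elif value is True:
            ret.append('--{0}'.format(key))
    return ret


print(build_extra_options(gpgcheck=True, installroot='/mnt', __pub_user='root'))
# ['--gpgcheck', "--installroot='/mnt'"]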
@@ -664,6 +664,8 @@ class Pillar(object):
 log.error(msg)
 errors.append(msg)
 else:
+# render included state(s)
+include_states = []
 for sub_sls in state.pop('include'):
 if isinstance(sub_sls, dict):
 sub_sls, v = next(six.iteritems(sub_sls))

@@ -685,16 +687,23 @@ class Pillar(object):
 nstate = {
 key_fragment: nstate
 }

+include_states.append(nstate)
 if err:
 errors += err
+if include_states:
+# merge included state(s) with the current state merged last
+include_states.append(state)
+state = None
+for s in include_states:
+if state is None:
+state = s
+else:
 state = merge(
 state,
-nstate,
+s,
 self.merge_strategy,
 self.opts.get('renderer', 'yaml'),
 self.opts.get('pillar_merge_lists', False))

 if err:
 errors += err
 return state, mods, errors

 def render_pillar(self, matches, errors=None):
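The second hunk is where the actual bug fix lives: the fold over include_states must merge each element s, not the loop-invariant nstate left over from the include-rendering block, and because the including SLS is appended last its keys win on conflict. A standalone sketch of that fold, with a simple dict update standing in for Salt's configurable merge():

def merge(dest, upd):
    # Stand-in for Salt's merge strategies: shallow dict update.
    merged = dict(dest)
    merged.update(upd)
    return merged


include_states = [{'foo': 'bar2', 'only_in_include': 1}]   # rendered includes
current = {'foo': 'bar'}                                   # the including SLS

include_states.append(current)      # current state merged last, so it wins
state = None
for s in include_states:
    if state is None:
        state = s
    else:
        state = merge(state, s)     # the bug was merging `nstate` here instead of `s`

print(state)   # {'foo': 'bar', 'only_in_include': 1}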
@@ -92,7 +92,7 @@ from salt.exceptions import (
 from salt.modules.pkg_resource import _repack_pkgs

 # Import 3rd-party libs
-import salt.ext.six as six
+from salt.ext import six

 # pylint: disable=invalid-name
 _repack_pkgs = _namespaced_function(_repack_pkgs, globals())

@@ -2845,8 +2845,11 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
 if isinstance(refresh, bool):
 try:
 packages = __salt__['pkg.list_upgrades'](refresh=refresh, **kwargs)
+expected = {pkgname: {'new': pkgver, 'old': __salt__['pkg.version'](pkgname)}
+for pkgname, pkgver in six.iteritems(packages)}
 if isinstance(pkgs, list):
 packages = [pkg for pkg in packages if pkg in pkgs]
+expected = {pkgname: pkgver for pkgname, pkgver in six.iteritems(expected) if pkgname in pkgs}
 except Exception as exc:
 ret['comment'] = str(exc)
 return ret

@@ -2860,6 +2863,7 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
 return ret
 elif __opts__['test']:
 ret['comment'] = 'System update will be performed'
+ret['changes'] = expected
 ret['result'] = None
 return ret

@@ -2876,8 +2880,17 @@ def uptodate(name, refresh=False, pkgs=None, **kwargs):
 'packages: {0}'.format(exc))
 return ret

-ret['comment'] = 'Upgrade ran successfully'
-ret['result'] = True
+# If a package list was provided, ensure those packages were updated
+missing = []
+if isinstance(pkgs, list):
+missing = [pkg for pkg in six.iterkeys(expected) if pkg not in ret['changes']]
+
+if missing:
+ret['comment'] = 'The following package(s) failed to update: {0}'.format(', '.join(missing))
+ret['result'] = False
+else:
+ret['comment'] = 'Upgrade ran successfully'
+ret['result'] = True

 return ret

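The uptodate() changes above precompute an expected mapping of old/new versions so test mode can report it as changes, and after a real upgrade they compare the requested pkgs list against ret['changes'] to flag packages that silently failed to update. A standalone sketch of those two comprehensions with made-up data:

packages = {'pkga': '2.0.1', 'pkgb': '2.0.2'}          # e.g. pkg.list_upgrades result
installed = {'pkga': '1.0.1', 'pkgb': '1.0.2'}         # stands in for pkg.version lookups
pkgs = ['pkga', 'pkgb']

expected = {name: {'new': new, 'old': installed[name]}
            for name, new in packages.items()}

changes = {'pkga': {'old': '1.0.1', 'new': '2.0.1'}}   # what the upgrade reported back

missing = [name for name in expected if name not in changes] if isinstance(pkgs, list) else []
print(missing)   # ['pkgb'] -> the state result becomes False with a failure comment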
@@ -25,7 +25,7 @@ def sdb_get(uri, opts, utils=None):
 return uri

 if utils is None:
-utils = {}
+utils = salt.loader.utils(opts)

 sdlen = len('sdb://')
 indx = uri.find('/', sdlen)

@@ -56,7 +56,7 @@ def sdb_set(uri, value, opts, utils=None):
 return False

 if utils is None:
-utils = {}
+utils = salt.loader.utils(opts)

 sdlen = len('sdb://')
 indx = uri.find('/', sdlen)

@@ -87,7 +87,7 @@ def sdb_delete(uri, opts, utils=None):
 return False

 if utils is None:
-utils = {}
+utils = salt.loader.utils(opts)

 sdlen = len('sdb://')
 indx = uri.find('/', sdlen)

@@ -122,7 +122,7 @@ def sdb_get_or_set_hash(uri,
 return False

 if utils is None:
-utils = {}
+utils = salt.loader.utils(opts)

 ret = sdb_get(uri, opts, utils=utils)

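All four sdb hunks replace an empty-dict default with a real loader call, so callers that do not pass utils still get working utility modules instead of a silent empty lookup. The pattern in generic, hedged form; load_utils below is only a placeholder for salt.loader.utils:

def load_utils(opts):
    # Placeholder for salt.loader.utils(opts); returns utility callables.
    return {'example.util': lambda: opts.get('example', 'default')}


def sdb_get(uri, opts, utils=None):
    if utils is None:
        utils = load_utils(opts)   # previously just `utils = {}`
    return utils['example.util']()


print(sdb_get('sdb://osenv/USER', {'example': 'loaded'}))   # 'loaded'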
@@ -98,7 +98,7 @@ def _get_vault_connection():
 Get the connection details for calling Vault, from local configuration if
 it exists, or from the master otherwise
 '''
-if 'vault' in __opts__ and not __opts__.get('__role', 'minion') == 'master':
+if 'vault' in __opts__ and __opts__.get('__role', 'minion') == 'master':
 log.debug('Using Vault connection details from local config')
 try:
 return {
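The flipped condition above means a local vault section in the options is only honored when the process is actually the master (__role == 'master'); a minion now routes credential requests through the master even if it carries a local vault block. A tiny standalone check of the corrected predicate with made-up opts:

def uses_local_vault_config(opts):
    # Corrected predicate from the hunk above.
    return 'vault' in opts and opts.get('__role', 'minion') == 'master'


print(uses_local_vault_config({'vault': {'url': 'https://vault.example.com:8200'},
                               '__role': 'master'}))   # True
print(uses_local_vault_config({'vault': {'url': 'https://vault.example.com:8200'},
                               '__role': 'minion'}))   # False -> ask the master instead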
@@ -867,7 +867,10 @@ def dacl(obj_name=None, obj_type='file'):
 '''
 # Get the principal from the sid (object sid)
 sid = win32security.ConvertSidToStringSid(ace[2])
-principal = get_name(sid)
+try:
+principal = get_name(sid)
+except CommandExecutionError:
+principal = sid

 # Get the ace type
 ace_type = self.ace_type[ace[0][0]]

@@ -1170,17 +1173,17 @@ def get_owner(obj_name):

 except MemoryError:
 # Generic Memory Error (Windows Server 2003+)
-owner_sid = 'S-1-1-0'
+owner_sid = 'S-1-0-0'

 except pywintypes.error as exc:
 # Incorrect function error (Windows Server 2008+)
 if exc.winerror == 1 or exc.winerror == 50:
-owner_sid = 'S-1-1-0'
+owner_sid = 'S-1-0-0'
 else:
 raise CommandExecutionError(
 'Failed to get owner: {0}'.format(exc.strerror))

-return get_name(win32security.ConvertSidToStringSid(owner_sid))
+return get_name(owner_sid)


 def get_primary_group(obj_name):

@@ -1210,12 +1213,12 @@ def get_primary_group(obj_name):

 except MemoryError:
 # Generic Memory Error (Windows Server 2003+)
-primary_group_gid = 'S-1-1-0'
+primary_group_gid = 'S-1-0-0'

 except pywintypes.error as exc:
 # Incorrect function error (Windows Server 2008+)
 if exc.winerror == 1 or exc.winerror == 50:
-primary_group_gid = 'S-1-1-0'
+primary_group_gid = 'S-1-0-0'
 else:
 raise CommandExecutionError(
 'Failed to set permissions: {0}'.format(exc.strerror))
@@ -54,4 +54,4 @@ AcceptEnv LANG LC_*

 Subsystem sftp /usr/lib/openssh/sftp-server

-#UsePAM yes
+UsePAM yes
@@ -25,6 +25,8 @@ class ServiceModuleTest(ModuleCase):
 elif os_family == 'Arch':
 self.service_name = 'systemd-journald'
 cmd_name = 'systemctl'
+elif os_family == 'MacOS':
+self.service_name = 'org.ntp.ntpd'

 if salt.utils.which(cmd_name) is None:
 self.skipTest('{0} is not installed'.format(cmd_name))
@@ -124,6 +124,10 @@ class RunnerReturnsTest(ShellCase):
 self.clean_return(deserialized)

 # Now we have something sane we can reliably compare in an assert.
+if 'SUDO_USER' in os.environ:
+user = 'sudo_{0}'.format(os.environ['SUDO_USER'])
+else:
+user = RUNTIME_VARS.RUNNING_TESTS_USER
 self.assertEqual(
 deserialized,
 {'fun': 'runner.test.arg',

@@ -131,5 +135,5 @@
 'jid': jid,
 'return': {'args': ['foo'], 'kwargs': {'bar': 'hello world!'}},
 'success': True,
-'user': RUNTIME_VARS.RUNNING_TESTS_USER if 'SUDO_USER' not in os.environ else 'root'}
+'user': user}
 )
tests/integration/spm/test_man_spm.py (new file, 22 lines)

# -*- coding: utf-8 -*-
'''
Tests man spm
'''
# Import python libs
from __future__ import absolute_import

# Import Salt Testing libs
from tests.support.case import ModuleCase


class SPMManTest(ModuleCase):
    '''
    Validate man spm
    '''
    def test_man_spm(self):
        '''
        test man spm
        '''
        cmd = self.run_function('cmd.run', ['man spm'])
        self.assertIn('Salt Package Manager', cmd)
        self.assertIn('command for managing Salt packages', cmd)
@@ -21,5 +21,8 @@ class SSHGrainsTest(SSHCase):
 test grains.items with salt-ssh
 '''
 ret = self.run_function('grains.items')
-self.assertEqual(ret['kernel'], 'Linux')
+grain = 'Linux'
+if salt.utils.is_darwin():
+grain = 'Darwin'
+self.assertEqual(ret['kernel'], grain)
 self.assertTrue(isinstance(ret, dict))
tests/unit/states/test_pkg.py (new file, 178 lines)

# -*- coding: utf-8 -*-

# Import Python libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
    NO_MOCK,
    NO_MOCK_REASON,
    MagicMock,
    patch)

# Import Salt Libs
from salt.ext import six
import salt.states.pkg as pkg


@skipIf(NO_MOCK, NO_MOCK_REASON)
class PkgTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.states.pkg
    '''
    pkgs = {
        'pkga': {'old': '1.0.1', 'new': '2.0.1'},
        'pkgb': {'old': '1.0.2', 'new': '2.0.2'},
        'pkgc': {'old': '1.0.3', 'new': '2.0.3'}
    }

    def setup_loader_modules(self):
        return {
            pkg: {
                '__grains__': {
                    'os': 'CentOS'
                }
            }
        }

    def test_uptodate_with_changes(self):
        '''
        Test pkg.uptodate with simulated changes
        '''
        list_upgrades = MagicMock(return_value={
            pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs)
        })
        upgrade = MagicMock(return_value=self.pkgs)
        version = MagicMock(side_effect=lambda pkgname: self.pkgs[pkgname]['old'])

        with patch.dict(pkg.__salt__,
                        {'pkg.list_upgrades': list_upgrades,
                         'pkg.upgrade': upgrade,
                         'pkg.version': version}):

            # Run state with test=false
            with patch.dict(pkg.__opts__, {'test': False}):

                ret = pkg.uptodate('dummy', test=True)
                self.assertTrue(ret['result'])
                self.assertDictEqual(ret['changes'], self.pkgs)

            # Run state with test=true
            with patch.dict(pkg.__opts__, {'test': True}):
                ret = pkg.uptodate('dummy', test=True)
                self.assertIsNone(ret['result'])
                self.assertDictEqual(ret['changes'], self.pkgs)

    def test_uptodate_with_pkgs_with_changes(self):
        '''
        Test pkg.uptodate with simulated changes
        '''

        pkgs = {
            'pkga': {'old': '1.0.1', 'new': '2.0.1'},
            'pkgb': {'old': '1.0.2', 'new': '2.0.2'},
            'pkgc': {'old': '1.0.3', 'new': '2.0.3'}
        }

        list_upgrades = MagicMock(return_value={
            pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs)
        })
        upgrade = MagicMock(return_value=self.pkgs)
        version = MagicMock(side_effect=lambda pkgname: pkgs[pkgname]['old'])

        with patch.dict(pkg.__salt__,
                        {'pkg.list_upgrades': list_upgrades,
                         'pkg.upgrade': upgrade,
                         'pkg.version': version}):
            # Run state with test=false
            with patch.dict(pkg.__opts__, {'test': False}):
                ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
                self.assertTrue(ret['result'])
                self.assertDictEqual(ret['changes'], pkgs)

            # Run state with test=true
            with patch.dict(pkg.__opts__, {'test': True}):
                ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
                self.assertIsNone(ret['result'])
                self.assertDictEqual(ret['changes'], pkgs)

    def test_uptodate_no_changes(self):
        '''
        Test pkg.uptodate with no changes
        '''
        list_upgrades = MagicMock(return_value={})
        upgrade = MagicMock(return_value={})

        with patch.dict(pkg.__salt__,
                        {'pkg.list_upgrades': list_upgrades,
                         'pkg.upgrade': upgrade}):

            # Run state with test=false
            with patch.dict(pkg.__opts__, {'test': False}):

                ret = pkg.uptodate('dummy', test=True)
                self.assertTrue(ret['result'])
                self.assertDictEqual(ret['changes'], {})

            # Run state with test=true
            with patch.dict(pkg.__opts__, {'test': True}):
                ret = pkg.uptodate('dummy', test=True)
                self.assertTrue(ret['result'])
                self.assertDictEqual(ret['changes'], {})

    def test_uptodate_with_pkgs_no_changes(self):
        '''
        Test pkg.uptodate with no changes
        '''
        list_upgrades = MagicMock(return_value={})
        upgrade = MagicMock(return_value={})

        with patch.dict(pkg.__salt__,
                        {'pkg.list_upgrades': list_upgrades,
                         'pkg.upgrade': upgrade}):
            # Run state with test=false
            with patch.dict(pkg.__opts__, {'test': False}):
                ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
                self.assertTrue(ret['result'])
                self.assertDictEqual(ret['changes'], {})

            # Run state with test=true
            with patch.dict(pkg.__opts__, {'test': True}):
                ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
                self.assertTrue(ret['result'])
                self.assertDictEqual(ret['changes'], {})

    def test_uptodate_with_failed_changes(self):
        '''
        Test pkg.uptodate with simulated failed changes
        '''

        pkgs = {
            'pkga': {'old': '1.0.1', 'new': '2.0.1'},
            'pkgb': {'old': '1.0.2', 'new': '2.0.2'},
            'pkgc': {'old': '1.0.3', 'new': '2.0.3'}
        }

        list_upgrades = MagicMock(return_value={
            pkgname: pkgver['new'] for pkgname, pkgver in six.iteritems(self.pkgs)
        })
        upgrade = MagicMock(return_value={})
        version = MagicMock(side_effect=lambda pkgname: pkgs[pkgname]['old'])

        with patch.dict(pkg.__salt__,
                        {'pkg.list_upgrades': list_upgrades,
                         'pkg.upgrade': upgrade,
                         'pkg.version': version}):
            # Run state with test=false
            with patch.dict(pkg.__opts__, {'test': False}):
                ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
                self.assertFalse(ret['result'])
                self.assertDictEqual(ret['changes'], {})

            # Run state with test=true
            with patch.dict(pkg.__opts__, {'test': True}):
                ret = pkg.uptodate('dummy', test=True, pkgs=[pkgname for pkgname in six.iterkeys(self.pkgs)])
                self.assertIsNone(ret['result'])
                self.assertDictEqual(ret['changes'], pkgs)
@@ -126,7 +126,7 @@ class PillarTestCase(TestCase):
 ]
 self.assertEqual(
 pillar.render_pillar({'base': ['foo.sls']}),
-({'foo': 'bar2'}, [])
+({'foo': 'bar'}, [])
 )

 # Test includes using empty key directive

@@ -136,7 +136,7 @@
 ]
 self.assertEqual(
 pillar.render_pillar({'base': ['foo.sls']}),
-({'foo': 'bar2'}, [])
+({'foo': 'bar'}, [])
 )

 # Test includes using simple non-nested key