Mirror of https://github.com/saltstack/salt.git, synced 2025-04-17 10:10:20 +00:00
Merge branch '2016.3' into 'carbon'
Conflicts:
    - conf/master
    - doc/ref/configuration/master.rst
    - salt/cli/daemons.py
    - salt/client/ssh/__init__.py
    - salt/config/__init__.py
    - salt/modules/archive.py
    - salt/modules/cmdmod.py
    - tests/unit/states/service_test.py

Commit 112903ad7e
25 changed files with 680 additions and 128 deletions
|
@ -1022,5 +1022,13 @@
############################################

# Default match type for filtering event tags: startswith, endswith, find, regex, fnmatch
#event_match_type: startswith

# Save runner returns to the job cache
#runner_returns: True

# Permanently include any available Python 3rd party modules into Salt Thin
# when they are generated for Salt-SSH or other purposes.
# The modules should be named by the names under which they are actually imported in Python.
# The value of the parameter can be either a single module or a comma-separated list of modules.
#thin_extra_mods: foo,bar

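The match types listed for ``event_match_type`` above correspond to familiar string operations. A small illustration of the assumed semantics, using plain Python equivalents rather than Salt's internal event matcher, with a made-up tag:

# Plain-Python approximations of the five match types (assumed semantics).
import fnmatch
import re

tag = 'salt/job/20160905123456789012/ret/minion1'   # hypothetical event tag

assert tag.startswith('salt/job/')                   # startswith
assert tag.endswith('/ret/minion1')                  # endswith
assert tag.find('/ret/') != -1                       # find
assert re.search(r'^salt/job/\d+/ret/', tag)         # regex
assert fnmatch.fnmatch(tag, 'salt/job/*/ret/*')      # fnmatch
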
@ -746,6 +746,16 @@ authentication with minions

ssh_use_home_key: False

``thin_extra_mods``
-------------------

Default: None

List of additional modules to include in the Salt Thin.
Pass a list of importable Python modules (typically found in the Python
`site-packages` directory) so that they are always included in the Salt
Thin once it is generated.

Master Security Settings
========================

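To make the plumbing concrete, here is a minimal sketch of how the option reaches the thin generator, modelled on the ``gen_thin`` call this merge adds to ``salt/client/ssh/__init__.py``; the opts values are placeholders:

# Sketch only: thin_extra_mods flowing into salt.utils.thin.gen_thin.
# The opts dict stands in for the parsed master configuration.
import salt.utils.thin

opts = {
    'cachedir': '/var/cache/salt/master',   # placeholder path
    'thin_extra_mods': 'foo,bar',           # comma-separated module names
    'regen_thin': False,
    'python2_bin': 'python2',
    'python3_bin': 'python3',
}

thin_path = salt.utils.thin.gen_thin(
    opts['cachedir'],
    extra_mods=opts.get('thin_extra_mods'),
    overwrite=opts['regen_thin'],
    python2_bin=opts['python2_bin'],
    python3_bin=opts['python3_bin'],
)

The same list can also be supplied per invocation through the new ``--thin-extra-modules`` option that this merge adds to salt-ssh.
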
@ -298,8 +298,10 @@ class Minion(parsers.MinionOptionParser, DaemonsMixin): # pylint: disable=no-in
|
|||
self.action_log_info('An instance is already running. Exiting')
|
||||
self.shutdown(1)
|
||||
|
||||
transport = self.config.get('transport').lower()
|
||||
|
||||
# TODO: AIO core is separate from transport
|
||||
if self.config['transport'].lower() in ('zeromq', 'tcp', 'detect'):
|
||||
if transport in ('zeromq', 'tcp', 'detect'):
|
||||
# Late import so logging works correctly
|
||||
import salt.minion
|
||||
# If the minion key has not been accepted, then Salt enters a loop
|
||||
|
@ -311,11 +313,19 @@ class Minion(parsers.MinionOptionParser, DaemonsMixin): # pylint: disable=no-in
|
|||
if self.config.get('master_type') == 'func':
|
||||
salt.minion.eval_master_func(self.config)
|
||||
self.minion = salt.minion.MinionManager(self.config)
|
||||
else:
|
||||
elif transport == 'raet':
|
||||
import salt.daemons.flo
|
||||
self.daemonize_if_required()
|
||||
self.set_pidfile()
|
||||
self.minion = salt.daemons.flo.IofloMinion(self.config)
|
||||
else:
|
||||
log.error(
|
||||
'The transport \'{0}\' is not supported. Please use one of the following: '
|
||||
'tcp, '
|
||||
'raet, '
|
||||
'or zeromq.'.format(transport)
|
||||
)
|
||||
self.shutdown(1)
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
|
|
|
@ -305,6 +305,7 @@ class SSH(object):
|
|||
self.returners = salt.loader.returners(self.opts, {})
|
||||
self.fsclient = salt.fileclient.FSClient(self.opts)
|
||||
self.thin = salt.utils.thin.gen_thin(self.opts['cachedir'],
|
||||
extra_mods=self.opts.get('thin_extra_mods'),
|
||||
overwrite=self.opts['regen_thin'],
|
||||
python2_bin=self.opts['python2_bin'],
|
||||
python3_bin=self.opts['python3_bin'])
|
||||
|
|
|
@ -911,6 +911,9 @@ VALID_OPTS = {
|
|||
|
||||
# Minion data cache driver (one of salt.cache.* modules)
|
||||
'cache': str,
|
||||
|
||||
# Extra modules for Salt Thin
|
||||
'thin_extra_mods': str,
|
||||
}
|
||||
|
||||
# default configurations
|
||||
|
@ -1420,6 +1423,7 @@ DEFAULT_MASTER_OPTS = {
|
|||
'python2_bin': 'python2',
|
||||
'python3_bin': 'python3',
|
||||
'cache': 'localfs',
|
||||
'thin_extra_mods': '',
|
||||
}
|
||||
|
||||
|
||||
|
@ -1465,8 +1469,8 @@ CLOUD_CONFIG_DEFAULTS = {
|
|||
|
||||
DEFAULT_API_OPTS = {
|
||||
# ----- Salt master settings overridden by Salt-API --------------------->
|
||||
'api_pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-api.pid'),
|
||||
'api_logfile': os.path.join(salt.syspaths.LOGS_DIR, 'api'),
|
||||
'pidfile': '/var/run/salt-api.pid',
|
||||
'logfile': '/var/log/salt/api',
|
||||
'rest_timeout': 300,
|
||||
# <---- Salt master settings overridden by Salt-API ----------------------
|
||||
}
|
||||
|
@ -3369,15 +3373,12 @@ def api_config(path):
|
|||
Read in the salt master config file and add additional configs that
|
||||
need to be stubbed out for salt-api
|
||||
'''
|
||||
# Let's grab a copy of salt's master opts
|
||||
opts = client_config(path, defaults=DEFAULT_MASTER_OPTS)
|
||||
# Let's grab a copy of salt's master default opts
|
||||
defaults = DEFAULT_MASTER_OPTS
|
||||
# Let's override them with salt-api's required defaults
|
||||
api_opts = {
|
||||
'log_file': opts.get('api_logfile', DEFAULT_API_OPTS['api_logfile']),
|
||||
'pidfile': opts.get('api_pidfile', DEFAULT_API_OPTS['api_pidfile'])
|
||||
}
|
||||
opts.update(api_opts)
|
||||
return opts
|
||||
defaults.update(DEFAULT_API_OPTS)
|
||||
|
||||
return client_config(path, defaults=defaults)
|
||||
|
||||
|
||||
def spm_config(path):
|
||||
|
|
|
@ -282,7 +282,8 @@ def latest_version(*names, **kwargs):
|
|||
cmd.extend(repo)
|
||||
out = __salt__['cmd.run_all'](cmd,
|
||||
output_loglevel='trace',
|
||||
python_shell=False)
|
||||
python_shell=False,
|
||||
env={'LC_ALL': 'C', 'LANG': 'C'})
|
||||
candidate = ''
|
||||
for line in out['stdout'].splitlines():
|
||||
if 'Candidate' in line:
|
||||
|
|
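The added ``env={'LC_ALL': 'C', 'LANG': 'C'}`` presumably pins the apt output to the C locale so that the ``'Candidate'`` scan keeps matching under non-English locales. A rough sketch of that parse, with fabricated sample output; the real module's version extraction may differ:

# Fabricated apt-cache policy output, for illustration only.
sample = '''\
vim:
  Installed: 2:7.4.052-1ubuntu3
  Candidate: 2:7.4.052-1ubuntu3.1
'''

candidate = ''
for line in sample.splitlines():
    if 'Candidate' in line:
        # Keep everything after the first colon as the version string.
        candidate = line.split(':', 1)[1].strip()

assert candidate == '2:7.4.052-1ubuntu3.1'
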
|
@ -41,7 +41,7 @@ def __virtual__():
|
|||
commands = ('tar', 'gzip', 'gunzip', 'zip', 'unzip', 'rar', 'unrar')
|
||||
# If none of the above commands are in $PATH this module is a no-go
|
||||
if not any(salt.utils.which(cmd) for cmd in commands):
|
||||
return (False, 'The archive module could not be loaded: unable to find commands tar,gzip,gunzip,zip,unzip,rar,unrar')
|
||||
return (False, 'Unable to find commands tar,gzip,gunzip,zip,unzip,rar,unrar')
|
||||
return True
|
||||
|
||||
|
||||
|
@ -135,7 +135,7 @@ def tar(options, tarfile, sources=None, dest=None,
|
|||
|
||||
|
||||
@salt.utils.decorators.which('gzip')
|
||||
def gzip(sourcefile, template=None, runas=None):
|
||||
def gzip(sourcefile, template=None, runas=None, options=None):
|
||||
'''
|
||||
Uses the gzip command to create gzip files
|
||||
|
||||
|
@ -147,14 +147,27 @@ def gzip(sourcefile, template=None, runas=None):
|
|||
|
||||
salt '*' archive.gzip template=jinja /tmp/{{grains.id}}.txt
|
||||
|
||||
runas : None
|
||||
The user with which to run the gzip command line
|
||||
|
||||
options : None
|
||||
Pass any additional arguments to gzip
|
||||
|
||||
.. versionadded:: 2016.3.4
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Create /tmp/sourcefile.txt.gz
|
||||
salt '*' archive.gzip /tmp/sourcefile.txt
|
||||
salt '*' archive.gzip /tmp/sourcefile.txt options='-9 --verbose'
|
||||
'''
|
||||
cmd = ['gzip', '{0}'.format(sourcefile)]
|
||||
cmd = ['gzip']
|
||||
if options:
|
||||
cmd.append(options)
|
||||
cmd.append('{0}'.format(sourcefile))
|
||||
|
||||
return __salt__['cmd.run'](cmd,
|
||||
template=template,
|
||||
runas=runas,
|
||||
|
@ -162,7 +175,7 @@ def gzip(sourcefile, template=None, runas=None):
|
|||
|
||||
|
||||
@salt.utils.decorators.which('gunzip')
|
||||
def gunzip(gzipfile, template=None, runas=None):
|
||||
def gunzip(gzipfile, template=None, runas=None, options=None):
|
||||
'''
|
||||
Uses the gunzip command to unpack gzip files
|
||||
|
||||
|
@ -174,14 +187,27 @@ def gunzip(gzipfile, template=None, runas=None):
|
|||
|
||||
salt '*' archive.gunzip template=jinja /tmp/{{grains.id}}.txt.gz
|
||||
|
||||
runas : None
|
||||
The user with which to run the gunzip command line
|
||||
|
||||
options : None
|
||||
Pass any additional arguments to gunzip
|
||||
|
||||
.. versionadded:: 2016.3.4
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Create /tmp/sourcefile.txt
|
||||
salt '*' archive.gunzip /tmp/sourcefile.txt.gz
|
||||
salt '*' archive.gunzip /tmp/sourcefile.txt options='--verbose'
|
||||
'''
|
||||
cmd = ['gunzip', '{0}'.format(gzipfile)]
|
||||
cmd = ['gunzip']
|
||||
if options:
|
||||
cmd.append(options)
|
||||
cmd.append('{0}'.format(gzipfile))
|
||||
|
||||
return __salt__['cmd.run'](cmd,
|
||||
template=template,
|
||||
runas=runas,
|
||||
|
@ -467,6 +493,7 @@ def cmd_unzip(zip_file, dest, excludes=None,
|
|||
return _trim_files(files, trim_output)
|
||||
|
||||
|
||||
@salt.utils.decorators.depends('zipfile', fallback_function=cmd_unzip)
|
||||
def unzip(zip_file, dest, excludes=None, options=None, template=None,
|
||||
runas=None, trim_output=False, password=None, extract_perms=False):
|
||||
'''
|
||||
|
|
|
@ -21,6 +21,7 @@ import traceback
|
|||
import fnmatch
|
||||
import base64
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
|
@ -28,7 +29,8 @@ import salt.utils.timed_subprocess
|
|||
import salt.grains.extra
|
||||
import salt.ext.six as six
|
||||
from salt.utils import vt
|
||||
from salt.exceptions import CommandExecutionError, TimedProcTimeoutError
|
||||
from salt.exceptions import CommandExecutionError, TimedProcTimeoutError, \
|
||||
SaltInvocationError
|
||||
from salt.log import LOG_LEVELS
|
||||
from salt.ext.six.moves import range, zip
|
||||
from salt.ext.six.moves import shlex_quote as _cmd_quote
|
||||
|
@ -2032,8 +2034,8 @@ def script(source,
|
|||
|
||||
def _cleanup_tempfile(path):
|
||||
try:
|
||||
os.remove(path)
|
||||
except (IOError, OSError) as exc:
|
||||
__salt__['file.remove'](path)
|
||||
except (SaltInvocationError, CommandExecutionError) as exc:
|
||||
log.error(
|
||||
'cmd.script: Unable to clean tempfile \'{0}\': {1}'.format(
|
||||
path,
|
||||
|
@ -2050,6 +2052,12 @@ def script(source,
|
|||
)
|
||||
kwargs.pop('__env__')
|
||||
|
||||
if salt.utils.is_windows() and runas and cwd is None:
|
||||
cwd = tempfile.mkdtemp(dir=__opts__['cachedir'])
|
||||
__salt__['win_dacl.add_ace'](
|
||||
cwd, 'File', runas, 'READ&EXECUTE', 'ALLOW',
|
||||
'FOLDER&SUBFOLDERS&FILES')
|
||||
|
||||
path = salt.utils.mkstemp(dir=cwd, suffix=os.path.splitext(source)[1])
|
||||
|
||||
if template:
|
||||
|
@ -2062,7 +2070,10 @@ def script(source,
|
|||
saltenv,
|
||||
**kwargs)
|
||||
if not fn_:
|
||||
_cleanup_tempfile(path)
|
||||
if salt.utils.is_windows() and runas:
|
||||
_cleanup_tempfile(cwd)
|
||||
else:
|
||||
_cleanup_tempfile(path)
|
||||
return {'pid': 0,
|
||||
'retcode': 1,
|
||||
'stdout': '',
|
||||
|
@ -2071,7 +2082,10 @@ def script(source,
|
|||
else:
|
||||
fn_ = __salt__['cp.cache_file'](source, saltenv)
|
||||
if not fn_:
|
||||
_cleanup_tempfile(path)
|
||||
if salt.utils.is_windows() and runas:
|
||||
_cleanup_tempfile(cwd)
|
||||
else:
|
||||
_cleanup_tempfile(path)
|
||||
return {'pid': 0,
|
||||
'retcode': 1,
|
||||
'stdout': '',
|
||||
|
@ -2098,7 +2112,10 @@ def script(source,
|
|||
bg=bg,
|
||||
password=password,
|
||||
**kwargs)
|
||||
_cleanup_tempfile(path)
|
||||
if salt.utils.is_windows() and runas:
|
||||
_cleanup_tempfile(cwd)
|
||||
else:
|
||||
_cleanup_tempfile(path)
|
||||
return ret
|
||||
|
||||
|
||||
|
|
|
@ -819,7 +819,10 @@ def trust_key(keyid=None,
|
|||
|
||||
if not fingerprint:
|
||||
if keyid:
|
||||
key = get_key(keyid)
|
||||
if user:
|
||||
key = get_key(keyid, user=user)
|
||||
else:
|
||||
key = get_key(keyid)
|
||||
if key:
|
||||
if 'fingerprint' not in key:
|
||||
ret['res'] = False
|
||||
|
|
|
@ -2,11 +2,14 @@
|
|||
'''
|
||||
Support for the Mercurial SCM
|
||||
'''
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
from salt import utils
|
||||
from salt.exceptions import CommandExecutionError
|
||||
import salt.utils
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -15,7 +18,7 @@ def __virtual__():
|
|||
'''
|
||||
Only load if hg is installed
|
||||
'''
|
||||
if utils.which('hg') is None:
|
||||
if salt.utils.which('hg') is None:
|
||||
return (False,
|
||||
'The hg execution module cannot be loaded: hg unavailable.')
|
||||
else:
|
||||
|
@ -187,7 +190,14 @@ def pull(cwd, opts=None, user=None, identity=None, repository=None):
|
|||
cmd.append(opt)
|
||||
if repository is not None:
|
||||
cmd.append(repository)
|
||||
return __salt__['cmd.run'](cmd, cwd=cwd, runas=user, python_shell=False)
|
||||
|
||||
ret = __salt__['cmd.run_all'](cmd, cwd=cwd, runas=user, python_shell=False)
|
||||
if ret['retcode'] != 0:
|
||||
raise CommandExecutionError(
|
||||
'Hg command failed: {0}'.format(ret.get('stderr', ret['stdout']))
|
||||
)
|
||||
|
||||
return ret['stdout']
|
||||
|
||||
|
||||
def update(cwd, rev, force=False, user=None):
|
||||
|
@ -215,7 +225,14 @@ def update(cwd, rev, force=False, user=None):
|
|||
cmd = ['hg', 'update', '{0}'.format(rev)]
|
||||
if force:
|
||||
cmd.append('-C')
|
||||
return __salt__['cmd.run'](cmd, cwd=cwd, runas=user, python_shell=False)
|
||||
|
||||
ret = __salt__['cmd.run_all'](cmd, cwd=cwd, runas=user, python_shell=False)
|
||||
if ret['retcode'] != 0:
|
||||
raise CommandExecutionError(
|
||||
'Hg command failed: {0}'.format(ret.get('stderr', ret['stdout']))
|
||||
)
|
||||
|
||||
return ret['stdout']
|
||||
|
||||
|
||||
def clone(cwd, repository, opts=None, user=None, identity=None):
|
||||
|
@ -251,7 +268,14 @@ def clone(cwd, repository, opts=None, user=None, identity=None):
|
|||
cmd.append('{0}'.format(opt))
|
||||
if identity:
|
||||
cmd.extend(_ssh_flag(identity))
|
||||
return __salt__['cmd.run'](cmd, runas=user, python_shell=False)
|
||||
|
||||
ret = __salt__['cmd.run_all'](cmd, runas=user, python_shell=False)
|
||||
if ret['retcode'] != 0:
|
||||
raise CommandExecutionError(
|
||||
'Hg command failed: {0}'.format(ret.get('stderr', ret['stdout']))
|
||||
)
|
||||
|
||||
return ret['stdout']
|
||||
|
||||
|
||||
def status(cwd, opts=None, user=None):
|
||||
|
@ -298,7 +322,7 @@ def status(cwd, opts=None, user=None):
|
|||
ret[t].append(f)
|
||||
return ret
|
||||
|
||||
if utils.is_iter(cwd):
|
||||
if salt.utils.is_iter(cwd):
|
||||
return dict((cwd, _status(cwd)) for cwd in cwd)
|
||||
else:
|
||||
return _status(cwd)
|
||||
|
|
|
@ -16,7 +16,11 @@ import salt.utils.mac_utils
|
|||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = "xattr"
|
||||
__func_alias__ = {
|
||||
'list_': 'list',
|
||||
}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -28,7 +32,7 @@ def __virtual__():
|
|||
return False
|
||||
|
||||
|
||||
def list(path, hex=False):
|
||||
def list_(path, **kwargs):
|
||||
'''
|
||||
List all of the extended attributes on the given file/directory
|
||||
|
||||
|
@ -49,7 +53,12 @@ def list(path, hex=False):
|
|||
salt '*' xattr.list /path/to/file
|
||||
salt '*' xattr.list /path/to/file hex=True
|
||||
'''
|
||||
cmd = 'xattr "{0}"'.format(path)
|
||||
kwargs = salt.utils.clean_kwargs(**kwargs)
|
||||
hex_ = kwargs.pop('hex', False)
|
||||
if kwargs:
|
||||
salt.utils.invalid_kwargs(kwargs)
|
||||
|
||||
cmd = ['xattr', path]
|
||||
try:
|
||||
ret = salt.utils.mac_utils.execute_return_result(cmd)
|
||||
except CommandExecutionError as exc:
|
||||
|
@ -63,13 +72,13 @@ def list(path, hex=False):
|
|||
attrs_ids = ret.split("\n")
|
||||
attrs = {}
|
||||
|
||||
for id in attrs_ids:
|
||||
attrs[id] = read(path, id, hex)
|
||||
for id_ in attrs_ids:
|
||||
attrs[id_] = read(path, id_, **{'hex': hex_})
|
||||
|
||||
return attrs
|
||||
|
||||
|
||||
def read(path, attribute, hex=False):
|
||||
def read(path, attribute, **kwargs):
|
||||
'''
|
||||
Read the given attributes on the given file/directory
|
||||
|
||||
|
@ -92,11 +101,15 @@ def read(path, attribute, hex=False):
|
|||
salt '*' xattr.read /path/to/file com.test.attr
|
||||
salt '*' xattr.read /path/to/file com.test.attr hex=True
|
||||
'''
|
||||
hex_flag = ""
|
||||
if hex:
|
||||
hex_flag = "-x"
|
||||
kwargs = salt.utils.clean_kwargs(**kwargs)
|
||||
hex_ = kwargs.pop('hex', False)
|
||||
if kwargs:
|
||||
salt.utils.invalid_kwargs(kwargs)
|
||||
|
||||
cmd = 'xattr -p {0} "{1}" "{2}"'.format(hex_flag, attribute, path)
|
||||
cmd = ['xattr', '-p']
|
||||
if hex_:
|
||||
cmd.append('-x')
|
||||
cmd.extend([attribute, path])
|
||||
|
||||
try:
|
||||
ret = salt.utils.mac_utils.execute_return_result(cmd)
|
||||
|
@ -110,7 +123,7 @@ def read(path, attribute, hex=False):
|
|||
return ret
|
||||
|
||||
|
||||
def write(path, attribute, value, hex=False):
|
||||
def write(path, attribute, value, **kwargs):
|
||||
'''
|
||||
Causes the given attribute name to be assigned the given value
|
||||
|
||||
|
@ -134,11 +147,16 @@ def write(path, attribute, value, hex=False):
|
|||
salt '*' xattr.write /path/to/file "com.test.attr" "value"
|
||||
|
||||
'''
|
||||
hex_flag = ""
|
||||
if hex:
|
||||
hex_flag = "-x"
|
||||
kwargs = salt.utils.clean_kwargs(**kwargs)
|
||||
hex_ = kwargs.pop('hex', False)
|
||||
if kwargs:
|
||||
salt.utils.invalid_kwargs(kwargs)
|
||||
|
||||
cmd = ['xattr', '-w']
|
||||
if hex_:
|
||||
cmd.append('-x')
|
||||
cmd.extend([attribute, value, path])
|
||||
|
||||
cmd = 'xattr -w {0} "{1}" "{2}" "{3}"'.format(hex_flag, attribute, value, path)
|
||||
try:
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
except CommandExecutionError as exc:
|
||||
|
@ -146,7 +164,7 @@ def write(path, attribute, value, hex=False):
|
|||
raise CommandExecutionError('File not found: {0}'.format(path))
|
||||
raise CommandExecutionError('Unknown Error: {0}'.format(exc.strerror))
|
||||
|
||||
return read(path, attribute, hex) == value
|
||||
return read(path, attribute, **{'hex': hex_}) == value
|
||||
|
||||
|
||||
def delete(path, attribute):
|
||||
|
@ -180,7 +198,7 @@ def delete(path, attribute):
|
|||
raise CommandExecutionError('Attribute not found: {0}'.format(attribute))
|
||||
raise CommandExecutionError('Unknown Error: {0}'.format(exc.strerror))
|
||||
|
||||
return attribute not in list(path)
|
||||
return attribute not in list_(path)
|
||||
|
||||
|
||||
def clear(path):
|
||||
|
@ -207,4 +225,4 @@ def clear(path):
|
|||
raise CommandExecutionError('File not found: {0}'.format(path))
|
||||
raise CommandExecutionError('Unknown Error: {0}'.format(exc.strerror))
|
||||
|
||||
return list(path) == {}
|
||||
return list_(path) == {}
|
||||
|
|
|
@ -71,6 +71,23 @@ def _canonical_unit_name(name):
|
|||
return '%s.service' % name
|
||||
|
||||
|
||||
def _check_available(name):
|
||||
'''
|
||||
Returns boolean telling whether or not the named service is available
|
||||
'''
|
||||
out = _systemctl_status(name).lower()
|
||||
for line in salt.utils.itertools.split(out, '\n'):
|
||||
match = re.match(r'\s+loaded:\s+(\S+)', line)
|
||||
if match:
|
||||
ret = match.group(1) != 'not-found'
|
||||
break
|
||||
else:
|
||||
raise CommandExecutionError(
|
||||
'Failed to get information on unit \'%s\'' % name
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
def _check_for_unit_changes(name):
|
||||
'''
|
||||
Check for modified/updated unit files, and run a daemon-reload if any are
|
||||
|
@ -298,7 +315,7 @@ def _untracked_custom_unit_found(name):
|
|||
'''
|
||||
unit_path = os.path.join('/etc/systemd/system',
|
||||
_canonical_unit_name(name))
|
||||
return os.access(unit_path, os.R_OK) and not available(name)
|
||||
return os.access(unit_path, os.R_OK) and not _check_available(name)
|
||||
|
||||
|
||||
def _unit_file_changed(name):
|
||||
|
@ -520,17 +537,8 @@ def available(name):
|
|||
|
||||
salt '*' service.available sshd
|
||||
'''
|
||||
out = _systemctl_status(name).lower()
|
||||
for line in salt.utils.itertools.split(out, '\n'):
|
||||
match = re.match(r'\s+loaded:\s+(\S+)', line)
|
||||
if match:
|
||||
ret = match.group(1) != 'not-found'
|
||||
break
|
||||
else:
|
||||
raise CommandExecutionError(
|
||||
'Failed to get information on unit \'%s\'' % name
|
||||
)
|
||||
return ret
|
||||
_check_for_unit_changes(name)
|
||||
return _check_available(name)
|
||||
|
||||
|
||||
def missing(name):
|
||||
|
|
|
@ -144,6 +144,7 @@ class Serial(object):
|
|||
'This often happens when trying to read a file not in binary mode. '
|
||||
'To see message payload, enable debug logging and retry. Exception: {0}'.format(exc))
|
||||
log.debug('Msgpack deserialization failure on message: {0}'.format(msg))
|
||||
gc.collect()
|
||||
raise
|
||||
finally:
|
||||
gc.enable()
|
||||
|
|
|
@ -31,6 +31,10 @@ def generate(extra_mods='', overwrite=False, so_mods='',
|
|||
salt-run thin.generate mako,wempy 1
|
||||
salt-run thin.generate overwrite=1
|
||||
'''
|
||||
conf_mods = __opts__.get('thin_extra_mods')
|
||||
if conf_mods:
|
||||
extra_mods = ','.join([conf_mods, extra_mods])
|
||||
|
||||
return salt.utils.thin.gen_thin(__opts__['cachedir'],
|
||||
extra_mods,
|
||||
overwrite,
|
||||
|
|
|
@ -13,15 +13,16 @@ in ~/.ssh/known_hosts, and the remote host has this host's public key.
|
|||
- rev: tip
|
||||
- target: /tmp/example_repo
|
||||
'''
|
||||
from __future__ import absolute_import
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.states.git import _fail, _neutral_test
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -130,12 +131,27 @@ def _update_repo(ret, name, target, clean, user, identity, rev, opts):
|
|||
ret,
|
||||
test_result)
|
||||
|
||||
pull_out = __salt__['hg.pull'](target, user=user, identity=identity, opts=opts, repository=name)
|
||||
try:
|
||||
pull_out = __salt__['hg.pull'](target, user=user, identity=identity, opts=opts, repository=name)
|
||||
except CommandExecutionError as err:
|
||||
ret['result'] = False
|
||||
ret['comment'] = err
|
||||
return ret
|
||||
|
||||
if rev:
|
||||
__salt__['hg.update'](target, rev, force=clean, user=user)
|
||||
try:
|
||||
__salt__['hg.update'](target, rev, force=clean, user=user)
|
||||
except CommandExecutionError as err:
|
||||
ret['result'] = False
|
||||
ret['comment'] = err
|
||||
return ret
|
||||
else:
|
||||
__salt__['hg.update'](target, 'tip', force=clean, user=user)
|
||||
try:
|
||||
__salt__['hg.update'](target, 'tip', force=clean, user=user)
|
||||
except CommandExecutionError as err:
|
||||
ret['result'] = False
|
||||
ret['comment'] = err
|
||||
return ret
|
||||
|
||||
new_rev = __salt__['hg.revision'](cwd=target, user=user, rev='.')
|
||||
|
||||
|
@ -172,13 +188,23 @@ def _handle_existing(ret, target, force):
|
|||
|
||||
|
||||
def _clone_repo(ret, target, name, user, identity, rev, opts):
|
||||
result = __salt__['hg.clone'](target, name, user=user, identity=identity, opts=opts)
|
||||
try:
|
||||
result = __salt__['hg.clone'](target, name, user=user, identity=identity, opts=opts)
|
||||
except CommandExecutionError as err:
|
||||
ret['result'] = False
|
||||
ret['comment'] = err
|
||||
return ret
|
||||
|
||||
if not os.path.isdir(target):
|
||||
return _fail(ret, result)
|
||||
|
||||
if rev:
|
||||
__salt__['hg.update'](target, rev, user=user)
|
||||
try:
|
||||
__salt__['hg.update'](target, rev, user=user)
|
||||
except CommandExecutionError as err:
|
||||
ret['result'] = False
|
||||
ret['comment'] = err
|
||||
return ret
|
||||
|
||||
new_rev = __salt__['hg.revision'](cwd=target, user=user)
|
||||
message = 'Repository {0} cloned to {1}'.format(name, target)
|
||||
|
|
|
@ -110,6 +110,9 @@ def _enable(name, started, result=True, **kwargs):
|
|||
ret['comment'] = exc.strerror
|
||||
return ret
|
||||
|
||||
# Set default expected result
|
||||
ret['result'] = result
|
||||
|
||||
# Check to see if this minion supports enable
|
||||
if 'service.enable' not in __salt__ or 'service.enabled' not in __salt__:
|
||||
if started is True:
|
||||
|
@ -148,10 +151,9 @@ def _enable(name, started, result=True, **kwargs):
|
|||
return ret
|
||||
|
||||
if __salt__['service.enable'](name, **kwargs):
|
||||
after_toggle_enable_status = __salt__['service.enabled'](name,
|
||||
**kwargs)
|
||||
# Service has been enabled
|
||||
ret['changes'] = {}
|
||||
after_toggle_enable_status = __salt__['service.enabled'](name, **kwargs)
|
||||
# on upstart, certain services like apparmor will always return
|
||||
# False, even if correctly activated
|
||||
# do not trigger a change
|
||||
|
@ -169,16 +171,14 @@ def _enable(name, started, result=True, **kwargs):
|
|||
return ret
|
||||
|
||||
# Service failed to be enabled
|
||||
ret['result'] = False
|
||||
if started is True:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('Failed when setting service {0} to start at boot,'
|
||||
' but the service is running').format(name)
|
||||
elif started is None:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('Failed when setting service {0} to start at boot,'
|
||||
' but the service was already running').format(name)
|
||||
else:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('Failed when setting service {0} to start at boot,'
|
||||
' and the service is dead').format(name)
|
||||
return ret
|
||||
|
@ -200,6 +200,9 @@ def _disable(name, started, result=True, **kwargs):
|
|||
ret['comment'] = exc.strerror
|
||||
return ret
|
||||
|
||||
# Set default expected result
|
||||
ret['result'] = result
|
||||
|
||||
# is enable/disable available?
|
||||
if 'service.disable' not in __salt__ or 'service.disabled' not in __salt__:
|
||||
if started is True:
|
||||
|
@ -214,8 +217,8 @@ def _disable(name, started, result=True, **kwargs):
|
|||
' service {0} is dead').format(name)
|
||||
return ret
|
||||
|
||||
before_toggle_disable_status = __salt__['service.disabled'](name)
|
||||
# Service can be disabled
|
||||
before_toggle_disable_status = __salt__['service.disabled'](name)
|
||||
if before_toggle_disable_status:
|
||||
# Service is disabled
|
||||
if started is True:
|
||||
|
@ -241,8 +244,6 @@ def _disable(name, started, result=True, **kwargs):
|
|||
# Service has been disabled
|
||||
ret['changes'] = {}
|
||||
after_toggle_disable_status = __salt__['service.disabled'](name)
|
||||
# Service has been disabled
|
||||
ret['changes'] = {}
|
||||
# on upstart, certain services like apparmor will always return
|
||||
# False, even if correctly activated
|
||||
# do not trigger a change
|
||||
|
@ -268,7 +269,6 @@ def _disable(name, started, result=True, **kwargs):
|
|||
ret['comment'] = ('Failed when setting service {0} to not start'
|
||||
' at boot, but the service was already running'
|
||||
).format(name)
|
||||
return ret
|
||||
else:
|
||||
ret['comment'] = ('Failed when setting service {0} to not start'
|
||||
' at boot, and the service is dead').format(name)
|
||||
|
@ -373,12 +373,10 @@ def running(name, enable=None, sig=None, init_delay=None, **kwargs):
|
|||
ret.update(_enable(name, False, result=False, **kwargs))
|
||||
elif enable is False:
|
||||
ret.update(_disable(name, False, result=False, **kwargs))
|
||||
else:
|
||||
ret['comment'] = 'Started Service {0}'.format(name)
|
||||
if enable is True:
|
||||
ret.update(_enable(name, True, **kwargs))
|
||||
elif enable is False:
|
||||
ret.update(_disable(name, True, **kwargs))
|
||||
return ret
|
||||
|
||||
if init_delay:
|
||||
time.sleep(init_delay)
|
||||
|
||||
# only force a change state if we have explicitly detected them
|
||||
after_toggle_status = __salt__['service.status'](name)
|
||||
|
@ -387,17 +385,27 @@ def running(name, enable=None, sig=None, init_delay=None, **kwargs):
|
|||
else:
|
||||
after_toggle_enable_status = True
|
||||
if (
|
||||
(before_toggle_enable_status != after_toggle_enable_status) or
|
||||
(before_toggle_status != after_toggle_status)
|
||||
(before_toggle_enable_status != after_toggle_enable_status) or
|
||||
(before_toggle_status != after_toggle_status)
|
||||
) and not ret.get('changes', {}):
|
||||
ret['changes'][name] = func_ret
|
||||
ret['changes'][name] = after_toggle_status
|
||||
|
||||
if after_toggle_status:
|
||||
ret['comment'] = 'Started Service {0}'.format(name)
|
||||
else:
|
||||
ret['comment'] = 'Service {0} failed to start'.format(name)
|
||||
|
||||
if enable is True:
|
||||
ret.update(_enable(name, after_toggle_status, result=after_toggle_status, **kwargs))
|
||||
elif enable is False:
|
||||
ret.update(_disable(name, after_toggle_status, result=after_toggle_status, **kwargs))
|
||||
|
||||
if init_delay:
|
||||
time.sleep(init_delay)
|
||||
ret['comment'] = (
|
||||
'{0}\nDelayed return for {1} seconds'
|
||||
.format(ret['comment'], init_delay)
|
||||
)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -428,7 +436,6 @@ def dead(name, enable=None, sig=None, **kwargs):
|
|||
# Check if the service is available
|
||||
try:
|
||||
if not _available(name, ret):
|
||||
ret['result'] = True
|
||||
return ret
|
||||
except CommandExecutionError as exc:
|
||||
ret['result'] = False
|
||||
|
@ -442,6 +449,8 @@ def dead(name, enable=None, sig=None, **kwargs):
|
|||
before_toggle_enable_status = __salt__['service.enabled'](name)
|
||||
else:
|
||||
before_toggle_enable_status = True
|
||||
|
||||
# See if the service is already dead
|
||||
if not before_toggle_status:
|
||||
ret['comment'] = 'The service {0} is already dead'.format(name)
|
||||
if enable is True and not before_toggle_enable_status:
|
||||
|
@ -450,12 +459,12 @@ def dead(name, enable=None, sig=None, **kwargs):
|
|||
ret.update(_disable(name, None, **kwargs))
|
||||
return ret
|
||||
|
||||
# Run the tests
|
||||
if __opts__['test']:
|
||||
ret['result'] = None
|
||||
ret['comment'] = 'Service {0} is set to be killed'.format(name)
|
||||
return ret
|
||||
|
||||
# be sure to stop, in case we mis detected in the check
|
||||
func_ret = __salt__['service.stop'](name)
|
||||
if not func_ret:
|
||||
ret['result'] = False
|
||||
|
@ -464,12 +473,8 @@ def dead(name, enable=None, sig=None, **kwargs):
|
|||
ret.update(_enable(name, True, result=False, **kwargs))
|
||||
elif enable is False:
|
||||
ret.update(_disable(name, True, result=False, **kwargs))
|
||||
else:
|
||||
ret['comment'] = 'Service {0} was killed'.format(name)
|
||||
if enable is True:
|
||||
ret.update(_enable(name, False, **kwargs))
|
||||
elif enable is False:
|
||||
ret.update(_disable(name, False, **kwargs))
|
||||
return ret
|
||||
|
||||
# only force a change state if we have explicitly detected them
|
||||
after_toggle_status = __salt__['service.status'](name)
|
||||
if 'service.enabled' in __salt__:
|
||||
|
@ -477,10 +482,23 @@ def dead(name, enable=None, sig=None, **kwargs):
|
|||
else:
|
||||
after_toggle_enable_status = True
|
||||
if (
|
||||
(before_toggle_enable_status != after_toggle_enable_status) or
|
||||
(before_toggle_status != after_toggle_status)
|
||||
(before_toggle_enable_status != after_toggle_enable_status) or
|
||||
(before_toggle_status != after_toggle_status)
|
||||
) and not ret.get('changes', {}):
|
||||
ret['changes'][name] = func_ret
|
||||
ret['changes'][name] = after_toggle_status
|
||||
|
||||
# be sure to stop, in case we mis detected in the check
|
||||
if after_toggle_status:
|
||||
ret['result'] = False
|
||||
ret['comment'] = 'Service {0} failed to die'.format(name)
|
||||
else:
|
||||
ret['comment'] = 'Service {0} was killed'.format(name)
|
||||
|
||||
if enable is True:
|
||||
ret.update(_enable(name, after_toggle_status, result=not after_toggle_status, **kwargs))
|
||||
elif enable is False:
|
||||
ret.update(_disable(name, after_toggle_status, result=not after_toggle_status, **kwargs))
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
|
|
|
@ -660,6 +660,17 @@ class GitProvider(object):
|
|||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_checkout_target(self):
|
||||
'''
|
||||
Resolve dynamically-set branch
|
||||
'''
|
||||
if self.branch == '__env__':
|
||||
target = self.opts.get('environment') or 'base'
|
||||
return self.opts['{0}_base'.format(self.role)] \
|
||||
if target == 'base' \
|
||||
else target
|
||||
return self.branch
|
||||
|
||||
def get_tree(self, tgt_env):
|
||||
'''
|
||||
This function must be overridden in a sub-class
|
||||
|
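A compact restatement of the ``__env__`` resolution introduced in ``get_checkout_target`` above; the helper name and values here are invented for the example:

def resolve_checkout_target(branch, environment, role_base='master'):
    # Mirrors GitProvider.get_checkout_target(): '__env__' follows the
    # requested environment and falls back to the role's base branch.
    if branch == '__env__':
        target = environment or 'base'
        return role_base if target == 'base' else target
    return branch

assert resolve_checkout_target('__env__', 'dev') == 'dev'
assert resolve_checkout_target('__env__', None) == 'master'
assert resolve_checkout_target('develop', 'dev') == 'develop'
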
@ -713,6 +724,7 @@ class GitPython(GitProvider):
|
|||
GitPython when running these functions vary in different versions of
|
||||
GitPython.
|
||||
'''
|
||||
tgt_ref = self.get_checkout_target()
|
||||
try:
|
||||
head_sha = self.repo.rev_parse('HEAD').hexsha
|
||||
except Exception:
|
||||
|
@ -720,11 +732,11 @@ class GitPython(GitProvider):
|
|||
# we fetch first before ever checking anything out.
|
||||
head_sha = None
|
||||
|
||||
# 'origin/' + self.branch ==> matches a branch head
|
||||
# 'tags/' + self.branch + '@{commit}' ==> matches tag's commit
|
||||
# 'origin/' + tgt_ref ==> matches a branch head
|
||||
# 'tags/' + tgt_ref + '@{commit}' ==> matches tag's commit
|
||||
for rev_parse_target, checkout_ref in (
|
||||
('origin/' + self.branch, 'origin/' + self.branch),
|
||||
('tags/' + self.branch + '@{commit}', 'tags/' + self.branch)):
|
||||
('origin/' + tgt_ref, 'origin/' + tgt_ref),
|
||||
('tags/' + tgt_ref, 'tags/' + tgt_ref)):
|
||||
try:
|
||||
target_sha = self.repo.rev_parse(rev_parse_target).hexsha
|
||||
except Exception:
|
||||
|
@ -768,7 +780,7 @@ class GitPython(GitProvider):
|
|||
return self.check_root()
|
||||
log.error(
|
||||
'Failed to checkout %s from %s remote \'%s\': remote ref does '
|
||||
'not exist', self.branch, self.role, self.id
|
||||
'not exist', tgt_ref, self.role, self.id
|
||||
)
|
||||
return None
|
||||
|
||||
|
@ -1024,9 +1036,10 @@ class Pygit2(GitProvider):
|
|||
'''
|
||||
Checkout the configured branch/tag
|
||||
'''
|
||||
local_ref = 'refs/heads/' + self.branch
|
||||
remote_ref = 'refs/remotes/origin/' + self.branch
|
||||
tag_ref = 'refs/tags/' + self.branch
|
||||
tgt_ref = self.get_checkout_target()
|
||||
local_ref = 'refs/heads/' + tgt_ref
|
||||
remote_ref = 'refs/remotes/origin/' + tgt_ref
|
||||
tag_ref = 'refs/tags/' + tgt_ref
|
||||
|
||||
try:
|
||||
local_head = self.repo.lookup_reference('HEAD')
|
||||
|
@ -1193,7 +1206,7 @@ class Pygit2(GitProvider):
|
|||
except Exception as exc:
|
||||
log.error(
|
||||
'Failed to checkout {0} from {1} remote \'{2}\': {3}'.format(
|
||||
self.branch,
|
||||
tgt_ref,
|
||||
self.role,
|
||||
self.id,
|
||||
exc
|
||||
|
@ -1203,7 +1216,7 @@ class Pygit2(GitProvider):
|
|||
return None
|
||||
log.error(
|
||||
'Failed to checkout {0} from {1} remote \'{2}\': remote ref '
|
||||
'does not exist'.format(self.branch, self.role, self.id)
|
||||
'does not exist'.format(tgt_ref, self.role, self.id)
|
||||
)
|
||||
return None
|
||||
|
||||
|
|
|
@ -378,8 +378,8 @@ class SerializerExtension(Extension, object):
|
|||
def format_yaml(self, value, flow_style=True):
|
||||
yaml_txt = yaml.dump(value, default_flow_style=flow_style,
|
||||
Dumper=OrderedDictDumper).strip()
|
||||
if yaml_txt.endswith('\n...\n'):
|
||||
yaml_txt = yaml_txt[:len(yaml_txt-5)]
|
||||
if yaml_txt.endswith('\n...'):
|
||||
yaml_txt = yaml_txt[:len(yaml_txt)-4]
|
||||
return Markup(yaml_txt)
|
||||
|
||||
def format_yaml_safe(self, value, flow_style=True):
|
||||
|
|
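For context on the trimming fix above: dumping a bare scalar makes PyYAML append a document-end marker, and after ``.strip()`` removes the final newline the text still ends in ``\n...``. A small check, assuming stock PyYAML behaviour:

import yaml

yaml_txt = yaml.dump('str value', default_flow_style=True).strip()
# yaml.dump() on a scalar yields 'str value\n...\n'; strip() leaves 'str value\n...'
if yaml_txt.endswith('\n...'):
    yaml_txt = yaml_txt[:len(yaml_txt) - 4]
assert yaml_txt == 'str value'
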
|
@ -2812,6 +2812,12 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
|
|||
default=None,
|
||||
help='Pass in extra files to include in the state tarball.'
|
||||
)
|
||||
self.add_option(
|
||||
'--thin-extra-modules',
|
||||
dest='thin_extra_mods',
|
||||
default=None,
|
||||
help='One or a comma-separated list of extra Python modules '
|
||||
'to be included in the Salt Thin.')
|
||||
self.add_option(
|
||||
'-v', '--verbose',
|
||||
default=False,
|
||||
|
|
tests/integration/modules/archive.py (new file, 268 lines)
|
@ -0,0 +1,268 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Tests for the archive state
|
||||
'''
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import shutil
|
||||
import textwrap
|
||||
|
||||
# Import Salt Testing libs
|
||||
from salttesting import skipIf
|
||||
from salttesting.helpers import (
|
||||
destructiveTest,
|
||||
ensure_in_syspath
|
||||
)
|
||||
ensure_in_syspath('../../')
|
||||
|
||||
# Import salt libs
|
||||
import integration
|
||||
import salt.utils
|
||||
|
||||
# Import 3rd party libs
|
||||
try:
|
||||
import zipfile # pylint: disable=W0611
|
||||
HAS_ZIPFILE = True
|
||||
except ImportError:
|
||||
HAS_ZIPFILE = False
|
||||
|
||||
|
||||
@destructiveTest
|
||||
class ArchiveTest(integration.ModuleCase):
|
||||
'''
|
||||
Validate the archive module
|
||||
'''
|
||||
# Base path used for test artifacts
|
||||
base_path = os.path.join(integration.TMP, 'modules', 'archive')
|
||||
|
||||
def _set_artifact_paths(self, arch_fmt):
|
||||
'''
|
||||
Define the paths for the source, archive, and destination files
|
||||
|
||||
:param str arch_fmt: The archive format used in the test
|
||||
'''
|
||||
self.src = os.path.join(self.base_path, '{0}_src_dir'.format(arch_fmt))
|
||||
self.src_file = os.path.join(self.src, 'file')
|
||||
self.arch = os.path.join(self.base_path, 'archive.{0}'.format(arch_fmt))
|
||||
self.dst = os.path.join(self.base_path, '{0}_dst_dir'.format(arch_fmt))
|
||||
|
||||
def _set_up(self, arch_fmt):
|
||||
'''
|
||||
Create source file tree and destination directory
|
||||
|
||||
:param str arch_fmt: The archive format used in the test
|
||||
'''
|
||||
# Setup artifact paths
|
||||
self._set_artifact_paths(arch_fmt)
|
||||
|
||||
# Remove the artifacts if any present
|
||||
if any([os.path.exists(f) for f in (self.src, self.arch, self.dst)]):
|
||||
self._tear_down()
|
||||
|
||||
# Create source
|
||||
os.makedirs(self.src)
|
||||
with salt.utils.fopen(os.path.join(self.src, 'file'), 'w') as theorem:
|
||||
theorem.write(textwrap.dedent(r'''\
|
||||
Compression theorem of computational complexity theory:
|
||||
|
||||
Given a Gödel numbering $φ$ of the computable functions and a
|
||||
Blum complexity measure $Φ$ where a complexity class for a
|
||||
boundary function $f$ is defined as
|
||||
|
||||
$\mathrm C(f) := \{φ_i ∈ \mathbb R^{(1)} | (∀^∞ x) Φ_i(x) ≤ f(x)\}$.
|
||||
|
||||
Then there exists a total computable function $f$ so that for
|
||||
all $i$
|
||||
|
||||
$\mathrm{Dom}(φ_i) = \mathrm{Dom}(φ_{f(i)})$
|
||||
|
||||
and
|
||||
|
||||
$\mathrm C(φ_i) ⊊ \mathrm{C}(φ_{f(i)})$.
|
||||
'''))
|
||||
|
||||
# Create destination
|
||||
os.makedirs(self.dst)
|
||||
|
||||
def _tear_down(self):
|
||||
'''
|
||||
Remove source file tree, archive, and destination file tree
|
||||
'''
|
||||
for f in (self.src, self.arch, self.dst):
|
||||
if os.path.exists(f):
|
||||
if os.path.isdir(f):
|
||||
shutil.rmtree(f, ignore_errors=True)
|
||||
else:
|
||||
os.remove(f)
|
||||
|
||||
def _assert_artifacts_in_ret(self, ret, file_only=False):
|
||||
'''
|
||||
Assert that the artifact source files are printed in the source command
|
||||
output
|
||||
'''
|
||||
# Try to find source directory and file in output lines
|
||||
dir_in_ret = None
|
||||
file_in_ret = None
|
||||
for line in ret:
|
||||
if self.src.lstrip('/') in line \
|
||||
and not self.src_file.lstrip('/') in line:
|
||||
dir_in_ret = True
|
||||
if self.src_file.lstrip('/') in line:
|
||||
file_in_ret = True
|
||||
|
||||
# Assert number of lines, reporting of source directory and file
|
||||
self.assertTrue(len(ret) >= (1 if file_only else 2))
|
||||
if not file_only:
|
||||
self.assertTrue(dir_in_ret)
|
||||
self.assertTrue(file_in_ret)
|
||||
|
||||
@skipIf(not salt.utils.which('tar'), 'Cannot find tar executable')
|
||||
def test_tar_pack(self):
|
||||
'''
|
||||
Validate using the tar function to create archives
|
||||
'''
|
||||
self._set_up(arch_fmt='tar')
|
||||
|
||||
# Test create archive
|
||||
ret = self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which('tar'), 'Cannot find tar executable')
|
||||
def test_tar_unpack(self):
|
||||
'''
|
||||
Validate using the tar function to extract archives
|
||||
'''
|
||||
self._set_up(arch_fmt='tar')
|
||||
self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
|
||||
|
||||
# Test extract archive
|
||||
ret = self.run_function('archive.tar', ['-xvf', self.arch], dest=self.dst)
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which('gzip'), 'Cannot find gzip executable')
|
||||
def test_gzip(self):
|
||||
'''
|
||||
Validate using the gzip function
|
||||
'''
|
||||
self._set_up(arch_fmt='gz')
|
||||
|
||||
# Test create archive
|
||||
ret = self.run_function('archive.gzip', [self.src_file], options='-v')
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret, file_only=True)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which('gunzip'), 'Cannot find gunzip executable')
|
||||
def test_gunzip(self):
|
||||
'''
|
||||
Validate using the gunzip function
|
||||
'''
|
||||
self._set_up(arch_fmt='gz')
|
||||
self.run_function('archive.gzip', [self.src_file], options='-v')
|
||||
|
||||
# Test extract archive
|
||||
ret = self.run_function('archive.gunzip', [self.src_file + '.gz'], options='-v')
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret, file_only=True)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which('zip'), 'Cannot find zip executable')
|
||||
def test_cmd_zip(self):
|
||||
'''
|
||||
Validate using the cmd_zip function
|
||||
'''
|
||||
self._set_up(arch_fmt='zip')
|
||||
|
||||
# Test create archive
|
||||
ret = self.run_function('archive.cmd_zip', [self.arch, self.src])
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which('unzip'), 'Cannot find unzip executable')
|
||||
def test_cmd_unzip(self):
|
||||
'''
|
||||
Validate using the cmd_unzip function
|
||||
'''
|
||||
self._set_up(arch_fmt='zip')
|
||||
self.run_function('archive.cmd_zip', [self.arch, self.src])
|
||||
|
||||
# Test extract archive
|
||||
ret = self.run_function('archive.cmd_unzip', [self.arch, self.dst])
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not HAS_ZIPFILE, 'Cannot find zip python module')
|
||||
def test_zip(self):
|
||||
'''
|
||||
Validate using the zip function
|
||||
'''
|
||||
self._set_up(arch_fmt='zip')
|
||||
|
||||
# Test create archive
|
||||
ret = self.run_function('archive.zip', [self.arch, self.src])
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not HAS_ZIPFILE, 'Cannot find zip python module')
|
||||
def test_unzip(self):
|
||||
'''
|
||||
Validate using the unzip function
|
||||
'''
|
||||
self._set_up(arch_fmt='zip')
|
||||
self.run_function('archive.zip', [self.arch, self.src])
|
||||
|
||||
# Test extract archive
|
||||
ret = self.run_function('archive.unzip', [self.arch, self.dst])
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which('rar'), 'Cannot find rar executable')
|
||||
def test_rar(self):
|
||||
'''
|
||||
Validate using the rar function
|
||||
'''
|
||||
self._set_up(arch_fmt='rar')
|
||||
|
||||
# Test create archive
|
||||
ret = self.run_function('archive.rar', [self.arch, self.src])
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.which_bin(('rar', 'unrar')), 'Cannot find rar or unrar executable')
|
||||
def test_unrar(self):
|
||||
'''
|
||||
Validate using the unrar function
|
||||
'''
|
||||
self._set_up(arch_fmt='rar')
|
||||
self.run_function('archive.rar', [self.arch, self.src])
|
||||
|
||||
# Test extract archive
|
||||
ret = self.run_function('archive.unrar', [self.arch, self.dst])
|
||||
self.assertTrue(isinstance(ret, list))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
from integration import run_tests
|
||||
run_tests(ArchiveTest)
|
|
@ -38,6 +38,14 @@ class PkgModuleTest(integration.ModuleCase,
|
|||
eq = ['0.2.4-0ubuntu1', '0.2.4-0ubuntu1']
|
||||
gt = ['0.2.4.1-0ubuntu1', '0.2.4-0ubuntu1']
|
||||
|
||||
self.assertEqual(self.run_function(func, lt), -1)
|
||||
self.assertEqual(self.run_function(func, eq), 0)
|
||||
self.assertEqual(self.run_function(func, gt), 1)
|
||||
elif os_family == 'Suse':
|
||||
lt = ['2.3.0-1', '2.3.1-15.1']
|
||||
eq = ['2.3.1-15.1', '2.3.1-15.1']
|
||||
gt = ['2.3.2-15.1', '2.3.1-15.1']
|
||||
|
||||
self.assertEqual(self.run_function(func, lt), -1)
|
||||
self.assertEqual(self.run_function(func, eq), 0)
|
||||
self.assertEqual(self.run_function(func, gt), 1)
|
||||
|
@ -55,11 +63,11 @@ class PkgModuleTest(integration.ModuleCase,
|
|||
try:
|
||||
repo = None
|
||||
if os_grain == 'Ubuntu':
|
||||
repo = 'ppa:otto-kesselgulasch/gimp-edge'
|
||||
uri = 'http://ppa.launchpad.net/otto-kesselgulasch/gimp-edge/ubuntu'
|
||||
repo = 'ppa:silvenga/3proxy'
|
||||
uri = 'http://ppa.launchpad.net/silvenga/3proxy/ubuntu'
|
||||
ret = self.run_function('pkg.mod_repo', [repo, 'comps=main'])
|
||||
self.assertNotEqual(ret, {})
|
||||
self.assertIn(repo, ret)
|
||||
self.assertIn('deb '+uri, ret.keys()[0])
|
||||
ret = self.run_function('pkg.get_repo', [repo])
|
||||
self.assertEqual(ret['uri'], uri)
|
||||
elif os_grain == 'CentOS':
|
||||
|
@ -191,6 +199,11 @@ class PkgModuleTest(integration.ModuleCase,
|
|||
if os_family == 'RedHat':
|
||||
ret = self.run_function(func)
|
||||
self.assertIn(ret, (True, None))
|
||||
elif os_family == 'Suse':
|
||||
ret = self.run_function(func)
|
||||
if not isinstance(ret, dict):
|
||||
self.skipTest('Upstream repo did not return coherent results. Skipping test.')
|
||||
self.assertNotEqual(ret, {})
|
||||
elif os_family == 'Debian':
|
||||
ret = self.run_function(func)
|
||||
if not isinstance(ret, dict):
|
||||
|
@ -228,6 +241,50 @@ class PkgModuleTest(integration.ModuleCase,
|
|||
self.assertIn('less', keys)
|
||||
self.assertIn('zypper', keys)
|
||||
|
||||
@requires_network()
|
||||
@destructiveTest
|
||||
def test_pkg_upgrade_has_pending_upgrades(self):
|
||||
'''
|
||||
Test running a system upgrade when there are packages that need upgrading
|
||||
'''
|
||||
func = 'pkg.upgrade'
|
||||
os_family = self.run_function('grains.item', ['os_family'])['os_family']
|
||||
|
||||
# First make sure that an up-to-date copy of the package db is available
|
||||
self.run_function('pkg.refresh_db')
|
||||
|
||||
if os_family == 'Suse':
|
||||
# pkg.latest_version returns empty if the latest version is already installed
|
||||
vim_version_dict = self.run_function('pkg.latest_version', ['vim'])
|
||||
if vim_version_dict == {}:
|
||||
# Latest version is installed, get its version and construct
|
||||
# a version selector so the immediately previous version is selected
|
||||
vim_version_dict = self.run_function('pkg.info_available', ['vim'])
|
||||
vim_version = 'version=<'+vim_version_dict['vim']['version']
|
||||
else:
|
||||
# Vim was not installed, so pkg.latest_version returns the latest one.
|
||||
# Construct a version selector so immediately previous version is selected
|
||||
vim_version = 'version=<'+vim_version_dict
|
||||
|
||||
# Install a version of vim that should need upgrading
|
||||
ret = self.run_function('pkg.install', ['vim', vim_version])
|
||||
|
||||
# Run a system upgrade, which should catch the fact that Vim needs upgrading, and upgrade it.
|
||||
ret = self.run_function(func)
|
||||
|
||||
# The changes dictionary should not be empty.
|
||||
self.assertIn('changes', ret)
|
||||
self.assertIn('vim', ret['changes'])
|
||||
else:
|
||||
ret = self.run_function('pkg.list_updates')
|
||||
if ret == '':
|
||||
self.skipTest('No updates available for this machine. Skipping pkg.upgrade test.')
|
||||
else:
|
||||
ret = self.run_function(func)
|
||||
|
||||
# The changes dictionary should not be empty.
|
||||
self.assertNotEqual(ret, {})
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
from integration import run_tests
|
||||
|
|
|
@ -59,24 +59,27 @@ class HgTestCase(TestCase):
|
|||
'''
|
||||
Test for Perform a pull on the given repository
|
||||
'''
|
||||
with patch.dict(hg.__salt__, {'cmd.run':
|
||||
MagicMock(return_value='A')}):
|
||||
with patch.dict(hg.__salt__, {'cmd.run_all':
|
||||
MagicMock(return_value={'retcode': 0,
|
||||
'stdout': 'A'})}):
|
||||
self.assertEqual(hg.pull('cwd'), 'A')
|
||||
|
||||
def test_update(self):
|
||||
'''
|
||||
Test for Update to a given revision
|
||||
'''
|
||||
with patch.dict(hg.__salt__, {'cmd.run':
|
||||
MagicMock(return_value='A')}):
|
||||
with patch.dict(hg.__salt__, {'cmd.run_all':
|
||||
MagicMock(return_value={'retcode': 0,
|
||||
'stdout': 'A'})}):
|
||||
self.assertEqual(hg.update('cwd', 'rev'), 'A')
|
||||
|
||||
def test_clone(self):
|
||||
'''
|
||||
Test for Clone a new repository
|
||||
'''
|
||||
with patch.dict(hg.__salt__, {'cmd.run':
|
||||
MagicMock(return_value='A')}):
|
||||
with patch.dict(hg.__salt__, {'cmd.run_all':
|
||||
MagicMock(return_value={'retcode': 0,
|
||||
'stdout': 'A'})}):
|
||||
self.assertEqual(hg.clone('cwd', 'repository'), 'A')
|
||||
|
||||
def test_status_single(self):
|
||||
|
|
|
@ -30,7 +30,7 @@ class XAttrTestCase(TestCase):
|
|||
'squidward': 'patrick'}
|
||||
with patch.object(xattr, 'read', MagicMock(side_effect=['squarepants',
|
||||
'patrick'])):
|
||||
self.assertEqual(xattr.list('path/to/file'), expected)
|
||||
self.assertEqual(xattr.list_('path/to/file'), expected)
|
||||
|
||||
@patch('salt.utils.mac_utils.execute_return_result',
|
||||
MagicMock(side_effect=CommandExecutionError('No such file')))
|
||||
|
@ -38,7 +38,7 @@ class XAttrTestCase(TestCase):
|
|||
'''
|
||||
Test listing attributes of a missing file
|
||||
'''
|
||||
self.assertRaises(CommandExecutionError, xattr.list, '/path/to/file')
|
||||
self.assertRaises(CommandExecutionError, xattr.list_, '/path/to/file')
|
||||
|
||||
@patch('salt.utils.mac_utils.execute_return_result',
|
||||
MagicMock(return_value='expected results'))
|
||||
|
@ -55,9 +55,12 @@ class XAttrTestCase(TestCase):
|
|||
'''
|
||||
with patch.object(salt.utils.mac_utils, 'execute_return_result',
|
||||
MagicMock(return_value='expected results')) as mock:
|
||||
self.assertEqual(xattr.read('/path/to/file', 'com.attr', True),
|
||||
'expected results')
|
||||
mock.assert_called_once_with('xattr -p -x "com.attr" "/path/to/file"')
|
||||
self.assertEqual(
|
||||
xattr.read('/path/to/file', 'com.attr', **{'hex': True}),
|
||||
'expected results'
|
||||
)
|
||||
mock.assert_called_once_with(
|
||||
['xattr', '-p', '-x', 'com.attr', '/path/to/file'])
|
||||
|
||||
@patch('salt.utils.mac_utils.execute_return_result',
|
||||
MagicMock(side_effect=CommandExecutionError('No such file')))
|
||||
|
@ -101,7 +104,7 @@ class XAttrTestCase(TestCase):
|
|||
Test deleting a specific attribute from a file
|
||||
'''
|
||||
mock_cmd = MagicMock(return_value={'spongebob': 'squarepants'})
|
||||
with patch.object(xattr, 'list', mock_cmd):
|
||||
with patch.object(xattr, 'list_', mock_cmd):
|
||||
self.assertTrue(xattr.delete('/path/to/file', 'attribute'))
|
||||
|
||||
@patch('salt.utils.mac_utils.execute_return_success',
|
||||
|
@ -122,7 +125,7 @@ class XAttrTestCase(TestCase):
|
|||
Test clearing all attributes on a file
|
||||
'''
|
||||
mock_cmd = MagicMock(return_value={})
|
||||
with patch.object(xattr, 'list', mock_cmd):
|
||||
with patch.object(xattr, 'list_', mock_cmd):
|
||||
self.assertTrue(xattr.clear('/path/to/file'))
|
||||
|
||||
@patch('salt.utils.mac_utils.execute_return_success',
|
||||
|
|
|
@ -57,7 +57,10 @@ class ServiceTestCase(TestCase):
|
|||
'result': True},
|
||||
{'changes': {},
|
||||
'comment': 'The service salt is already running',
|
||||
'name': 'salt', 'result': True}]
|
||||
'name': 'salt', 'result': True},
|
||||
{'changes': 'saltstack',
|
||||
'comment': 'Service salt failed to start', 'name': 'salt',
|
||||
'result': True}]
|
||||
|
||||
tmock = MagicMock(return_value=True)
|
||||
fmock = MagicMock(return_value=False)
|
||||
|
@ -98,6 +101,22 @@ class ServiceTestCase(TestCase):
|
|||
with patch.dict(service.__salt__, {'service.status': fmock}):
|
||||
self.assertDictEqual(service.running("salt"), ret[3])
|
||||
|
||||
with contextlib.nested(
|
||||
patch.dict(service.__opts__, {'test': False}),
|
||||
patch.dict(
|
||||
service.__salt__, {
|
||||
'service.status':
|
||||
MagicMock(side_effect=[False, False]),
|
||||
'service.enabled':
|
||||
MagicMock(side_effect=[True, True]),
|
||||
'service.start':
|
||||
MagicMock(return_value="stack")}),
|
||||
patch.object(
|
||||
service, '_enable',
|
||||
MagicMock(return_value={'changes': 'saltstack'}))
|
||||
):
|
||||
self.assertDictEqual(service.running("salt", True), ret[6])
|
||||
|
||||
def test_dead(self):
|
||||
'''
|
||||
Test to ensure that the named service is dead
|
||||
|
@ -113,8 +132,8 @@ class ServiceTestCase(TestCase):
|
|||
'comment': 'Service salt was killed', 'name': 'salt',
|
||||
'result': True},
|
||||
{'changes': {},
|
||||
'comment': 'Service salt was killed', 'name': 'salt',
|
||||
'result': True},
|
||||
'comment': 'Service salt failed to die', 'name': 'salt',
|
||||
'result': False},
|
||||
{'changes': 'saltstack',
|
||||
'comment': 'The service salt is already dead', 'name': 'salt',
|
||||
'result': True}]
|
||||
|
@ -156,7 +175,7 @@ class ServiceTestCase(TestCase):
|
|||
|
||||
# test an initd which a wrong status (True even if dead)
|
||||
with patch.dict(service.__salt__, {'service.enabled': MagicMock(side_effect=[False, False, False]),
|
||||
'service.status': MagicMock(side_effect=[True, True, True]),
|
||||
'service.status': MagicMock(side_effect=[True, False, False]),
|
||||
'service.stop': MagicMock(return_value="stack")}):
|
||||
with patch.object(service, '_disable', MagicMock(return_value={})):
|
||||
self.assertDictEqual(service.dead("salt", False), ret[4])
|
||||
|
|
|
@ -482,6 +482,12 @@ class TestCustomExtensions(TestCase):
|
|||
rendered = env.from_string('{{ dataset|yaml }}').render(dataset=dataset)
|
||||
self.assertEqual(dataset, yaml.load(rendered))
|
||||
|
||||
def test_serialize_yaml_str(self):
|
||||
dataset = "str value"
|
||||
env = Environment(extensions=[SerializerExtension])
|
||||
rendered = env.from_string('{{ dataset|yaml }}').render(dataset=dataset)
|
||||
self.assertEqual(dataset, rendered)
|
||||
|
||||
def test_serialize_python(self):
|
||||
dataset = {
|
||||
"foo": True,
|
||||
|
|