Merge branch '2015.5' into '2015.8'

Conflicts:
  - salt/cloud/clouds/vmware.py
  - salt/modules/file.py
  - salt/states/archive.py
This commit is contained in:
rallytime 2016-04-25 15:45:17 -06:00
commit 9064d3bbfb
14 changed files with 128 additions and 46 deletions

3
.gitignore vendored
View file

@ -62,6 +62,9 @@ tags
# Allow a user to set their own _version.py for testing
_version.py
# Ignore auto generated _syspaths.py file
_syspaths.py
# Ignore grains file written out during tests
tests/integration/files/conf/grains
/salt/_syspaths.py

View file

@ -89,7 +89,7 @@ Execution Options
.. option:: -u, --update-bootstrap
Update salt-bootstrap to the latest develop version on GitHub.
Update salt-bootstrap to the latest stable bootstrap release.
.. option:: -y, --assume-yes

View file

@ -274,9 +274,9 @@ with labels.
``Awesome``
The pull request implements an especially well crafted solution, or a very difficult but necessary change.
``Low Hanging Fruit``
The issue is trivial or almost trivial to implement or fix. Issues having this label should be a good starting
place for new contributors to Salt.
``Help Wanted``
The issue appears to have a simple solution. Issues having this label
should be a good starting place for new contributors to Salt.
``Needs Testcase``
The issue or pull request relates to a feature that needs test coverage. The pull request containing the tests

View file

@ -129,12 +129,24 @@ modules.
The Salt module functions are also made available in the template context as
``salt:``
The following example illustrates calling the ``group_to_gid`` function in the
``file`` execution module with a single positional argument called
``some_group_that_exists``.
.. code-block:: jinja
moe:
user.present:
- gid: {{ salt['file.group_to_gid']('some_group_that_exists') }}
One way to think about this might be that the ``gid`` key is being assigned
a value equivalent to the following Python pseudo-code:
.. code-block:: python
import salt.modules.file
file.group_to_gid('some_group_that_exists')
Note that for the above example to work, ``some_group_that_exists`` must exist
before the state file is processed by the templating engine.
@ -145,6 +157,9 @@ MAC address for eth0:
salt['network.hw_addr']('eth0')
To examine the possible arguments to each execution module function,
one can examine the `module reference documentation </ref/modules/all>`.
Advanced SLS module syntax
==========================

View file

@ -177,7 +177,8 @@ class Batch(object):
else:
parts.update(part)
for id in part.keys():
minion_tracker[queue]['minions'].remove(id)
if id in minion_tracker[queue]['minions']:
minion_tracker[queue]['minions'].remove(id)
except StopIteration:
# if a iterator is done:
# - set it to inactive

View file

@ -17,6 +17,7 @@ import salt.utils.url
import salt.crypt
import salt.transport
from salt.exceptions import CommandExecutionError
from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: disable=import-error,no-name-in-module
# Import 3rd-party libs
import salt.ext.six as six
@ -358,6 +359,25 @@ def cache_file(path, saltenv='base', env=None):
# Backwards compatibility
saltenv = env
contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
path_is_remote = _urlparse(path).scheme in ('http', 'https', 'ftp')
try:
if path_is_remote and contextkey in __context__:
# Prevent multiple caches in the same salt run. Affects remote URLs
# since the master won't know their hash, so the fileclient
# wouldn't be able to prevent multiple caches if we try to cache
# the remote URL more than once.
if os.path.isfile(__context__[contextkey]):
return __context__[contextkey]
else:
# File is in __context__ but no longer exists in the minion
# cache, get rid of the context key and re-cache below.
# Accounts for corner case where file is removed from minion
# cache between cp.cache_file calls in the same salt-run.
__context__.pop(contextkey)
except AttributeError:
pass
_mk_client()
path, senv = salt.utils.url.split_env(path)
@ -371,6 +391,10 @@ def cache_file(path, saltenv='base', env=None):
path, saltenv
)
)
if path_is_remote:
# Cache was successful, store the result in __context__ to prevent
# multiple caches (see above).
__context__[contextkey] = result
return result

View file

@ -3254,6 +3254,10 @@ def source_list(source, source_hash, saltenv):
salt '*' file.source_list salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' base
'''
contextkey = '{0}_|-{1}_|-{2}'.format(source, source_hash, saltenv)
if contextkey in __context__:
return __context__[contextkey]
# get the master file list
if isinstance(source, list):
mfiles = [(f, saltenv) for f in __salt__['cp.list_master'](saltenv)]
@ -3287,10 +3291,7 @@ def source_list(source, source_hash, saltenv):
ret = (single_src, single_hash)
break
elif proto.startswith('http') or proto == 'ftp':
dest = salt.utils.mkstemp()
fn_ = __salt__['cp.get_url'](single_src, dest)
os.remove(fn_)
if fn_:
if __salt__['cp.cache_file'](single_src):
ret = (single_src, single_hash)
break
elif proto == 'file' and os.path.exists(urlparsed_single_src.path):
@ -3306,11 +3307,16 @@ def source_list(source, source_hash, saltenv):
if (path, senv) in mfiles or (path, senv) in mdirs:
ret = (single, source_hash)
break
urlparsed_source = _urlparse(single)
if urlparsed_source.scheme == 'file' and os.path.exists(urlparsed_source.path):
urlparsed_src = _urlparse(single)
proto = urlparsed_src.scheme
if proto == 'file' and os.path.exists(urlparsed_src.path):
ret = (single, source_hash)
break
if single.startswith('/') and os.path.exists(single):
elif proto.startswith('http') or proto == 'ftp':
if __salt__['cp.cache_file'](single):
ret = (single, source_hash)
break
elif single.startswith('/') and os.path.exists(single):
ret = (single, source_hash)
break
if ret is None:
@ -3318,10 +3324,11 @@ def source_list(source, source_hash, saltenv):
raise CommandExecutionError(
'none of the specified sources were found'
)
else:
return ret
else:
return source, source_hash
ret = (source, source_hash)
__context__[contextkey] = ret
return ret
def apply_template_on_contents(

View file

@ -216,7 +216,7 @@ class SaltClientsMixIn(object):
# not the actual client we'll use.. but its what we'll use to get args
'local_batch': local_client.cmd_batch,
'local_async': local_client.run_job,
'runner': salt.runner.RunnerClient(opts=self.application.opts).async,
'runner': salt.runner.RunnerClient(opts=self.application.opts).cmd_async,
'runner_async': None, # empty, since we use the same client as `runner`
}
return SaltClientsMixIn.__saltclients
@ -804,8 +804,6 @@ class SaltAPIHandler(BaseSaltAPIHandler, SaltClientsMixIn): # pylint: disable=W
def disbatch(self):
'''
Disbatch all lowstates to the appropriate clients
Auth must have been verified before this point
'''
ret = []
@ -814,16 +812,23 @@ class SaltAPIHandler(BaseSaltAPIHandler, SaltClientsMixIn): # pylint: disable=W
client = low.get('client')
self._verify_client(client)
for low in self.lowstate:
# make sure that the chunk has a token, if not we can't do auth per-request
# Note: this means that you can send different tokens per lowstate
# as long as the base token (to auth with the API) is valid
if 'token' not in low:
# Make sure we have 'token' or 'username'/'password' in each low chunk.
# Salt will verify the credentials are correct.
if self.token is not None and 'token' not in low:
low['token'] = self.token
if not (('token' in low)
or ('username' in low and 'password' in low and 'eauth' in low)):
ret.append('Failed to authenticate')
break
# disbatch to the correct handler
try:
chunk_ret = yield getattr(self, '_disbatch_{0}'.format(low['client']))(low)
ret.append(chunk_ret)
except EauthAuthenticationError as exc:
ret.append('Failed to authenticate')
break
except Exception as ex:
ret.append('Unexpected exception while handling request: {0}'.format(ex))
logger.error('Unexpected exception while handling request:', exc_info=True)
@ -1021,8 +1026,7 @@ class SaltAPIHandler(BaseSaltAPIHandler, SaltClientsMixIn): # pylint: disable=W
'''
Disbatch runner client commands
'''
f_call = {'args': [chunk['fun'], chunk]}
pub_data = self.saltclients['runner'](chunk['fun'], chunk)
pub_data = self.saltclients['runner'](chunk)
tag = pub_data['tag'] + '/ret'
try:
event = yield self.application.event_listener.get_event(self, tag=tag)
@ -1037,8 +1041,7 @@ class SaltAPIHandler(BaseSaltAPIHandler, SaltClientsMixIn): # pylint: disable=W
'''
Disbatch runner client_async commands
'''
f_call = {'args': [chunk['fun'], chunk]}
pub_data = self.saltclients['runner'](chunk['fun'], chunk)
pub_data = self.saltclients['runner'](chunk)
raise tornado.gen.Return(pub_data)

View file

@ -16,6 +16,8 @@ from contextlib import closing
# Import 3rd-party libs
import salt.ext.six as six
# Import salt libs
from salt.exceptions import CommandExecutionError
# remove after archive_user deprecation.
from salt.utils import warn_until
@ -171,14 +173,31 @@ def extracted(name,
__env__,
'{0}.{1}'.format(re.sub('[:/\\\\]', '_', if_missing),
archive_format))
if __opts__['test']:
source_match = source
else:
try:
source_match = __salt__['file.source_list'](source,
source_hash,
__env__)[0]
except CommandExecutionError as exc:
ret['result'] = False
ret['comment'] = exc.strerror
return ret
if not os.path.exists(filename):
if __opts__['test']:
ret['result'] = None
ret['comment'] = \
'Archive {0} would have been downloaded in cache'.format(source)
'{0} {1} would be downloaded to cache'.format(
'One of' if not isinstance(source_match, six.string_types)
else 'Archive',
source_match
)
return ret
log.debug('Archive file {0} is not in cache, download it'.format(source))
log.debug('%s is not in cache, downloading it', source_match)
file_result = __salt__['state.single']('file.managed',
filename,
source=source,
@ -201,17 +220,21 @@ def extracted(name,
log.debug('failed to download {0}'.format(source))
return file_result
else:
log.debug('Archive file {0} is already in cache'.format(name))
log.debug('Archive %s is already in cache', name)
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Archive {0} would have been extracted in {1}'.format(
source, name)
ret['comment'] = '{0} {1} would be extracted to {2}'.format(
'One of' if not isinstance(source_match, six.string_types)
else 'Archive',
source_match,
name
)
return ret
__salt__['file.makedirs'](name, user=user, group=group)
log.debug('Extract {0} in {1}'.format(filename, name))
log.debug('Extracting {0} to {1}'.format(filename, name))
if archive_format == 'zip':
files = __salt__['archive.unzip'](filename, name)
elif archive_format == 'rar':
@ -269,11 +292,11 @@ def extracted(name,
ret['result'] = True
ret['changes']['directories_created'] = [name]
ret['changes']['extracted_files'] = files
ret['comment'] = '{0} extracted in {1}'.format(source, name)
ret['comment'] = '{0} extracted to {1}'.format(source_match, name)
if not keep:
os.unlink(filename)
else:
__salt__['file.remove'](if_missing)
ret['result'] = False
ret['comment'] = 'Can\'t extract content of {0}'.format(source)
ret['comment'] = 'Can\'t extract content of {0}'.format(source_match)
return ret

View file

@ -2624,13 +2624,11 @@ def update_bootstrap(config, url=None):
'''
Update the salt-bootstrap script
url can be either:
- The URL to fetch the bootstrap script from
- The absolute path to the bootstrap
- The content of the bootstrap script
url can be one of:
- The URL to fetch the bootstrap script from
- The absolute path to the bootstrap
- The content of the bootstrap script
'''
default_url = config.get('bootstrap_script_url',
'https://bootstrap.saltstack.com')

View file

@ -1215,8 +1215,7 @@ class ExecutionOptionsMixIn(six.with_metaclass(MixInMeta, object)):
'-u', '--update-bootstrap',
default=False,
action='store_true',
help='Update salt-bootstrap to the latest develop version on '
'GitHub.'
help='Update salt-bootstrap to the latest stable bootstrap release.'
)
group.add_option(
'-y', '--assume-yes',

View file

@ -174,6 +174,7 @@ class FileModuleTest(integration.ModuleCase):
return_value=['http/httpd.conf.fallback']),
'cp.list_master_dirs': MagicMock(return_value=[]),
}
filemod.__context__ = {}
ret = filemod.source_list(['salt://http/httpd.conf',
'salt://http/httpd.conf.fallback'],
@ -189,6 +190,8 @@ class FileModuleTest(integration.ModuleCase):
'cp.list_master': MagicMock(side_effect=list_master),
'cp.list_master_dirs': MagicMock(return_value=[]),
}
filemod.__context__ = {}
ret = filemod.source_list(['salt://http/httpd.conf?saltenv=dev',
'salt://http/httpd.conf.fallback'],
'filehash', 'base')
@ -200,6 +203,8 @@ class FileModuleTest(integration.ModuleCase):
'cp.list_master': MagicMock(return_value=['http/httpd.conf']),
'cp.list_master_dirs': MagicMock(return_value=[]),
}
filemod.__context__ = {}
ret = filemod.source_list(
[{'salt://http/httpd.conf': ''}], 'filehash', 'base')
self.assertItemsEqual(ret, ['salt://http/httpd.conf', 'filehash'])
@ -210,8 +215,10 @@ class FileModuleTest(integration.ModuleCase):
filemod.__salt__ = {
'cp.list_master': MagicMock(return_value=[]),
'cp.list_master_dirs': MagicMock(return_value=[]),
'cp.get_url': MagicMock(return_value='/tmp/http.conf'),
'cp.cache_file': MagicMock(return_value='/tmp/http.conf'),
}
filemod.__context__ = {}
ret = filemod.source_list(
[{'http://t.est.com/http/httpd.conf': 'filehash'}], '', 'base')
self.assertItemsEqual(ret, ['http://t.est.com/http/httpd.conf',

View file

@ -62,7 +62,7 @@ class SaltnadoTestCase(integration.ModuleCase, AsyncHTTPTestCase):
@property
def opts(self):
return self.get_config('master', from_scratch=True)
return self.get_config('client_config', from_scratch=True)
@property
def mod_opts(self):

View file

@ -65,6 +65,7 @@ class ArchiveTestCase(TestCase):
mock_false = MagicMock(return_value=False)
ret = {'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337'}
mock_run = MagicMock(return_value=ret)
mock_source_list = MagicMock(return_value=source)
with patch('os.path.exists', mock_true):
with patch.dict(archive.__opts__, {'test': False,
@ -72,7 +73,8 @@ class ArchiveTestCase(TestCase):
with patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
'file.file_exists': mock_false,
'file.makedirs': mock_true,
'cmd.run_all': mock_run}):
'cmd.run_all': mock_run,
'file.source_list': mock_source_list}):
filename = os.path.join(
tmp_dir,
'files/test/_tmp_test_archive_.tar'