Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)
Support remote sources in a source list (#32691)
* Support remote sources in a source list

  This commit modifies the source_list check so that remote sources
  (http(s), ftp, etc.) are not fetched more than once. To do so, it adds
  the use of ``__context__`` in ``cp.cache_file`` and ``file.source_list``
  to prevent multiple fetches of a single file in the same salt run.

* Update tests

  Added __context__ to test cases to reflect usage of __context__, and also
  added file.source_list to mocked funcs for archive.extracted unit test.
This commit is contained in:
parent bd5442d768
commit 67d0c81184
5 changed files with 119 additions and 44 deletions
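The mechanism in one screen, before the diff: a minimal sketch of per-run memoization keyed the way this commit keys __context__. The helper and its names below are illustrative, not Salt's API.

import os


def cache_once(path, saltenv, context, fetch):
    # Build the same compound key the diff uses: '<fun>_|-<path>_|-<saltenv>'.
    contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
    cached = context.get(contextkey)
    if cached and os.path.isfile(cached):
        return cached                      # second hit in this run: no fetch
    result = fetch(path)                   # expensive remote download
    context[contextkey] = result           # remember it for the rest of the run
    return result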
salt/modules/cp.py

@@ -16,6 +16,7 @@ import salt.utils
 import salt.crypt
 import salt.transport
 from salt.exceptions import CommandExecutionError
+from salt.ext.six.moves.urllib.parse import urlparse as _urlparse  # pylint: disable=import-error,no-name-in-module

 log = logging.getLogger(__name__)

@@ -348,6 +349,25 @@ def cache_file(path, saltenv='base', env=None):
         # Backwards compatibility
         saltenv = env

+    contextkey = '{0}_|-{1}_|-{2}'.format('cp.cache_file', path, saltenv)
+    path_is_remote = _urlparse(path).scheme in ('http', 'https', 'ftp')
+    try:
+        if path_is_remote and contextkey in __context__:
+            # Prevent multiple caches in the same salt run. Affects remote URLs
+            # since the master won't know their hash, so the fileclient
+            # wouldn't be able to prevent multiple caches if we try to cache
+            # the remote URL more than once.
+            if os.path.isfile(__context__[contextkey]):
+                return __context__[contextkey]
+            else:
+                # File is in __context__ but no longer exists in the minion
+                # cache, get rid of the context key and re-cache below.
+                # Accounts for corner case where file is removed from minion
+                # cache between cp.cache_file calls in the same salt-run.
+                __context__.pop(contextkey)
+    except AttributeError:
+        pass
+
     _mk_client()
     if path.startswith('salt://|'):
         # Strip pipe. Windows doesn't allow pipes in filenames

@@ -371,6 +391,10 @@ def cache_file(path, saltenv='base', env=None):
                 path, saltenv
             )
         )
+    if path_is_remote:
+        # Cache was successful, store the result in __context__ to prevent
+        # multiple caches (see above).
+        __context__[contextkey] = result
     return result

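The try/except AttributeError above tolerates runs where __context__ is not available as a mapping; the isfile check handles the corner case the comments describe. That corner case in a standalone, runnable form, with a plain dict standing in for __context__:

import os
import tempfile

context = {}
key = 'cp.cache_file_|-https://example.com/pkg.tgz_|-base'

fd, cached_path = tempfile.mkstemp()
os.close(fd)
context[key] = cached_path                 # a first cache_file call stored this

os.remove(cached_path)                     # file evicted from minion cache mid-run
if not os.path.isfile(context[key]):
    context.pop(key)                       # stale key dropped; next call re-caches
assert key not in context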
salt/modules/file.py

@@ -33,7 +33,7 @@ import glob
 import mmap

 # pylint: disable=import-error,no-name-in-module,redefined-builtin
-from salt.ext.six import string_types
+from salt.ext import six
 from salt.ext.six.moves import range, reduce, zip
 from salt.ext.six.moves.urllib.parse import urlparse as _urlparse
 # pylint: enable=import-error,no-name-in-module,redefined-builtin
@@ -2839,56 +2839,69 @@ def source_list(source, source_hash, saltenv):

         salt '*' file.source_list salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' base
     '''
+    contextkey = '{0}_|-{1}_|-{2}'.format(source, source_hash, saltenv)
+    if contextkey in __context__:
+        return __context__[contextkey]
+
     # get the master file list
     if isinstance(source, list):
-        mfiles = __salt__['cp.list_master'](saltenv)
-        mdirs = __salt__['cp.list_master_dirs'](saltenv)
+        mfiles = [(f, saltenv) for f in __salt__['cp.list_master'](saltenv)]
+        mdirs = [(d, saltenv) for d in __salt__['cp.list_master_dirs'](saltenv)]
         for single in source:
             if isinstance(single, dict):
                 single = next(iter(single))

-            env_splitter = '?saltenv='
-            if '?env=' in single:
-                salt.utils.warn_until(
-                    'Boron',
-                    'Passing a salt environment should be done using '
-                    '\'saltenv\' not \'env\'. This functionality will be '
-                    'removed in Salt Boron.'
-                )
-                env_splitter = '?env='
-            try:
-                _, senv = single.split(env_splitter)
-            except ValueError:
-                continue
-            else:
-                mfiles += ['{0}?saltenv={1}'.format(f, senv)
-                           for f in __salt__['cp.list_master'](senv)]
-                mdirs += ['{0}?saltenv={1}'.format(d, senv)
-                          for d in __salt__['cp.list_master_dirs'](senv)]
+            path, senv = salt.utils.url.parse(single)
+            if senv:
+                mfiles += [(f, senv) for f in __salt__['cp.list_master'](senv)]
+                mdirs += [(d, senv) for d in __salt__['cp.list_master_dirs'](senv)]

         ret = None
         for single in source:
             if isinstance(single, dict):
                 # check the proto, if it is http or ftp then download the file
                 # to check, if it is salt then check the master list
+                # if it is a local file, check if the file exists
                 if len(single) != 1:
                     continue
                 single_src = next(iter(single))
                 single_hash = single[single_src] if single[single_src] else source_hash
-                proto = _urlparse(single_src).scheme
+                urlparsed_single_src = _urlparse(single_src)
+                proto = urlparsed_single_src.scheme
                 if proto == 'salt':
-                    if single_src[7:] in mfiles or single_src[7:] in mdirs:
+                    path, senv = salt.utils.url.parse(single_src)
+                    if not senv:
+                        senv = saltenv
+                    if (path, saltenv) in mfiles or (path, saltenv) in mdirs:
                         ret = (single_src, single_hash)
                         break
                 elif proto.startswith('http') or proto == 'ftp':
-                    dest = salt.utils.mkstemp()
-                    fn_ = __salt__['cp.get_url'](single_src, dest)
-                    os.remove(fn_)
-                    if fn_:
+                    if __salt__['cp.cache_file'](single_src):
                         ret = (single_src, single_hash)
                         break
-            elif isinstance(single, string_types):
-                if single[7:] in mfiles or single[7:] in mdirs:
+                elif proto == 'file' and os.path.exists(urlparsed_single_src.path):
+                    ret = (single_src, single_hash)
+                    break
+                elif single_src.startswith('/') and os.path.exists(single_src):
+                    ret = (single_src, single_hash)
+                    break
+            elif isinstance(single, six.string_types):
+                path, senv = salt.utils.url.parse(single)
+                if not senv:
+                    senv = saltenv
+                if (path, senv) in mfiles or (path, senv) in mdirs:
                     ret = (single, source_hash)
                     break
+                urlparsed_src = _urlparse(single)
+                proto = urlparsed_src.scheme
+                if proto == 'file' and os.path.exists(urlparsed_src.path):
+                    ret = (single, source_hash)
+                    break
+                elif proto.startswith('http') or proto == 'ftp':
+                    if __salt__['cp.cache_file'](single):
+                        ret = (single, source_hash)
+                        break
+                elif single.startswith('/') and os.path.exists(single):
+                    ret = (single, source_hash)
+                    break
         if ret is None:

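The switch from string membership (single_src[7:] in mfiles) to (path, saltenv) tuples is what lets ?saltenv=-qualified sources match files listed from another environment; the sketch below mirrors the plain-string branch above. The parse helper is a simplified stand-in for salt.utils.url.parse, not its real implementation:

def parse_salt_url(url):
    # Simplified stand-in for salt.utils.url.parse: strip 'salt://' and
    # split off an optional '?saltenv=' qualifier.
    path = url[len('salt://'):]
    if '?saltenv=' in path:
        path, senv = path.split('?saltenv=', 1)
        return path, senv
    return path, None


mfiles = [('http/httpd.conf', 'base'), ('http/httpd.conf', 'dev')]
path, senv = parse_salt_url('salt://http/httpd.conf?saltenv=dev')
if not senv:
    senv = 'base'
assert (path, senv) in mfiles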
@@ -2896,10 +2909,11 @@ def source_list(source, source_hash, saltenv):
             raise CommandExecutionError(
                 'none of the specified sources were found'
             )
-        else:
-            return ret
     else:
-        return source, source_hash
+        ret = (source, source_hash)
+
+    __context__[contextkey] = ret
+    return ret


 def get_managed(

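With every branch now assigning ret instead of returning early, the resolved pair is stored once under the context key, so a second source_list call with identical arguments in the same run is just a dictionary lookup. A reduced, runnable sketch with the resolution logic elided and a plain dict for __context__:

__context__ = {}


def source_list(source, source_hash, saltenv):
    contextkey = '{0}_|-{1}_|-{2}'.format(source, source_hash, saltenv)
    if contextkey in __context__:
        return __context__[contextkey]     # repeat call: no master round-trip
    ret = (source, source_hash)            # real resolution logic elided
    __context__[contextkey] = ret
    return ret


first = source_list('salt://http/httpd.conf', 'filehash', 'base')
assert source_list('salt://http/httpd.conf', 'filehash', 'base') is first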
@@ -3226,7 +3240,7 @@ def check_perms(name, ret, user, group, mode, follow_symlinks=False):
     elif 'cgroup' in perms and user != '':
         ret['changes']['group'] = group

-    if isinstance(orig_comment, string_types):
+    if isinstance(orig_comment, six.string_types):
         if orig_comment:
             ret['comment'].insert(0, orig_comment)
         ret['comment'] = '; '.join(ret['comment'])
@@ -3483,7 +3497,7 @@ def get_diff(

     ret = ''

-    if isinstance(env, string_types):
+    if isinstance(env, six.string_types):
         salt.utils.warn_until(
             'Boron',
             'Passing a salt environment should be done using \'saltenv\' not '

salt/states/archive.py

@@ -11,6 +11,13 @@ import os
 import tarfile
 from contextlib import closing

+# Import 3rd-party libs
+import salt.ext.six as six
+
+# Import salt libs
+from salt.exceptions import CommandExecutionError
+
+
 log = logging.getLogger(__name__)

 __virtualname__ = 'archive'
@@ -132,14 +139,31 @@ def extracted(name,
                             __env__,
                             '{0}.{1}'.format(if_missing.replace('/', '_'),
                                              archive_format))
+
+    if __opts__['test']:
+        source_match = source
+    else:
+        try:
+            source_match = __salt__['file.source_list'](source,
+                                                        source_hash,
+                                                        __env__)[0]
+        except CommandExecutionError as exc:
+            ret['result'] = False
+            ret['comment'] = exc.strerror
+            return ret
+
     if not os.path.exists(filename):
         if __opts__['test']:
             ret['result'] = None
             ret['comment'] = \
-                'Archive {0} would have been downloaded in cache'.format(source)
+                '{0} {1} would be downloaded to cache'.format(
+                    'One of' if not isinstance(source_match, six.string_types)
+                    else 'Archive',
+                    source_match
+                )
             return ret

-        log.debug('Archive file {0} is not in cache, download it'.format(source))
+        log.debug('%s is not in cache, downloading it', source_match)
         file_result = __salt__['state.single']('file.managed',
                                                filename,
                                                source=source,

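archive.extracted now resolves its source through file.source_list before looking at the cache, so list sources collapse to a single reachable entry and a total miss becomes a clean failed state instead of a traceback. A sketch of that guard with stubbed pieces; the exception class and callables here are stand-ins, not Salt's own:

class CommandExecutionError(Exception):
    strerror = 'none of the specified sources were found'


def resolve_source(source, source_hash, env, source_list, test_mode=False):
    # Mirrors the new guard in archive.extracted, with names simplified.
    ret = {'result': True, 'comment': ''}
    if test_mode:
        return source, ret                     # test mode skips resolution
    try:
        return source_list(source, source_hash, env)[0], ret
    except CommandExecutionError as exc:
        ret['result'] = False
        ret['comment'] = exc.strerror
        return None, ret


def no_sources(source, source_hash, env):
    raise CommandExecutionError()


match, ret = resolve_source(['salt://a', 'salt://b'], 'hash', 'base', no_sources)
assert match is None and ret['result'] is False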
@@ -162,17 +186,21 @@ def extracted(name,
                 log.debug('failed to download {0}'.format(source))
                 return file_result
     else:
-        log.debug('Archive file {0} is already in cache'.format(name))
+        log.debug('Archive %s is already in cache', name)

     if __opts__['test']:
         ret['result'] = None
-        ret['comment'] = 'Archive {0} would have been extracted in {1}'.format(
-            source, name)
+        ret['comment'] = '{0} {1} would be extracted to {2}'.format(
+            'One of' if not isinstance(source_match, six.string_types)
+            else 'Archive',
+            source_match,
+            name
+        )
         return ret

     __salt__['file.makedirs'](name, user=archive_user)

-    log.debug('Extract {0} in {1}'.format(filename, name))
+    log.debug('Extracting {0} to {1}'.format(filename, name))
     if archive_format == 'zip':
         files = __salt__['archive.unzip'](filename, name)
     elif archive_format == 'rar':

@@ -229,11 +257,11 @@ def extracted(name,
         ret['result'] = True
         ret['changes']['directories_created'] = [name]
         ret['changes']['extracted_files'] = files
-        ret['comment'] = '{0} extracted in {1}'.format(source, name)
+        ret['comment'] = '{0} extracted to {1}'.format(source_match, name)
         if not keep:
             os.unlink(filename)
     else:
         __salt__['file.remove'](if_missing)
         ret['result'] = False
-        ret['comment'] = 'Can\'t extract content of {0}'.format(source)
+        ret['comment'] = 'Can\'t extract content of {0}'.format(source_match)
     return ret

@@ -173,6 +173,7 @@ class FileModuleTest(integration.ModuleCase):
                 return_value=['http/httpd.conf.fallback']),
             'cp.list_master_dirs': MagicMock(return_value=[]),
         }
+        filemod.__context__ = {}

         ret = filemod.source_list(['salt://http/httpd.conf',
                                    'salt://http/httpd.conf.fallback'],

@@ -188,6 +189,8 @@ class FileModuleTest(integration.ModuleCase):
             'cp.list_master': MagicMock(side_effect=list_master),
             'cp.list_master_dirs': MagicMock(return_value=[]),
         }
+        filemod.__context__ = {}
+
         ret = filemod.source_list(['salt://http/httpd.conf?saltenv=dev',
                                    'salt://http/httpd.conf.fallback'],
                                   'filehash', 'base')

@@ -199,6 +202,8 @@ class FileModuleTest(integration.ModuleCase):
             'cp.list_master': MagicMock(return_value=['http/httpd.conf']),
             'cp.list_master_dirs': MagicMock(return_value=[]),
         }
+        filemod.__context__ = {}
+
         ret = filemod.source_list(
             [{'salt://http/httpd.conf': ''}], 'filehash', 'base')
         self.assertItemsEqual(ret, ['salt://http/httpd.conf', 'filehash'])

@@ -209,8 +214,10 @@ class FileModuleTest(integration.ModuleCase):
         filemod.__salt__ = {
             'cp.list_master': MagicMock(return_value=[]),
             'cp.list_master_dirs': MagicMock(return_value=[]),
-            'cp.get_url': MagicMock(return_value='/tmp/http.conf'),
+            'cp.cache_file': MagicMock(return_value='/tmp/http.conf'),
         }
+        filemod.__context__ = {}
+
         ret = filemod.source_list(
             [{'http://t.est.com/http/httpd.conf': 'filehash'}], '', 'base')
         self.assertItemsEqual(ret, ['http://t.est.com/http/httpd.conf',

@@ -59,6 +59,7 @@ class ArchiveTest(TestCase):
         mock_false = MagicMock(return_value=False)
         ret = {'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337'}
         mock_run = MagicMock(return_value=ret)
+        mock_source_list = MagicMock(return_value=source)

         with patch('os.path.exists', mock_true):
             with patch.dict(archive.__opts__, {'test': False,

@@ -66,7 +67,8 @@ class ArchiveTest(TestCase):
             with patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
                                                'file.file_exists': mock_false,
                                                'file.makedirs': mock_true,
-                                               'cmd.run_all': mock_run}):
+                                               'cmd.run_all': mock_run,
+                                               'file.source_list': mock_source_list}):
                 filename = os.path.join(
                     tmp_dir,
                     'files/test/_tmp_test_archive_.tar'

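The filemod.__context__ = {} resets above exist because source_list is now memoized; without them, a hit left over from an earlier test would short-circuit the code path being exercised. A standalone sketch of what the reset protects against, using unittest.mock rather than the salttesting helpers the suite actually uses:

from unittest import TestCase
from unittest.mock import MagicMock


class SourceListContextTest(TestCase):
    def test_second_call_is_served_from_context(self):
        context = {}
        list_master = MagicMock(return_value=['http/httpd.conf'])

        def source_list(source, source_hash, saltenv):
            key = '{0}_|-{1}_|-{2}'.format(source, source_hash, saltenv)
            if key in context:
                return context[key]
            list_master(saltenv)                     # the master round-trip
            ret = (source, source_hash)
            context[key] = ret
            return ret

        source_list('salt://http/httpd.conf', 'filehash', 'base')
        source_list('salt://http/httpd.conf', 'filehash', 'base')
        self.assertEqual(list_master.call_count, 1)  # fetched exactly once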