archive.extracted: don't try to cache local sources (#38285)

* archive.extracted: don't try to cache local sources

This will keep us from trying to cache the file when we already have it
locally, which will help significantly with larger archives.

* Update tests to reflect change in cache behavior

* Add mock return for /bin/tar

This fixes failing tests for Ubuntu 14
Erik Johnson 2016-12-19 15:37:58 -06:00 committed by Nicole Thomas
parent 13414949e3
commit 79231a5d8a
2 changed files with 79 additions and 47 deletions
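
The heart of the change is a scheme check on the source: a source with an empty scheme (a plain filesystem path) or a file:// scheme already exists on the minion, so copying it into the minion's file cache only burns time and disk for large archives. Below is a rough standalone sketch of that check, using the stdlib urlparse in place of the _urlparse helper the state imports; the example paths are illustrative only.

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2, which Salt still supported at the time

def source_is_local(source):
    '''Return True for plain paths and file:// URLs, False for salt://, http://, etc.'''
    return urlparse(source).scheme in ('', 'file')

# source_is_local('/tmp/foo.tar.gz')         -> True  (empty scheme)
# source_is_local('file:///tmp/foo.tar.gz')  -> True  (urlparse(...).path drops the file:// prefix)
# source_is_local('salt://foo.tar.gz')       -> False (still cached from the fileserver)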


@@ -700,6 +700,14 @@ def extracted(name,
     urlparsed_source = _urlparse(source_match)
     source_hash_basename = urlparsed_source.path or urlparsed_source.netloc

+    source_is_local = urlparsed_source.scheme in ('', 'file')
+    if source_is_local:
+        # Get rid of "file://" from start of source_match
+        source_match = urlparsed_source.path
+        if not os.path.isfile(source_match):
+            ret['comment'] = 'Source file \'{0}\' does not exist'.format(source_match)
+            return ret
+
     valid_archive_formats = ('tar', 'rar', 'zip')
     if not archive_format:
         archive_format = salt.utils.files.guess_archive_type(source_hash_basename)
@@ -827,16 +835,19 @@
            )
            return ret

-    cached_source = os.path.join(
-        __opts__['cachedir'],
-        'files',
-        __env__,
-        re.sub(r'[:/\\]', '_', source_hash_basename),
-    )
+    if source_is_local:
+        cached_source = source_match
+    else:
+        cached_source = os.path.join(
+            __opts__['cachedir'],
+            'files',
+            __env__,
+            re.sub(r'[:/\\]', '_', source_hash_basename),
+        )

-    if os.path.isdir(cached_source):
-        # Prevent a traceback from attempting to read from a directory path
-        salt.utils.rm_rf(cached_source)
+        if os.path.isdir(cached_source):
+            # Prevent a traceback from attempting to read from a directory path
+            salt.utils.rm_rf(cached_source)

     if source_hash:
         try:
@@ -858,7 +869,7 @@ def extracted(name,
     else:
         source_sum = {}

-    if not os.path.isfile(cached_source):
+    if not source_is_local and not os.path.isfile(cached_source):
         if __opts__['test']:
             ret['result'] = None
             ret['comment'] = \
@@ -1373,7 +1384,7 @@ def extracted(name,
             ret['changes']['directories_created'] = [name]
         ret['changes']['extracted_files'] = files
         ret['comment'] = '{0} extracted to {1}'.format(source_match, name)
-        if not keep:
+        if not source_is_local and not keep:
             log.debug('Cleaning cached source file %s', cached_source)
             try:
                 os.remove(cached_source)
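
For a remote source the state still builds cached_source under the minion cachedir by flattening path separators and drive colons in the URL path into underscores (second hunk above); a local source now skips that entirely, so there is nothing to download, verify into the cache, or clean up afterwards. A quick illustration of the path munging with hypothetical values (the cachedir, saltenv and source path below are made up):

import os
import re

# Hypothetical values for illustration only
cachedir = '/var/cache/salt/minion'
saltenv = 'base'
source_hash_basename = '/archives/foo.tar.gz'   # path portion of e.g. http://fileserver/archives/foo.tar.gz

cached_source = os.path.join(
    cachedir,
    'files',
    saltenv,
    re.sub(r'[:/\\]', '_', source_hash_basename),
)
# cached_source == '/var/cache/salt/minion/files/base/_archives_foo.tar.gz'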


@@ -6,7 +6,6 @@
 # Import python libs
 from __future__ import absolute_import
 import os
-import tempfile

 # Import Salt Testing libs
 from salttesting import TestCase, skipIf
@@ -31,6 +30,24 @@ archive.__opts__ = {"cachedir": "/tmp", "test": False}
 archive.__env__ = 'test'


+def _isfile_side_effect(path):
+    '''
+    MagicMock side_effect for os.path.isfile(). We don't want to use dict.get
+    here because we want the test to fail if there's a path we haven't
+    accounted for, so that we can add it.
+
+    NOTE: This may fall over on some platforms if /usr/bin/tar does not exist.
+    If so, just add an entry in the dictionary for the path being used for tar.
+    '''
+    return {
+        '/tmp/foo.tar.gz': True,
+        '/tmp/out': False,
+        '/usr/bin/tar': True,
+        '/bin/tar': True,
+        '/tmp/test_extracted_tar': False,
+    }[path]
+
+
 @skipIf(NO_MOCK, NO_MOCK_REASON)
 class ArchiveTestCase(TestCase):
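
The helper above drives MagicMock's side_effect: each call the state makes to the patched os.path.isfile is answered from the dictionary, and any path the test did not anticipate raises KeyError instead of silently returning a default, which is why it indexes the dict rather than calling dict.get. A toy demonstration of the pattern, separate from the suite (imports shown here are only for a standalone run):

try:
    from unittest.mock import MagicMock   # Python 3
except ImportError:
    from mock import MagicMock            # Python 2 'mock' package

isfile_mock = MagicMock(side_effect=lambda path: {'/tmp/foo.tar.gz': True}[path])

assert isfile_mock('/tmp/foo.tar.gz') is True
try:
    isfile_mock('/tmp/unexpected')   # path not accounted for in the dict
except KeyError:
    pass                             # dict.get would have hidden this by returning None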
@@ -45,8 +62,8 @@ class ArchiveTestCase(TestCase):
         '''
         archive.extracted tar options
         '''
-        source = '/tmp/file.tar.gz'
-        tmp_dir = os.path.join(tempfile.gettempdir(), 'test_archive', '')
+        source = '/tmp/foo.tar.gz'
+        tmp_dir = '/tmp/test_extracted_tar'
         test_tar_opts = [
             '--no-anchored foo',
             'v -p --opt',
@@ -74,6 +91,7 @@ class ArchiveTestCase(TestCase):
             'top_level_dirs': [],
             'top_level_files': ['saltines', 'cheese'],
         })
+        isfile_mock = MagicMock(side_effect=_isfile_side_effect)

         with patch.dict(archive.__opts__, {'test': False,
                                            'cachedir': tmp_dir}):
@@ -84,17 +102,16 @@ class ArchiveTestCase(TestCase):
                                                'cmd.run_all': mock_run,
                                                'archive.list': list_mock,
                                                'file.source_list': mock_source_list}):
-                filename = os.path.join(
-                    tmp_dir,
-                    'files/test/_tmp_file.tar.gz'
-                )
-                for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts):
-                    ret = archive.extracted(tmp_dir,
-                                            source,
-                                            options=test_opts,
-                                            enforce_toplevel=False)
-                    ret_opts.append(filename)
-                    mock_run.assert_called_with(ret_opts, cwd=tmp_dir, python_shell=False)
+                with patch.object(os.path, 'isfile', isfile_mock):
+                    for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts):
+                        ret = archive.extracted(tmp_dir,
+                                                source,
+                                                options=test_opts,
+                                                enforce_toplevel=False)
+                        ret_opts.append(source)
+                        mock_run.assert_called_with(ret_opts,
+                                                    cwd=tmp_dir + os.sep,
+                                                    python_shell=False)

     def test_tar_gnutar(self):
         '''
@@ -102,8 +119,8 @@ class ArchiveTestCase(TestCase):
         '''
         gnutar = MagicMock(return_value='tar (GNU tar)')
         source = '/tmp/foo.tar.gz'
-        missing = MagicMock(return_value=False)
-        nop = MagicMock(return_value=True)
+        mock_false = MagicMock(return_value=False)
+        mock_true = MagicMock(return_value=True)
         state_single_mock = MagicMock(return_value={'local': {'result': True}})
         run_all = MagicMock(return_value={'retcode': 0, 'stdout': 'stdout', 'stderr': 'stderr'})
         mock_source_list = MagicMock(return_value=(source, None))
@@ -113,21 +130,23 @@ class ArchiveTestCase(TestCase):
             'top_level_dirs': [],
             'top_level_files': ['stdout'],
         })
+        isfile_mock = MagicMock(side_effect=_isfile_side_effect)

         with patch.dict(archive.__salt__, {'cmd.run': gnutar,
-                                           'file.directory_exists': missing,
-                                           'file.file_exists': missing,
+                                           'file.directory_exists': mock_false,
+                                           'file.file_exists': mock_false,
                                            'state.single': state_single_mock,
-                                           'file.makedirs': nop,
+                                           'file.makedirs': mock_true,
                                            'cmd.run_all': run_all,
                                            'archive.list': list_mock,
                                            'file.source_list': mock_source_list}):
-            ret = archive.extracted('/tmp/out',
-                                    source,
-                                    options='xvzf',
-                                    enforce_toplevel=False,
-                                    keep=True)
-            self.assertEqual(ret['changes']['extracted_files'], 'stdout')
+            with patch.object(os.path, 'isfile', isfile_mock):
+                ret = archive.extracted('/tmp/out',
+                                        source,
+                                        options='xvzf',
+                                        enforce_toplevel=False,
+                                        keep=True)
+                self.assertEqual(ret['changes']['extracted_files'], 'stdout')

     def test_tar_bsdtar(self):
         '''
@@ -135,8 +154,8 @@ class ArchiveTestCase(TestCase):
         '''
         bsdtar = MagicMock(return_value='tar (bsdtar)')
         source = '/tmp/foo.tar.gz'
-        missing = MagicMock(return_value=False)
-        nop = MagicMock(return_value=True)
+        mock_false = MagicMock(return_value=False)
+        mock_true = MagicMock(return_value=True)
         state_single_mock = MagicMock(return_value={'local': {'result': True}})
         run_all = MagicMock(return_value={'retcode': 0, 'stdout': 'stdout', 'stderr': 'stderr'})
         mock_source_list = MagicMock(return_value=(source, None))
@@ -146,21 +165,23 @@ class ArchiveTestCase(TestCase):
             'top_level_dirs': [],
             'top_level_files': ['stderr'],
         })
+        isfile_mock = MagicMock(side_effect=_isfile_side_effect)

         with patch.dict(archive.__salt__, {'cmd.run': bsdtar,
-                                           'file.directory_exists': missing,
-                                           'file.file_exists': missing,
+                                           'file.directory_exists': mock_false,
+                                           'file.file_exists': mock_false,
                                            'state.single': state_single_mock,
-                                           'file.makedirs': nop,
+                                           'file.makedirs': mock_true,
                                            'cmd.run_all': run_all,
                                            'archive.list': list_mock,
                                            'file.source_list': mock_source_list}):
-            ret = archive.extracted('/tmp/out',
-                                    source,
-                                    options='xvzf',
-                                    enforce_toplevel=False,
-                                    keep=True)
-            self.assertEqual(ret['changes']['extracted_files'], 'stderr')
+            with patch.object(os.path, 'isfile', isfile_mock):
+                ret = archive.extracted('/tmp/out',
+                                        source,
+                                        options='xvzf',
+                                        enforce_toplevel=False,
+                                        keep=True)
+                self.assertEqual(ret['changes']['extracted_files'], 'stderr')

 if __name__ == '__main__':
     from integration import run_tests