mirror of
https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00
Merge pull request #55700 from Oloremo/55443-archive-hash-trust
Added a skip_files_list_verify argument to archive.extracted state
This commit is contained in:
commit
0eb09fc5e4
5 changed files with 176 additions and 4 deletions
|
@ -37,6 +37,7 @@ Versions are `MAJOR.PATCH`.
|
|||
- [#53736](https://github.com/saltstack/salt/issues/53736) - Added index get_settings, put_settings methods for Elasticsearch module. - [@Oloremo](https://github.com/Oloremo)
|
||||
- [#55418](https://github.com/saltstack/salt/pull/55418) - Added clean_parent argument for the archive state. - [@Oloremo](https://github.com/Oloremo)
|
||||
- [#55593](https://github.com/saltstack/salt/issues/55593) - Added a support for a global proxy to pip module. - [@Oloremo](https://github.com/Oloremo)
|
||||
- [#55443](https://github.com/saltstack/salt/issues/55443) - Added a skip_files_list_verify argument to archive.extracted state. - [@Oloremo](https://github.com/Oloremo)
|
||||
|
||||
---
|
||||
|
||||
|
|
|
@ -711,8 +711,8 @@ def list_minion(saltenv='base'):
|
|||
|
||||
def is_cached(path, saltenv='base'):
|
||||
'''
|
||||
Return a boolean if the given path on the master has been cached on the
|
||||
minion
|
||||
Returns the full path to a file if it is cached locally on the minion
|
||||
otherwise returns a blank string
|
||||
|
||||
CLI Example:
|
||||
|
||||
|
|
|
@ -176,6 +176,7 @@ def extracted(name,
|
|||
source_hash=None,
|
||||
source_hash_name=None,
|
||||
source_hash_update=False,
|
||||
skip_files_list_verify=False,
|
||||
skip_verify=False,
|
||||
password=None,
|
||||
options=None,
|
||||
|
@ -411,14 +412,33 @@ def extracted(name,
|
|||
|
||||
source_hash_update : False
|
||||
Set this to ``True`` if archive should be extracted if source_hash has
|
||||
changed. This would extract regardless of the ``if_missing`` parameter.
|
||||
changed and there is a difference between the archive and the local files.
|
||||
This would extract regardless of the ``if_missing`` parameter.
|
||||
|
||||
Note that this is only checked if the ``source`` value has not changed.
|
||||
If it has (e.g. to increment a version number in the path) then the
|
||||
archive will not be extracted even if the hash has changed.
|
||||
|
||||
.. note::
|
||||
Setting this to ``True`` along with ``keep_source`` set to ``False``
|
||||
will result in the ``source`` being re-downloaded to do an archive file list check.
|
||||
If this is not desirable, please consider the ``skip_files_list_verify``
|
||||
argument.
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
skip_files_list_verify : False
|
||||
Set this to ``True`` if archive should be extracted if source_hash has
|
||||
changed but only checksums of the archive will be checked to determine if
|
||||
the extraction is required.
|
||||
|
||||
.. note::
|
||||
The current limitation of this logic is that you have to set
|
||||
the minion's ``hash_type`` config option to the same one that you're going
|
||||
to pass via ``source_hash`` argument.
|
||||
|
||||
.. versionadded:: Neon
|
||||
|
||||
skip_verify : False
|
||||
If ``True``, hash verification of remote file sources (``http://``,
|
||||
``https://``, ``ftp://``) will be skipped, and the ``source_hash``
|
||||
|
@ -533,7 +553,7 @@ def extracted(name,
|
|||
then re-create that directory before extracting. Note that ``clean``
|
||||
and ``clean_parent`` are mutually exclusive.
|
||||
|
||||
.. versionadded:: Sodium
|
||||
.. versionadded:: Neon
|
||||
|
||||
user
|
||||
The user to own each extracted file. Not available on Windows.
|
||||
|
@ -678,6 +698,11 @@ def extracted(name,
|
|||
# Remove pub kwargs as they're irrelevant here.
|
||||
kwargs = salt.utils.args.clean_kwargs(**kwargs)
|
||||
|
||||
if skip_files_list_verify and skip_verify:
|
||||
ret['comment'] = ('Only one of "skip_files_list_verify" and '
|
||||
'"skip_verify" can be set to True')
|
||||
return ret
|
||||
|
||||
if 'keep_source' in kwargs and 'keep' in kwargs:
|
||||
ret.setdefault('warnings', []).append(
|
||||
'Both \'keep_source\' and \'keep\' were used. Since these both '
|
||||
|
@ -947,6 +972,31 @@ def extracted(name,
|
|||
else:
|
||||
source_sum = {}
|
||||
|
||||
if skip_files_list_verify:
|
||||
if source_is_local:
|
||||
cached = source_match
|
||||
else:
|
||||
cached = __salt__['cp.is_cached'](source_match, saltenv=__env__)
|
||||
|
||||
if cached:
|
||||
existing_cached_source_sum = _read_cached_checksum(cached)
|
||||
log.debug('Existing source sum is: "%s". Expected source sum is "%s"',
|
||||
existing_cached_source_sum, source_sum)
|
||||
if source_sum and existing_cached_source_sum:
|
||||
if existing_cached_source_sum['hsum'] == source_sum['hsum']:
|
||||
ret['result'] = None if __opts__['test'] else True
|
||||
ret['comment'] = (
|
||||
'Archive {0} existing source sum is the same as the '
|
||||
'expected one and skip_files_list_verify argument was set '
|
||||
'to True. Extraction is not needed'.format(
|
||||
salt.utils.url.redact_http_basic_auth(source_match)
|
||||
)
|
||||
)
|
||||
return ret
|
||||
else:
|
||||
log.debug('There is no cached source %s available on minion',
|
||||
source_match)
|
||||
|
||||
if source_is_local:
|
||||
cached = source_match
|
||||
else:
|
||||
|
|
|
@ -28,6 +28,7 @@ ARCHIVE_DIR = os.path.join('c:/', 'tmp') \
|
|||
ARCHIVE_NAME = 'custom.tar.gz'
|
||||
ARCHIVE_TAR_SOURCE = 'http://localhost:{0}/{1}'.format(9999, ARCHIVE_NAME)
|
||||
ARCHIVE_TAR_HASH = 'md5=7643861ac07c30fe7d2310e9f25ca514'
|
||||
ARCHIVE_TAR_SHA_HASH = 'sha256=9591159d86f0a180e4e0645b2320d0235e23e66c66797df61508bf185e0ac1d2'
|
||||
ARCHIVE_TAR_BAD_HASH = 'md5=d41d8cd98f00b204e9800998ecf8427e'
|
||||
ARCHIVE_TAR_HASH_UPPER = 'md5=7643861AC07C30FE7D2310E9F25CA514'
|
||||
|
||||
|
@ -256,3 +257,29 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
saltenv='prod')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self._check_extracted(os.path.join(ARCHIVE_DIR, self.untar_file))
|
||||
|
||||
def test_local_archive_extracted_with_skip_files_list_verify(self):
    '''
    test archive.extracted with local file and skip_files_list_verify set to True
    '''
    expected_comment = ('existing source sum is the same as the expected one and '
                        'skip_files_list_verify argument was set to True. '
                        'Extraction is not needed')
    # Both runs use exactly the same state arguments; only the outcome differs.
    state_kwargs = {
        'name': ARCHIVE_DIR,
        'source': self.archive_local_tar_source,
        'archive_format': 'tar',
        'skip_files_list_verify': True,
        'source_hash_update': True,
        'source_hash': ARCHIVE_TAR_SHA_HASH,
    }

    # First run: nothing is cached yet, so the archive must be extracted.
    first_ret = self.run_state('archive.extracted', **state_kwargs)
    self.assertSaltTrueReturn(first_ret)
    self._check_extracted(self.untar_file)

    # Second run: the cached source sum matches the expected one, so the
    # state should report success without re-extracting.
    second_ret = self.run_state('archive.extracted', **state_kwargs)
    self.assertSaltTrueReturn(second_ret)
    self.assertInSaltComment(expected_comment, second_ret)
|
||||
|
|
|
@ -266,3 +266,97 @@ class ArchiveTestCase(TestCase, LoaderModuleMockMixin):
|
|||
self.assertEqual(ret['result'], False)
|
||||
self.assertEqual(ret['changes'], {})
|
||||
self.assertEqual(ret['comment'], ret_comment)
|
||||
|
||||
def test_skip_files_list_verify_conflict(self):
    '''
    Tests the call of extraction with both skip_files_list_verify and skip_verify set to True
    '''
    source = '/tmp/foo.tar.gz'
    ret_comment = 'Only one of "skip_files_list_verify" and "skip_verify" can be set to True'

    # Canned return values for every loader function the state might touch.
    always_true = MagicMock(return_value=True)
    always_false = MagicMock(return_value=False)
    tar_version_mock = MagicMock(return_value='tar (GNU tar)')
    single_result_mock = MagicMock(return_value={'local': {'result': True}})
    run_all_mock = MagicMock(return_value={'retcode': 0, 'stdout': 'stdout', 'stderr': 'stderr'})
    source_list_mock = MagicMock(return_value=(source, None))
    archive_list_mock = MagicMock(return_value={
        'dirs': [],
        'files': ['stdout'],
        'links': [],
        'top_level_dirs': [],
        'top_level_files': ['stdout'],
        'top_level_links': [],
    })
    isfile_mock = MagicMock(side_effect=_isfile_side_effect)

    salt_dunder = {'cmd.run': tar_version_mock,
                   'file.directory_exists': always_false,
                   'file.file_exists': always_false,
                   'state.single': single_result_mock,
                   'file.makedirs': always_true,
                   'cmd.run_all': run_all_mock,
                   'archive.list': archive_list_mock,
                   'file.source_list': source_list_mock}

    with patch.dict(archive.__salt__, salt_dunder),\
            patch.dict(archive.__states__, {'file.directory': always_true}),\
            patch.object(os.path, 'isfile', isfile_mock),\
            patch('salt.utils.path.which', MagicMock(return_value=True)):
        ret = archive.extracted(os.path.join(os.sep + 'tmp', 'out'),
                                source,
                                options='xvzf',
                                enforce_toplevel=False,
                                clean=True,
                                skip_files_list_verify=True,
                                skip_verify=True,
                                keep=True)
        # The conflicting flags must short-circuit into a failed state
        # before any extraction work happens.
        self.assertEqual(ret['result'], False)
        self.assertEqual(ret['changes'], {})
        self.assertEqual(ret['comment'], ret_comment)
|
||||
|
||||
def test_skip_files_list_verify_success(self):
    '''
    Test that if the local and expected source hash are the same we won't do anything.
    '''

    # Platform-dependent fixture paths; only used as opaque strings by the
    # mocked loader functions below.
    if salt.utils.platform.is_windows():
        source = 'c:\\tmp\\foo.tar.gz'
        tmp_dir = 'c:\\tmp\\test_extracted_tar'
    elif salt.utils.platform.is_darwin():
        source = '/private/tmp/foo.tar.gz'
        tmp_dir = '/private/tmp/test_extracted_tar'
    else:
        source = '/tmp/foo.tar.gz'
        tmp_dir = '/tmp/test_extracted_tar'

    expected_comment = ('Archive {} existing source sum is the same as the '
                        'expected one and skip_files_list_verify argument '
                        'was set to True. Extraction is not needed'.format(source))
    # The state must return success with no changes when the cached and
    # expected checksums agree.
    expected_ret = {'name': tmp_dir,
                    'result': True,
                    'changes': {},
                    'comment': expected_comment
                    }
    mock_true = MagicMock(return_value=True)
    mock_false = MagicMock(return_value=False)
    # cp.is_cached only needs to return a truthy path-like string here;
    # the checksum read on it is mocked out below.
    mock_cached = MagicMock(return_value='{}/{}'.format(tmp_dir, source))
    source_sum = {'hsum': 'testhash', 'hash_type': 'sha256'}
    # The same mock serves both _read_cached_checksum and
    # file.get_source_sum, so cached and expected sums are guaranteed equal.
    mock_hash = MagicMock(return_value=source_sum)
    mock_source_list = MagicMock(return_value=(source, None))
    isfile_mock = MagicMock(side_effect=_isfile_side_effect)

    with patch('salt.states.archive._read_cached_checksum', mock_hash):
        with patch.dict(archive.__opts__, {'test': False,
                                           'cachedir': tmp_dir,
                                           'hash_type': 'sha256'}),\
                patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
                                              'file.get_source_sum': mock_hash,
                                              'file.check_hash': mock_true,
                                              'cp.is_cached': mock_cached,
                                              'file.source_list': mock_source_list}),\
                patch.object(os.path, 'isfile', isfile_mock):

            ret = archive.extracted(tmp_dir,
                                    source,
                                    source_hash='testhash',
                                    skip_files_list_verify=True,
                                    enforce_toplevel=False)
            self.assertDictEqual(ret, expected_ret)
|
||||
|
|
Loading…
Add table
Reference in a new issue