Merge pull request #38262 from terminalmage/issue38228

Fix archive.extracted when --strip or --strip-components is in the options
Mike Place 2016-12-15 01:57:18 -07:00 committed by GitHub
commit fd32dc3e9b
3 changed files with 138 additions and 23 deletions
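The change, condensed into a standalone sketch (the helper names and sample paths below are illustrative, not part of the patch): the state reads the strip count out of the tar options, and the execution module drops that many leading path components from each listed archive member.

import os
import re

# Illustrative sketch only -- not the code added by this PR.
def parse_strip_count(options):
    # Same idea as the state change: find --strip/--strip-components in the
    # tar options string and pull out the count.
    match = re.search(r'--strip(?:-components)?(?:\s+|=)(\d+)', options or '')
    return int(match.group(1)) if match else None

def strip_paths(paths, count):
    # Same idea as the module change: drop the first `count` path components
    # and discard anything that is not deep enough to survive the strip.
    stripped = []
    for path in paths:
        parts = path.split(os.sep, count)
        if len(parts) > count and parts[count]:
            stripped.append(parts[count])
    return stripped

print(parse_strip_count('--strip-components=1'))      # 1
print(strip_paths(['custom/', 'custom/README'], 1))   # ['README'] (POSIX os.sep assumed)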


@@ -39,6 +39,7 @@ log = logging.getLogger(__name__)
def list_(name,
archive_format=None,
options=None,
strip_components=None,
clean=False,
verbose=False,
saltenv='base'):
@@ -91,6 +92,14 @@ def list_(name,
It is not necessary to manually specify options for gzip'ed
archives, as gzip compression is natively supported by tarfile_.
strip_components
This argument specifies the number of leading path components to strip
from the results, mirroring the paths that would be created if
``--strip-components`` (or ``--strip``) were passed to tar when
extracting the archive.
.. versionadded:: 2016.11.2
clean : False
Set this value to ``True`` to delete the path referred to by ``name``
once the contents have been listed. This option should be used with
@@ -119,6 +128,7 @@ def list_(name,
.. code-block:: bash
salt '*' archive.list /path/to/myfile.tar.gz
salt '*' archive.list /path/to/myfile.tar.gz strip_components=1
salt '*' archive.list salt://foo.tar.gz
salt '*' archive.list https://domain.tld/myfile.zip
salt '*' archive.list ftp://10.1.2.3/foo.rar
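For illustration, given a hypothetical archive containing custom/, custom/README and custom/docs/index.rst, a strip_components=1 listing would drop the leading custom/ component (a sketch of the effect, not captured command output):

# Hypothetical archive members and the effect of strip_components=1
members = ['custom/', 'custom/README', 'custom/docs/index.rst']
# archive.list(name)                      -> the members above, unchanged
# archive.list(name, strip_components=1)  -> ['README', 'docs/index.rst']
#                                            ('custom/' strips to nothing and is dropped)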
@@ -203,6 +213,17 @@ def list_(name,
raise CommandExecutionError('Failed to cache {0}'.format(name))
try:
if strip_components:
try:
strip_components = int(strip_components)
except ValueError:
strip_components = -1
if strip_components <= 0:
raise CommandExecutionError(
'\'strip_components\' must be a positive integer'
)
parsed = _urlparse(name)
path = parsed.path or parsed.netloc
@@ -253,6 +274,24 @@ def list_(name,
'Failed to clean cached archive %s: %s',
cached, exc.__str__()
)
if strip_components:
stripped_ret = []
for item in ret:
try:
# Strip off the specified number of directory boundaries,
# and grab what comes after the last stripped path
# separator.
stripped_item = item.split(
os.sep, strip_components)[strip_components]
if stripped_item:
stripped_ret.append(stripped_item)
except IndexError:
# Path is excluded by strip_components because it is not
# deep enough.
pass
ret = stripped_ret
if verbose:
verbose_ret = {'dirs': [],
'files': [],
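To make the stripping loop above concrete (the paths and strip depth are hypothetical): splitting on os.sep with a maxsplit of strip_components yields the remainder of the path, an empty string for a directory exactly at the strip depth, or an IndexError for anything shallower, which is how those entries end up excluded.

import os

# Hypothetical inputs; assumes a POSIX os.sep of '/'.
strip_components = 2
for item in ['a/b/c/file.txt', 'a/b/', 'a/']:
    try:
        stripped_item = item.split(os.sep, strip_components)[strip_components]
        print(repr(stripped_item))
    except IndexError:
        print('excluded (too shallow): {0}'.format(item))
# 'c/file.txt'
# ''            <- empty, filtered out by the "if stripped_item" check
# excluded (too shallow): a/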


@@ -527,7 +527,7 @@ def extracted(name,
- name: /opt/
- source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.gz
- source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
- tar_options: v
- options: v
- user: foo
- group: foo
@@ -684,6 +684,21 @@ def extracted(name,
ret.setdefault('warnings', []).append(msg)
options = zip_options
if options is not None and not isinstance(options, six.string_types):
options = str(options)
strip_components = None
if options and archive_format == 'tar':
try:
strip_components = int(
re.search(
r'''--strip(?:-components)?(?:\s+|=)["']?(\d+)["']?''',
options
).group(1)
)
except (AttributeError, ValueError):
pass
if archive_format == 'zip':
if options:
if use_cmd_unzip is None:
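The regex above accepts both spellings of the flag, with either whitespace or an equals sign before the count, and an optionally quoted value; a quick demonstration:

import re

pattern = r'''--strip(?:-components)?(?:\s+|=)["']?(\d+)["']?'''
for opts in ('--strip=1', '--strip 1', "--strip-components='2' -zvf"):
    print(re.search(pattern, opts).group(1))
# 1
# 1
# 2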
@@ -859,6 +874,7 @@ def extracted(name,
contents = __salt__['archive.list'](cached_source,
archive_format=archive_format,
options=list_options,
strip_components=strip_components,
clean=False,
verbose=True)
except CommandExecutionError as exc:
@@ -1160,10 +1176,7 @@
)
return ret
try:
tar_opts = shlex.split(options)
except AttributeError:
tar_opts = shlex.split(str(options))
tar_opts = shlex.split(options)
tar_cmd = ['tar']
tar_shortopts = 'x'
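The try/except around shlex.split is no longer needed because options is coerced to a string earlier in the function (added earlier in this file's diff); shlex.split can then tokenize it directly before the tar command line is assembled, e.g.:

import shlex

# Hypothetical options string; splitting it is the first step in building tar_cmd.
print(shlex.split('--strip-components=1 --warning=no-timestamp'))
# ['--strip-components=1', '--warning=no-timestamp']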


@@ -4,10 +4,11 @@ Tests for the archive state
'''
# Import python libs
from __future__ import absolute_import
import errno
import logging
import os
import platform
import socket
import shutil
import threading
import tornado.ioloop
import tornado.web
@@ -21,15 +22,18 @@ ensure_in_syspath('../../')
import integration
import salt.utils
# Setup logging
log = logging.getLogger(__name__)
STATE_DIR = os.path.join(integration.FILES, 'file', 'base')
if salt.utils.is_windows():
ARCHIVE_DIR = os.path.join("c:/", "tmp")
else:
ARCHIVE_DIR = '/tmp/archive/'
ARCHIVE_DIR = '/tmp/archive'
PORT = 9999
ARCHIVE_TAR_SOURCE = 'http://localhost:{0}/custom.tar.gz'.format(PORT)
UNTAR_FILE = ARCHIVE_DIR + 'custom/README'
UNTAR_FILE = os.path.join(ARCHIVE_DIR, 'custom/README')
ARCHIVE_TAR_HASH = 'md5=7643861ac07c30fe7d2310e9f25ca514'
STATE_DIR = os.path.join(integration.FILES, 'file', 'base')
if '7' in platform.dist()[1]:
@@ -77,18 +81,26 @@ class ArchiveTest(integration.ModuleCase,
tornado.ioloop.IOLoop.instance().stop()
cls.server_thread.join()
def _check_ext_remove(self, dir, file):
def setUp(self):
self._clear_archive_dir()
def tearDown(self):
self._clear_archive_dir()
@staticmethod
def _clear_archive_dir():
try:
salt.utils.rm_rf(ARCHIVE_DIR)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def _check_extracted(self, path):
'''
function to check if file was extracted
and remove the directory.
'''
# check to see if it extracted
check_dir = os.path.isfile(file)
self.assertTrue(check_dir)
# wipe away dir. Can't do this in teardown
# because it needs to be wiped before each test
shutil.rmtree(dir)
log.debug('Checking for extracted file: %s', path)
self.assertTrue(os.path.isfile(path))
def test_archive_extracted_skip_verify(self):
'''
@@ -97,11 +109,12 @@ class ArchiveTest(integration.ModuleCase,
ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
source=ARCHIVE_TAR_SOURCE, archive_format='tar',
skip_verify=True)
log.debug('ret = %s', ret)
if 'Timeout' in ret:
self.skipTest('Timeout talking to local tornado server.')
self.assertSaltTrueReturn(ret)
self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
self._check_extracted(UNTAR_FILE)
def test_archive_extracted_with_source_hash(self):
'''
@@ -112,30 +125,80 @@ class ArchiveTest(integration.ModuleCase,
ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
source=ARCHIVE_TAR_SOURCE, archive_format='tar',
source_hash=ARCHIVE_TAR_HASH)
log.debug('ret = %s', ret)
if 'Timeout' in ret:
self.skipTest('Timeout talking to local tornado server.')
self.assertSaltTrueReturn(ret)
self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
self._check_extracted(UNTAR_FILE)
@skipIf(os.geteuid() != 0, 'you must be root to run this test')
def test_archive_extracted_with_root_user_and_group(self):
'''
test archive.extracted without skip_verify
only external resources work to check to
ensure source_hash is verified correctly
test archive.extracted with user and group set to "root"
'''
ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
source=ARCHIVE_TAR_SOURCE, archive_format='tar',
source_hash=ARCHIVE_TAR_HASH,
user='root', group='root')
log.debug('ret = %s', ret)
if 'Timeout' in ret:
self.skipTest('Timeout talking to local tornado server.')
self.assertSaltTrueReturn(ret)
self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
self._check_extracted(UNTAR_FILE)
@skipIf(os.geteuid() != 0, 'you must be root to run this test')
def test_archive_extracted_with_strip_in_options(self):
'''
test archive.extracted with --strip in options
'''
ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
source=ARCHIVE_TAR_SOURCE,
source_hash=ARCHIVE_TAR_HASH,
options='--strip=1',
enforce_toplevel=False)
log.debug('ret = %s', ret)
if 'Timeout' in ret:
self.skipTest('Timeout talking to local tornado server.')
self.assertSaltTrueReturn(ret)
self._check_extracted(os.path.join(ARCHIVE_DIR, 'README'))
@skipIf(os.geteuid() != 0, 'you must be root to run this test')
def test_archive_extracted_with_strip_components_in_options(self):
'''
test archive.extracted with --strip-components in options
'''
ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
source=ARCHIVE_TAR_SOURCE,
source_hash=ARCHIVE_TAR_HASH,
options='--strip-components=1',
enforce_toplevel=False)
log.debug('ret = %s', ret)
if 'Timeout' in ret:
self.skipTest('Timeout talking to local tornado server.')
self.assertSaltTrueReturn(ret)
self._check_extracted(os.path.join(ARCHIVE_DIR, 'README'))
def test_archive_extracted_without_archive_format(self):
'''
test archive.extracted with no archive_format option
'''
ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
source=ARCHIVE_TAR_SOURCE,
source_hash=ARCHIVE_TAR_HASH)
log.debug('ret = %s', ret)
if 'Timeout' in ret:
self.skipTest('Timeout talking to local tornado server.')
self.assertSaltTrueReturn(ret)
self._check_extracted(UNTAR_FILE)
if __name__ == '__main__':