Merge branch '2016.3' into '2016.11'
Conflicts:
- doc/topics/tutorials/gitfs.rst
- salt/fileserver/roots.py
- salt/loader.py
Commit 6d78adbf08
8 changed files with 203 additions and 91 deletions

@@ -79,9 +79,6 @@ Additionally, version 0.21.0 of pygit2 introduced a dependency on python-cffi_,
which in turn depends on newer releases of libffi_. Upgrading libffi_ is not
advisable as several other applications depend on it, so on older LTS linux
releases pygit2_ 0.20.3 and libgit2_ 0.20.0 is the recommended combination.
While these are not packaged in the official repositories for Debian and
Ubuntu, SaltStack is actively working on adding packages for these to our
repositories_. The progress of this effort can be tracked `here <salt-pack-70>`_.

.. warning::
    pygit2_ is actively developed and `frequently makes
@@ -99,8 +96,53 @@ repositories_. The progress of this effort can be tracked `here <salt-pack-70>`_
.. _libssh2: http://www.libssh2.org/
.. _python-cffi: https://pypi.python.org/pypi/cffi
.. _libffi: http://sourceware.org/libffi/
.. _repositories: https://repo.saltstack.com
.. _salt-pack-70: https://github.com/saltstack/salt-pack/issues/70


RedHat Pygit2 Issues
~~~~~~~~~~~~~~~~~~~~

Around the time of the release of RedHat 7.3, RedHat effectively broke pygit2_
by upgrading python-cffi_ to a release incompatible with the version of pygit2_
available in their repositories. This prevents Python from importing the
pygit2_ module at all, leading to a master that refuses to start, and leaving
the following errors in the master log file:

.. code-block:: text

    2017-02-10 09:07:34,892 [salt.utils.gitfs ][ERROR ][11211] Import pygit2 failed: CompileError: command 'gcc' failed with exit status 1
    2017-02-10 09:07:34,907 [salt.utils.gitfs ][ERROR ][11211] gitfs is configured but could not be loaded, are pygit2 and libgit2 installed?
    2017-02-10 09:07:34,907 [salt.utils.gitfs ][CRITICAL][11211] No suitable gitfs provider module is installed.
    2017-02-10 09:07:34,912 [salt.master ][CRITICAL][11211] Master failed pre flight checks, exiting

This issue has been reported on the `RedHat Bugzilla`_. In the meantime, you
can work around it by downgrading python-cffi_. To do this, go to `this page`_
and download the appropriate python-cffi_ 0.8.6 RPM. Then copy that RPM to the
master and downgrade using the ``rpm`` command. For example:

.. code-block:: bash

    # rpm -Uvh --oldpackage python-cffi-0.8.6-1.el7.x86_64.rpm
    Preparing... ################################# [100%]
    Updating / installing...
       1:python-cffi-0.8.6-1.el7 ################################# [ 50%]
    Cleaning up / removing...
       2:python-cffi-1.6.0-5.el7 ################################# [100%]
    # rpm -q python-cffi
    python-cffi-0.8.6-1.el7.x86_64

To confirm that pygit2_ is now "fixed", you can test trying to import it like so:

.. code-block:: bash

    # python -c 'import pygit2'
    #

If the command produces no output, then your master should work when you start
it again.

.. _`this page`: https://koji.fedoraproject.org/koji/buildinfo?buildID=569520
.. _`RedHat Bugzilla`: https://bugzilla.redhat.com/show_bug.cgi?id=1400668
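
If you prefer to verify from Python rather than the shell one-liner, a minimal
sketch of the same check (``__version__`` and ``LIBGIT2_VERSION`` are attributes
of recent pygit2 releases; treat them as assumptions):

.. code-block:: python

    # verify that pygit2 imports cleanly and report the library versions
    try:
        import pygit2
        print('pygit2 {0}, libgit2 {1}'.format(
            pygit2.__version__, pygit2.LIBGIT2_VERSION))
    except ImportError as exc:
        print('Import pygit2 failed: {0}'.format(exc))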


GitPython
---------

@@ -927,8 +969,8 @@ by this reactor.
Similarly, the tag name ``salt/fileserver/gitfs/update`` can be replaced by
anything, so long as the usage is consistent.

The ``root`` user name in the hook script and sudo policy should be changed to
match the user under which the minion is running.

.. _`post-receive hook`: http://www.git-scm.com/book/en/Customizing-Git-Git-Hooks#Server-Side-Hooks

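For reference, the tag discussed above is whatever the `post-receive hook`_
fires at the master. A rough Python equivalent of doing that from the git
server's minion (a sketch only, assuming default minion configuration and that
the ``event.fire_master`` execution module is available):

.. code-block:: python

    # fire the gitfs update event at the master, the same thing a shell hook
    # would do with: salt-call event.fire_master update salt/fileserver/gitfs/update
    import salt.client

    caller = salt.client.Caller()
    caller.cmd('event.fire_master', 'update', 'salt/fileserver/gitfs/update')
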
@@ -318,40 +318,82 @@ def _file_lists(load, form):
        return cache_match
    if refresh_cache:
        ret = {
            'files': [],
            'dirs': [],
            'empty_dirs': [],
            'links': []
            'files': set(),
            'dirs': set(),
            'empty_dirs': set(),
            'links': {}
        }

        def _add_to(tgt, fs_root, parent_dir, items):
            '''
            Add the files to the target set
            '''
            def _translate_sep(path):
                '''
                Translate path separators for Windows masterless minions
                '''
                return path.replace('\\', '/') if os.path.sep == '\\' else path

            for item in items:
                abs_path = os.path.join(parent_dir, item)
                log.trace('roots: Processing %s', abs_path)
                is_link = os.path.islink(abs_path)
                log.trace(
                    'roots: %s is %sa link',
                    abs_path, 'not ' if not is_link else ''
                )
                if is_link and __opts__['fileserver_ignoresymlinks']:
                    continue
                rel_path = _translate_sep(os.path.relpath(abs_path, fs_root))
                log.trace('roots: %s relative path is %s', abs_path, rel_path)
                if salt.fileserver.is_file_ignored(__opts__, rel_path):
                    continue
                tgt.add(rel_path)
                try:
                    if not os.listdir(abs_path):
                        ret['empty_dirs'].add(rel_path)
                except Exception:
                    # Generic exception because running os.listdir() on a
                    # non-directory path raises an OSError on *NIX and a
                    # WindowsError on Windows.
                    pass
                if is_link:
                    link_dest = os.readlink(abs_path)
                    log.trace(
                        'roots: %s symlink destination is %s',
                        abs_path, link_dest
                    )
                    if link_dest.startswith('..'):
                        joined = os.path.join(abs_path, link_dest)
                    else:
                        joined = os.path.join(
                            os.path.dirname(abs_path), link_dest
                        )
                    rel_dest = os.path.relpath(
                        os.path.realpath(os.path.normpath(joined)),
                        fs_root
                    )
                    log.trace(
                        'roots: %s relative path is %s',
                        abs_path, rel_dest
                    )
                    if not rel_dest.startswith('..'):
                        # Only count the link if it does not point
                        # outside of the root dir of the fileserver
                        # (i.e. the "path" variable)
                        ret['links'][rel_path] = rel_dest

        for path in __opts__['file_roots'][load['saltenv']]:
            for root, dirs, files in os.walk(
                    path,
                    followlinks=__opts__['fileserver_followsymlinks']):
                # Don't walk any directories that match file_ignore_regex or glob
                dirs[:] = [d for d in dirs if not salt.fileserver.is_file_ignored(__opts__, d)]
                _add_to(ret['dirs'], path, root, dirs)
                _add_to(ret['files'], path, root, files)

        ret['files'] = sorted(ret['files'])
        ret['dirs'] = sorted(ret['dirs'])
        ret['empty_dirs'] = sorted(ret['empty_dirs'])

                dir_rel_fn = os.path.relpath(root, path)
                if __opts__.get('file_client', 'remote') == 'local' and os.path.sep == "\\":
                    dir_rel_fn = dir_rel_fn.replace('\\', '/')
                ret['dirs'].append(dir_rel_fn)
                if len(dirs) == 0 and len(files) == 0:
                    if dir_rel_fn not in ('.', '..') \
                            and not salt.fileserver.is_file_ignored(__opts__, dir_rel_fn):
                        ret['empty_dirs'].append(dir_rel_fn)
                for fname in files:
                    is_link = os.path.islink(os.path.join(root, fname))
                    if is_link:
                        ret['links'].append(fname)
                    if __opts__['fileserver_ignoresymlinks'] and is_link:
                        continue
                    rel_fn = os.path.relpath(
                        os.path.join(root, fname),
                        path
                    )
                    if not salt.fileserver.is_file_ignored(__opts__, rel_fn):
                        if __opts__.get('file_client', 'remote') == 'local' and os.path.sep == "\\":
                            rel_fn = rel_fn.replace('\\', '/')
                        ret['files'].append(rel_fn)
        if save_cache:
            try:
                salt.fileserver.write_file_list_cache(
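
The new ``_add_to`` helper above only records a symlink in ``ret['links']``
when its resolved destination still lies inside the fileserver root. A minimal
standalone sketch of that containment test (simplified: it skips the special
branch for targets beginning with ``..``, and the function name is illustrative):

.. code-block:: python

    import os

    def link_escapes_root(abs_path, fs_root):
        # Resolve the link target relative to the link's own directory, then
        # express it relative to fs_root; a result starting with '..' means
        # the destination lies outside the fileserver root.
        link_dest = os.readlink(abs_path)
        joined = os.path.join(os.path.dirname(abs_path), link_dest)
        rel_dest = os.path.relpath(
            os.path.realpath(os.path.normpath(joined)), fs_root)
        return rel_dest.startswith('..')
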
@@ -403,28 +445,13 @@ def symlink_list(load):
    ret = {}
    if load['saltenv'] not in __opts__['file_roots']:
        return ret
    for path in __opts__['file_roots'][load['saltenv']]:
        try:
            prefix = load['prefix'].strip('/')
        except KeyError:
            prefix = ''
        # Adopting rsync functionality here and stopping at any encounter of a symlink
        for root, dirs, files in os.walk(os.path.join(path, prefix), followlinks=False):
            # Don't walk any directories that match file_ignore_regex or glob
            dirs[:] = [d for d in dirs if not salt.fileserver.is_file_ignored(__opts__, d)]
            for fname in files:
                if not os.path.islink(os.path.join(root, fname)):
                    continue
                rel_fn = os.path.relpath(
                    os.path.join(root, fname),
                    path
                )
                if not salt.fileserver.is_file_ignored(__opts__, rel_fn):
                    ret[rel_fn] = os.readlink(os.path.join(root, fname))
            for dname in dirs:
                if os.path.islink(os.path.join(root, dname)):
                    ret[os.path.relpath(os.path.join(root,
                                                     dname),
                                        path)] = os.readlink(os.path.join(root,
                                                                          dname))
    return ret

    if 'prefix' in load:
        prefix = load['prefix'].strip('/')
    else:
        prefix = ''

    symlinks = _file_lists(load, 'links')
    return dict([(key, val)
                 for key, val in six.iteritems(symlinks)
                 if key.startswith(prefix)])
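
The rewritten ``symlink_list`` no longer re-walks the filesystem; it pulls the
cached ``'links'`` mapping from ``_file_lists()`` and keeps only entries whose
relative path starts with the requested prefix. A small sketch of that filter
(standalone, using plain ``six``; the sample data is made up):

.. code-block:: python

    import six

    def filter_by_prefix(symlinks, prefix=''):
        # keep only the link entries under the requested path prefix
        return dict([(key, val)
                     for key, val in six.iteritems(symlinks)
                     if key.startswith(prefix)])

    print(filter_by_prefix({'dest_sym': 'source_sym', 'dir/link': 'x'}, 'dir'))
    # -> {'dir/link': 'x'}
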
@@ -1310,26 +1310,26 @@ class LazyLoader(salt.utils.lazy.LazyDict):
            else:
                desc = self.suffix_map[suffix]
                # if it is a directory, we don't open a file
                try:
                    mod_namespace = '.'.join((
                        self.loaded_base_name,
                        self.mod_type_check(fpath),
                        self.tag,
                        name))
                except TypeError:
                    mod_namespace = '{0}.{1}.{2}.{3}'.format(
                        self.loaded_base_name,
                        self.mod_type_check(fpath),
                        self.tag,
                        name)
                if suffix == '':
                    mod = imp.load_module(
                        '{0}.{1}.{2}.{3}'.format(
                            self.loaded_base_name,
                            self.mod_type_check(fpath),
                            self.tag,
                            name
                        ), None, fpath, desc)
                    mod = imp.load_module(mod_namespace, None, fpath, desc)
                    # reload all submodules if necessary
                    if not self.initial_load:
                        self._reload_submodules(mod)
                else:
                    with salt.utils.fopen(fpath, desc[1]) as fn_:
                        mod = imp.load_module(
                            '{0}.{1}.{2}.{3}'.format(
                                self.loaded_base_name,
                                self.mod_type_check(fpath),
                                self.tag,
                                name
                            ), fn_, fpath, desc)
                        mod = imp.load_module(mod_namespace, fn_, fpath, desc)

        except IOError:
            raise
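
Both ``imp.load_module`` call sites above now reuse a single precomputed
``mod_namespace`` instead of rebuilding the dotted name inline. A rough,
self-contained sketch of what such a namespaced load does (Python 2 ``imp``
API; the path and namespace below are illustrative, not taken from this diff):

.. code-block:: python

    import imp

    fpath = '/srv/salt/_modules/mymod.py'        # illustrative path
    desc = ('.py', 'U', imp.PY_SOURCE)           # (suffix, open mode, module type)
    with open(fpath, desc[1]) as fn_:
        # the first argument is only the name the module gets in sys.modules,
        # so a dotted namespace keeps loader-loaded modules from colliding
        # with anything importable from sys.path
        mod = imp.load_module('salt.loaded.int.module.mymod', fn_, fpath, desc)
    print(mod.__name__)                          # salt.loaded.int.module.mymod
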
@@ -1385,11 +1385,9 @@ class LazyLoader(salt.utils.lazy.LazyDict):
            except Exception:
                err_string = '__init__ failed'
                log.debug(
                    'Error loading {0}.{1}: {2}'.format(
                        self.tag,
                        module_name,
                        err_string),
                    exc_info=True)
                    'Error loading %s.%s: %s',
                    self.tag, module_name, err_string, exc_info=True
                )
                self.missing_modules[module_name] = err_string
                self.missing_modules[name] = err_string
                return False
@@ -1404,10 +1402,10 @@ class LazyLoader(salt.utils.lazy.LazyDict):
                    module_name,
                )
                if virtual_err is not None:
                    log.trace('Error loading {0}.{1}: {2}'.format(self.tag,
                                                                  module_name,
                                                                  virtual_err,
                                                                  ))
                    log.trace(
                        'Error loading %s.%s: %s',
                        self.tag, module_name, virtual_err
                    )

                # if process_virtual returned a non-True value then we are
                # supposed to not process this module
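
Both loader hunks above make the same change: the log message is no longer
built eagerly with ``str.format``; the arguments are handed to the logging
call, which interpolates them only if the record is actually emitted. A small
standalone sketch of the difference (the logger setup is illustrative):

.. code-block:: python

    import logging

    log = logging.getLogger(__name__)
    tag, module_name, err = 'module', 'mymod', '__init__ failed'

    # before: the message string is built even when DEBUG is disabled
    log.debug('Error loading {0}.{1}: {2}'.format(tag, module_name, err))

    # after: lazy %-style interpolation, done only if the record is emitted;
    # keyword arguments such as exc_info continue to work unchanged
    log.debug('Error loading %s.%s: %s', tag, module_name, err)
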
@@ -41,11 +41,18 @@ def __virtual__():
    return (False, 'useradd execution module not loaded: either pwd python library not available or system not one of Linux, OpenBSD or NetBSD')


def _quote_username(name):
    if isinstance(name, int):
        name = "{0}".format(name)

    return name


def _get_gecos(name):
    '''
    Retrieve GECOS field info and return it in dictionary form
    '''
    gecos_field = pwd.getpwnam(name).pw_gecos.split(',', 3)
    gecos_field = pwd.getpwnam(_quote_username(name)).pw_gecos.split(',', 3)
    if not gecos_field:
        return {}
    else:
@@ -521,7 +528,7 @@ def info(name):
        salt '*' user.info root
    '''
    try:
        data = pwd.getpwnam(name)
        data = pwd.getpwnam(_quote_username(name))
    except KeyError:
        return {}
    else:
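
Both useradd hunks route the user name through the new ``_quote_username()``
helper before calling ``pwd.getpwnam()``, which only accepts strings. A short
sketch of why that matters (the all-numeric user name is an assumed example,
e.g. a YAML value parsed as an integer):

.. code-block:: python

    import pwd

    def _quote_username(name):
        # pwd.getpwnam() raises TypeError for non-string arguments, so an
        # all-digit name that arrived as an int is converted to a string first
        if isinstance(name, int):
            name = "{0}".format(name)
        return name

    # pwd.getpwnam(1000)                   -> TypeError
    # pwd.getpwnam(_quote_username(1000))  -> looks up a user named "1000"
    print(pwd.getpwnam(_quote_username('root')).pw_dir)
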
@@ -1,6 +1,11 @@
# -*- coding: utf-8 -*-
'''
Recursively iterate over directories and add all files as Pillar data

``File_tree`` is an external pillar that allows
values from all files in a directory tree to be imported as Pillar data.

Note this is an external pillar, and is subject to the rules and constraints
governing external pillars detailed here: :ref:`external-pillars`.

.. versionadded:: 2015.5.0
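
The docstring describes an external pillar that mirrors a directory tree into
Pillar data. A rough sketch of the underlying idea, not the module's actual
implementation (it ignores nodegroup handling, templating, and other options):

.. code-block:: python

    import os

    def tree_to_pillar(root_dir):
        # Walk root_dir and build a nested dict mirroring the directory
        # layout, with each file's contents as the leaf value.
        pillar = {}
        for dirpath, _, filenames in os.walk(root_dir):
            rel = os.path.relpath(dirpath, root_dir)
            node = pillar
            if rel != '.':
                for part in rel.split(os.sep):
                    node = node.setdefault(part, {})
            for fname in filenames:
                with open(os.path.join(dirpath, fname)) as fh_:
                    node[fname] = fh_.read()
        return pillar
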
|
@ -60,6 +60,9 @@ def _walk_through(job_dir):
|
|||
for top in os.listdir(job_dir):
|
||||
t_path = os.path.join(job_dir, top)
|
||||
|
||||
if not os.path.exists(t_path):
|
||||
continue
|
||||
|
||||
for final in os.listdir(t_path):
|
||||
load_path = os.path.join(t_path, final, LOAD_P)
|
||||
|
||||
|
|
|
@@ -154,11 +154,12 @@ class RootsTest(integration.ModuleCase):
        self.assertIn('empty_dir', ret)

    def test_symlink_list(self):
        with patch.dict(roots.__opts__, {'file_roots': self.master_opts['file_roots'],
                                         'fileserver_ignoresymlinks': False,
                                         'fileserver_followsymlinks': False,
                                         'file_ignore_regex': False,
                                         'file_ignore_glob': False}):
        with patch.dict(roots.__opts__, {'cachedir': self.master_opts['cachedir'],
                                         'file_roots': self.master_opts['file_roots'],
                                         'fileserver_ignoresymlinks': False,
                                         'fileserver_followsymlinks': False,
                                         'file_ignore_regex': False,
                                         'file_ignore_glob': False}):
            ret = roots.symlink_list({'saltenv': 'base'})
            self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
@@ -15,6 +15,7 @@ from salt.exceptions import CommandExecutionError
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
    Mock,
    MagicMock,
    mock_open,
    patch,
@@ -130,6 +131,34 @@ class CpTestCase(TestCase):

        self.assertEqual(cp.push_dir(path), ret)

    @patch(
        'salt.modules.cp.os.path',
        MagicMock(isfile=Mock(return_value=True), wraps=cp.os.path))
    @patch.multiple(
        'salt.modules.cp',
        _auth=MagicMock(**{'return_value.gen_token.return_value': 'token'}),
        __opts__={'id': 'abc', 'file_buffer_size': 10})
    @patch('salt.utils.fopen', mock_open(read_data='content'))
    @patch('salt.transport.Channel.factory', MagicMock())
    def test_push(self):
        '''
        Test if push works with good posix path.
        '''
        import salt
        response = cp.push('/saltines/test.file')
        self.assertEqual(response, True)
        self.assertEqual(salt.utils.fopen().read.call_count, 2)
        salt.transport.Channel.factory({}).send.assert_called_once_with(
            dict(
                loc=salt.utils.fopen().tell(),
                cmd='_file_recv',
                tok='token',
                path=('saltines', 'test.file'),
                data='',  # data is empty here because load['data'] is overwritten
                id='abc'
            )
        )


if __name__ == '__main__':
    from integration import run_tests