Merge remote-tracking branch 'upstream/2014.7' into merge-forward-2015.2

Conflicts:
	salt/fileserver/gitfs.py
	salt/grains/core.py
This commit is contained in:
Colton Myers 2015-01-22 16:46:32 -07:00
commit c1e630340e
9 changed files with 260 additions and 97 deletions

View file

@ -107,9 +107,7 @@ install GitPython`` (or ``easy_install GitPython``) as root.
Dulwich
-------
Dulwich does not, at this time, have a limitation on the supported version,
however support for Dulwich is new and it is possible that incompatibilities
with old versions will be found.
Dulwich 0.9.4 or newer is required to use Dulwich as backend for gitfs.
Dulwich is available in EPEL, and can be easily installed on the master using
yum:

View file

@ -1,20 +1,28 @@
#!/bin/bash
set -o errexit
PKG_DIR=$(cd $(dirname $0); pwd)
BUILD_DIR=build/output/salt
rm -rf dist/ $BUILD_DIR &&\
cp $PKG_DIR/_syspaths.py salt/ &&\
python2.7 setup.py sdist &&\
python2.7 setup.py bdist &&\
python2.7 setup.py bdist_esky &&\
rm salt/_syspaths.py &&\
rm -f dist/*.tar.gz &&\
mkdir -p $BUILD_DIR/{etc,install,bin/appdata} &&\
cp conf/* $BUILD_DIR/etc/
cp $PKG_DIR/*.xml $PKG_DIR/install.sh $BUILD_DIR/install &&\
chmod +x $BUILD_DIR/install/install.sh &&\
unzip -d $BUILD_DIR/bin dist/*.zip &&\
cp $BUILD_DIR/bin/*/libgcc_s.so.1 $BUILD_DIR/bin/ &&\
find build/output/salt/bin/ -mindepth 1 -maxdepth 1 -type d -not -name appdata -exec mv {} $BUILD_DIR/bin/appdata/ \; &&\
gtar -C $BUILD_DIR/.. -czvf dist/salt-$(git describe | sed 's|^v||')-esky-smartos.tar.gz salt &&\
# In case this is a git checkout, run sdist then extract out tarball
# contents to get all the critical versioning files into place
rm -rf dist/
python2.7 setup.py sdist
gtar xzvf dist/salt*.tar.gz --strip-components=1
rm -rf dist/ $BUILD_DIR
cp $PKG_DIR/_syspaths.py salt/
python2.7 setup.py bdist
python2.7 setup.py bdist_esky
rm salt/_syspaths.py
rm -f dist/*.tar.gz
mkdir -p $BUILD_DIR/{install,bin/appdata}
cp -r conf $BUILD_DIR/etc
cp $PKG_DIR/*.xml $PKG_DIR/install.sh $BUILD_DIR/install
chmod +x $BUILD_DIR/install/install.sh
unzip -d $BUILD_DIR/bin dist/*.zip
cp $BUILD_DIR/bin/*/libgcc_s.so.1 $BUILD_DIR/bin/
find build/output/salt/bin/ -mindepth 1 -maxdepth 1 -type d -not -name appdata -exec mv {} $BUILD_DIR/bin/appdata/ \;
gtar -C $BUILD_DIR/.. -czvf dist/salt-$(awk '/^Version:/{print $2}' < PKG-INFO)-esky-smartos.tar.gz salt
echo "tarball built"

View file

@ -51,12 +51,15 @@ import logging
import os
import re
import shutil
import stat
import subprocess
from datetime import datetime
from salt.ext.six import text_type as _text_type
from salt._compat import StringIO
VALID_PROVIDERS = ('gitpython', 'pygit2', 'dulwich')
PER_REMOTE_PARAMS = ('base', 'mountpoint', 'root')
SYMLINK_RECURSE_DEPTH = 100
# Auth support (auth params can be global or per-remote, too)
AUTH_PROVIDERS = ('pygit2',)
@ -422,7 +425,7 @@ def _get_tree_dulwich(repo, tgt_env):
return repo['repo'].get_object(commit.tree)
# Branch or tag not matched, check if 'tgt_env' is a commit. This is more
# difficult with Dulwich because of its inability to deal with tgt_envened
# difficult with Dulwich because of its inability to deal with shortened
# SHA-1 hashes.
if not _env_is_exposed(tgt_env):
return None
@ -1173,14 +1176,39 @@ def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613
if repo['root']:
repo_path = os.path.join(repo['root'], repo_path)
blob = None
depth = 0
if provider == 'gitpython':
tree = _get_tree_gitpython(repo, tgt_env)
if not tree:
# Branch/tag/SHA not found in repo, try the next
continue
try:
blob = tree / repo_path
except KeyError:
while True:
depth += 1
if depth > SYMLINK_RECURSE_DEPTH:
break
try:
file_blob = tree / repo_path
if stat.S_ISLNK(file_blob.mode):
# Path is a symlink. The blob data corresponding to
# this path's object ID will be the target of the
# symlink. Follow the symlink and set repo_path to the
# location indicated in the blob data.
stream = StringIO()
file_blob.stream_data(stream)
stream.seek(0)
link_tgt = stream.read()
stream.close()
repo_path = os.path.normpath(
os.path.join(os.path.dirname(repo_path), link_tgt)
)
else:
blob = file_blob
break
except KeyError:
# File not found or repo_path points to a directory
break
if blob is None:
continue
blob_hexsha = blob.hexsha
@ -1189,25 +1217,57 @@ def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613
if not tree:
# Branch/tag/SHA not found in repo, try the next
continue
try:
oid = tree[repo_path].oid
blob = repo['repo'][oid]
except KeyError:
while True:
depth += 1
if depth > SYMLINK_RECURSE_DEPTH:
break
try:
if stat.S_ISLNK(tree[repo_path].filemode):
# Path is a symlink. The blob data corresponding to this
# path's object ID will be the target of the symlink. Follow
# the symlink and set repo_path to the location indicated
# in the blob data.
link_tgt = repo['repo'][tree[repo_path].oid].data
repo_path = os.path.normpath(
os.path.join(os.path.dirname(repo_path), link_tgt)
)
else:
oid = tree[repo_path].oid
blob = repo['repo'][oid]
except KeyError:
break
if blob is None:
continue
blob_hexsha = blob.hex
elif provider == 'dulwich':
prefix_dirs, _, filename = repo_path.rpartition(os.path.sep)
tree = _get_tree_dulwich(repo, tgt_env)
tree = _dulwich_walk_tree(repo['repo'], tree, prefix_dirs)
if not isinstance(tree, dulwich.objects.Tree):
# Branch/tag/SHA not found in repo, try the next
continue
try:
# Referencing the path in the tree returns a tuple, the
# second element of which is the object ID of the blob
blob = repo['repo'].get_object(tree[filename][1])
except KeyError:
while True:
depth += 1
if depth > SYMLINK_RECURSE_DEPTH:
break
prefix_dirs, _, filename = repo_path.rpartition(os.path.sep)
tree = _get_tree_dulwich(repo, tgt_env)
tree = _dulwich_walk_tree(repo['repo'], tree, prefix_dirs)
if not isinstance(tree, dulwich.objects.Tree):
# Branch/tag/SHA not found in repo
break
try:
mode, oid = tree[filename]
if stat.S_ISLNK(mode):
# Path is a symlink. The blob data corresponding to
# this path's object ID will be the target of the
# symlink. Follow the symlink and set repo_path to the
# location indicated in the blob data.
link_tgt = repo['repo'].get_object(oid).as_raw_string()
repo_path = os.path.normpath(
os.path.join(os.path.dirname(repo_path), link_tgt)
)
else:
blob = repo['repo'].get_object(oid)
break
except KeyError:
break
if blob is None:
continue
blob_hexsha = blob.sha().hexdigest()
@ -1352,12 +1412,15 @@ def _file_lists(load, form):
return cache_match
if refresh_cache:
ret = {}
ret['files'] = _get_file_list(load)
ret['files'], ret['symlinks'] = _get_file_list(load)
ret['dirs'] = _get_dir_list(load)
if save_cache:
salt.fileserver.write_file_list_cache(
__opts__, ret, list_cache, w_lock
)
# NOTE: symlinks are organized in a dict instead of a list, however the
# 'symlinks' key will be defined above so it will never get to the
# default value in the call to ret.get() below.
return ret.get(form, [])
# Shouldn't get here, but if we do, this prevents a TypeError
return []
@ -1386,48 +1449,60 @@ def _get_file_list(load):
provider = _get_provider()
if 'saltenv' not in load or load['saltenv'] not in envs():
return []
ret = set()
return [], {}
files = set()
symlinks = {}
for repo in init():
fl_func = None
if provider == 'gitpython':
ret.update(
_file_list_gitpython(repo, load['saltenv'])
)
fl_func = _file_list_gitpython
elif provider == 'pygit2':
ret.update(
_file_list_pygit2(repo, load['saltenv'])
)
fl_func = _file_list_pygit2
elif provider == 'dulwich':
ret.update(
_file_list_dulwich(repo, load['saltenv'])
)
return sorted(ret)
fl_func = _file_list_dulwich
try:
repo_files, repo_symlinks = fl_func(repo, load['saltenv'])
except TypeError:
# We should never get here unless the gitfs_provider is not
# accounted for in the above if/elif block.
continue
else:
files.update(repo_files)
symlinks.update(repo_symlinks)
return sorted(files), symlinks
def _file_list_gitpython(repo, tgt_env):
'''
Get file list using GitPython
'''
ret = set()
files = set()
symlinks = {}
if tgt_env == 'base':
tgt_env = repo['base']
tree = _get_tree_gitpython(repo, tgt_env)
if not tree:
return ret
return files, symlinks
if repo['root']:
try:
tree = tree / repo['root']
except KeyError:
return ret
for blob in tree.traverse():
if not isinstance(blob, git.Blob):
return files, symlinks
relpath = lambda path: os.path.relpath(path, repo['root'])
add_mountpoint = lambda path: os.path.join(repo['mountpoint'], path)
for file_blob in tree.traverse():
if not isinstance(file_blob, git.Blob):
continue
if repo['root']:
path = os.path.relpath(blob.path, repo['root'])
else:
path = blob.path
ret.add(os.path.join(repo['mountpoint'], path))
return ret
file_path = add_mountpoint(relpath(file_blob.path))
files.add(file_path)
if stat.S_ISLNK(file_blob.mode):
stream = StringIO()
file_blob.stream_data(stream)
stream.seek(0)
link_tgt = stream.read()
stream.close()
symlinks[file_path] = link_tgt
return files, symlinks
def _file_list_pygit2(repo, tgt_env):
@ -1437,23 +1512,28 @@ def _file_list_pygit2(repo, tgt_env):
def _traverse(tree, repo_obj, blobs, prefix):
'''
Traverse through a pygit2 Tree object recursively, accumulating all the
blob paths within it in the "blobs" list
file paths and symlink info in the "blobs" dict
'''
for entry in iter(tree):
blob = repo_obj[entry.oid]
if isinstance(blob, pygit2.Blob):
blobs.append(os.path.join(prefix, entry.name))
elif isinstance(blob, pygit2.Tree):
_traverse(blob,
obj = repo_obj[entry.oid]
if isinstance(obj, pygit2.Blob):
repo_path = os.path.join(prefix, entry.name)
blobs.setdefault('files', []).append(repo_path)
if stat.S_ISLNK(tree[entry.name].filemode):
link_tgt = repo_obj[tree[entry.name].oid].data
blobs.setdefault('symlinks', {})[repo_path] = link_tgt
elif isinstance(obj, pygit2.Tree):
_traverse(obj,
repo_obj,
blobs,
os.path.join(prefix, entry.name))
ret = set()
files = set()
symlinks = {}
if tgt_env == 'base':
tgt_env = repo['base']
tree = _get_tree_pygit2(repo, tgt_env)
if not tree:
return ret
return files, symlinks
if repo['root']:
try:
# This might need to be changed to account for a root that
@ -1461,17 +1541,19 @@ def _file_list_pygit2(repo, tgt_env):
oid = tree[repo['root']].oid
tree = repo['repo'][oid]
except KeyError:
return ret
return files, symlinks
if not isinstance(tree, pygit2.Tree):
return ret
blobs = []
return files, symlinks
blobs = {}
if len(tree):
_traverse(tree, repo['repo'], blobs, repo['root'])
for blob in blobs:
if repo['root']:
blob = os.path.relpath(blob, repo['root'])
ret.add(os.path.join(repo['mountpoint'], blob))
return ret
relpath = lambda path: os.path.relpath(path, repo['root'])
add_mountpoint = lambda path: os.path.join(repo['mountpoint'], path)
for repo_path in blobs.get('files', []):
files.add(add_mountpoint(relpath(repo_path)))
for repo_path, link_tgt in blobs.get('symlinks', {}).iteritems():
symlinks[add_mountpoint(relpath(repo_path))] = link_tgt
return files, symlinks
def _file_list_dulwich(repo, tgt_env):
@ -1481,32 +1563,40 @@ def _file_list_dulwich(repo, tgt_env):
def _traverse(tree, repo_obj, blobs, prefix):
'''
Traverse through a dulwich Tree object recursively, accumulating all the
blob paths within it in the "blobs" list
file paths and symlink info in the "blobs" dict
'''
for item in tree.items():
obj = repo_obj.get_object(item.sha)
if isinstance(obj, dulwich.objects.Blob):
blobs.append(os.path.join(prefix, item.path))
repo_path = os.path.join(prefix, item.path)
blobs.setdefault('files', []).append(repo_path)
mode, oid = tree[item.path]
if stat.S_ISLNK(mode):
link_tgt = repo_obj.get_object(oid).as_raw_string()
blobs.setdefault('symlinks', {})[repo_path] = link_tgt
elif isinstance(obj, dulwich.objects.Tree):
_traverse(obj,
repo_obj,
blobs,
os.path.join(prefix, item.path))
ret = set()
files = set()
symlinks = {}
if tgt_env == 'base':
tgt_env = repo['base']
tree = _get_tree_dulwich(repo, tgt_env)
tree = _dulwich_walk_tree(repo['repo'], tree, repo['root'])
if not isinstance(tree, dulwich.objects.Tree):
return ret
blobs = []
return files, symlinks
blobs = {}
if len(tree):
_traverse(tree, repo['repo'], blobs, repo['root'])
for blob in blobs:
if repo['root']:
blob = os.path.relpath(blob, repo['root'])
ret.add(os.path.join(repo['mountpoint'], blob))
return ret
relpath = lambda path: os.path.relpath(path, repo['root'])
add_mountpoint = lambda path: os.path.join(repo['mountpoint'], path)
for repo_path in blobs.get('files', []):
files.add(add_mountpoint(relpath(repo_path)))
for repo_path, link_tgt in blobs.get('symlinks', {}).iteritems():
symlinks[add_mountpoint(relpath(repo_path))] = link_tgt
return files, symlinks
def file_list_emptydirs(load): # pylint: disable=W0613
@ -1659,3 +1749,27 @@ def _dir_list_dulwich(repo, tgt_env):
blob = os.path.relpath(blob, repo['root'])
ret.add(os.path.join(repo['mountpoint'], blob))
return ret
def symlink_list(load):
    '''
    Return a dict mapping symlink paths to their targets, filtered by an
    optional ``prefix`` in the load. Returns an empty dict when the
    requested saltenv is not exposed.
    '''
    if 'env' in load:
        salt.utils.warn_until(
            'Boron',
            'Passing a salt environment should be done using \'saltenv\' '
            'not \'env\'. This functionality will be removed in Salt Boron.'
        )
        load['saltenv'] = load.pop('env')
    if load['saltenv'] not in envs():
        return {}
    # Missing 'prefix' means no filtering (every path startswith '')
    prefix = load.get('prefix', '').strip('/')
    all_symlinks = _file_lists(load, 'symlinks')
    return dict([(lnk, tgt)
                 for lnk, tgt in all_symlinks.iteritems()
                 if lnk.startswith(prefix)])

View file

@ -1185,6 +1185,12 @@ def os_data():
grains['osfinger'] = '{os}-{ver}'.format(
os=grains['osfullname'],
ver=grains['osrelease'].partition('.')[0])
elif grains.get('os') in ('FreeBSD', 'OpenBSD', 'NetBSD'):
grains['osmajorrelease'] = grains['osrelease'].split('.', 1)[0]
grains['osfinger'] = '{os}-{ver}'.format(
os=grains['os'],
ver=grains['osrelease'])
if grains.get('osrelease', ''):
osrelease_info = grains['osrelease'].split('.')

View file

@ -579,6 +579,20 @@ class MultiMinion(MinionBase):
# have the Minion class run anything it has to run
next(minion['generator'])
# If a minion instance receives event, handle the event on all
# instances
if package:
try:
for master in masters:
minions[master].handle_event(package)
except Exception:
pass
finally:
package = None
# have the Minion class run anything it has to run
next(minion['generator'])
class Minion(MinionBase):
'''

View file

@ -1158,7 +1158,7 @@ def login(url=None, username=None, password=None, email=None):
salt '*' docker.login <url> <username> <password> <email>
'''
client = _get_client()
return client.login(url, username, password, email)
return client.login(username, password, email, url)
def search(term):

View file

@ -14,6 +14,7 @@ import salt.utils
from salt.ext.six import string_types
from salt.exceptions import CommandExecutionError
from salt.modules.systemd import _sd_booted
import string
log = logging.getLogger(__name__)
@ -35,6 +36,16 @@ def __virtual__():
return __virtualname__
def _check_systemd_salt_config():
    '''
    Ensure salt's sysctl drop-in file exists, creating the containing
    directory and an empty file when absent, and return the file's path.
    '''
    conf = '/etc/sysctl.d/99-salt.conf'
    if os.path.exists(conf):
        return conf
    sysctl_dir = os.path.dirname(conf)
    if not os.path.exists(sysctl_dir):
        os.makedirs(sysctl_dir)
    # Touch an empty config file so later writes have a target
    salt.utils.fopen(conf, 'w').close()
    return conf
def default_config():
'''
Linux hosts using systemd 207 or later ignore ``/etc/sysctl.conf`` and only
@ -56,7 +67,7 @@ def default_config():
version = line.split()[-1]
try:
if int(version) >= 207:
return '/etc/sysctl.d/99-salt.conf'
return _check_systemd_salt_config()
except ValueError:
log.error(
'Unexpected non-numeric systemd version {0!r} '
@ -133,7 +144,7 @@ def assign(name, value):
salt '*' sysctl.assign net.ipv4.ip_forward 1
'''
value = str(value)
sysctl_file = '/proc/sys/{0}'.format(name.replace('.', '/'))
sysctl_file = '/proc/sys/{0}'.format(name.translate(string.maketrans('./', '/.')))
if not os.path.exists(sysctl_file):
raise CommandExecutionError('sysctl {0} does not exist'.format(name))

View file

@ -134,11 +134,7 @@ def render(data, saltenv='base', sls='', argline='', **kwargs):
'''
if not HAS_GPG:
raise SaltRenderError('GPG unavailable')
if isinstance(__salt__, dict):
if 'config.get' in __salt__:
homedir = __salt__['config.get']('gpg_keydir', DEFAULT_GPG_KEYDIR)
else:
homedir = __opts__.get('gpg_keydir', DEFAULT_GPG_KEYDIR)
log.debug('Reading GPG keys from: {0}'.format(homedir))
homedir = __salt__['config.get']('gpg_keydir', DEFAULT_GPG_KEYDIR)
log.debug('Reading GPG keys from: {0}'.format(homedir))
gpg = gnupg.GPG(gnupghome=homedir)
return decrypt_object(data, gpg)

View file

@ -197,7 +197,23 @@ def extracted(name,
else:
log.debug('Untar {0} in {1}'.format(filename, name))
tar_cmd = ['tar', 'x{0}'.format(tar_options), '-f', repr(filename)]
tar_opts = tar_options.split(' ')
tar_cmd = ['tar']
tar_shortopts = 'x'
tar_longopts = []
for opt in tar_opts:
if not opt.startswith('-'):
if opt not in ['x', 'f']:
tar_shortopts = tar_shortopts + opt
else:
tar_longopts.append(opt)
tar_cmd.append(tar_shortopts)
tar_cmd.extend(tar_longopts)
tar_cmd.extend(['-f', filename])
results = __salt__['cmd.run_all'](tar_cmd, cwd=name, python_shell=False)
if results['retcode'] != 0:
ret['result'] = False