Merge branch '2017.7' into '2018.3'

Conflicts:
  - salt/cli/api.py
  - salt/client/ssh/wrapper/state.py
  - salt/config/__init__.py
  - salt/crypt.py
  - salt/daemons/masterapi.py
  - salt/key.py
  - salt/loader.py
  - salt/master.py
  - salt/modules/archive.py
  - salt/modules/hipchat.py
  - salt/modules/pip.py
  - salt/modules/state.py
  - salt/modules/virt.py
  - salt/modules/x509.py
  - salt/state.py
  - salt/transport/tcp.py
  - salt/transport/zeromq.py
  - salt/utils/__init__.py
  - salt/utils/event.py
  - salt/utils/extmods.py
  - salt/utils/files.py
  - salt/utils/parsers.py
  - salt/utils/verify.py
  - tests/integration/shell/test_call.py
  - tests/unit/utils/test_args.py
rallytime 2018-03-06 13:45:59 -05:00
commit d83727fdf9
47 changed files with 777 additions and 488 deletions


@ -29,6 +29,25 @@ load-plugins=saltpylint.pep8,
# Don't bump these values on PyLint 1.4.0 - Known bug that ignores the passed --rcfile
jobs=1
# List of blacklisted functions and suggested replacements
#
# NOTE: This pylint check will infer the full name of the function by walking
# back up from the function name to the parent, to the parent's parent, etc.,
# and this means that functions which come from platform-specific modules need
# to be referenced using name of the module from which the function was
# imported. This happens a lot in the os and os.path modules. Functions from
# os.path should be defined using posixpath.funcname and ntpath.funcname, while
# functions from os should be defined using posix.funcname and nt.funcname.
#
# When defining a blacklisted function, the format is:
#
# <func_name>=<suggested_replacement>
#
# The replacement text will be included in the alert message.
#
blacklisted-functions=posix.umask=salt.utils.files.set_umask or get_umask,
nt.umask=salt.utils.files.set_umask or get_umask
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
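The pattern this lint rule enforces is the one applied throughout the rest of this merge: instead of saving, setting, and restoring the process umask by hand around a block of work, callers wrap the block in the salt.utils.files.set_umask() context manager (its definition appears further down in this diff). A rough before/after sketch; the work() call is only a placeholder, not code taken from this diff:

    import os
    import salt.utils.files

    # Old style, now flagged by the blacklisted-functions check
    old_umask = os.umask(0o077)
    try:
        work()
    finally:
        os.umask(old_umask)

    # New style used across this merge
    with salt.utils.files.set_umask(0o077):
        work()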

Binary image file changed (preview not shown): 438 KiB before, 240 KiB after.


@ -341,10 +341,15 @@ rst_prolog = """\
| <a href="https://repo.saltstack.com/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe.md5"><strong>md5</strong></a></p>
.. |osxdownload| raw:: html
.. |osxdownloadpy2| raw:: html
<p>x86_64: <a href="https://repo.saltstack.com/osx/salt-{release}-x86_64.pkg"><strong>salt-{release}-x86_64.pkg</strong></a>
| <a href="https://repo.saltstack.com/osx/salt-{release}-x86_64.pkg.md5"><strong>md5</strong></a></p>
<p>x86_64: <a href="https://repo.saltstack.com/osx/salt-{release}-py2-x86_64.pkg"><strong>salt-{release}-py2-x86_64.pkg</strong></a>
| <a href="https://repo.saltstack.com/osx/salt-{release}-py2-x86_64.pkg.md5"><strong>md5</strong></a></p>
.. |osxdownloadpy3| raw:: html
<p>x86_64: <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
| <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
""".format(release=release)


@ -8,7 +8,8 @@ Installation from the Official SaltStack Repository
===================================================
**Latest stable build from the selected branch**:
|osxdownload|
|osxdownloadpy2|
|osxdownloadpy3|
The output of ``md5 <salt pkg>`` should match the contents of the
corresponding md5 file.


@ -9,11 +9,11 @@
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import logging
# Import Salt libs
import salt.client.netapi
import salt.utils.files
import salt.utils.parsers as parsers
from salt.utils.verify import check_user, verify_files, verify_log
@ -42,9 +42,8 @@ class SaltAPI(parsers.SaltAPIParser):
'udp://',
'file://')):
# Logfile is not using Syslog, verify
current_umask = os.umask(0o027)
verify_files([logfile], self.config['user'])
os.umask(current_umask)
with salt.utils.files.set_umask(0o027):
verify_files([logfile], self.config['user'])
except OSError as err:
log.exception('Failed to prepare salt environment')
self.shutdown(err.errno)


@ -1031,6 +1031,7 @@ class Single(object):
opts_pkg['pillar_roots'] = self.opts['pillar_roots']
opts_pkg['ext_pillar'] = self.opts['ext_pillar']
opts_pkg['extension_modules'] = self.opts['extension_modules']
opts_pkg['module_dirs'] = self.opts['module_dirs']
opts_pkg['_ssh_version'] = self.opts['_ssh_version']
opts_pkg['__master_opts__'] = self.context['master_opts']
if '_caller_cachedir' in self.opts:


@ -106,9 +106,11 @@ def need_deployment():
'''
if os.path.exists(OPTIONS.saltdir):
shutil.rmtree(OPTIONS.saltdir)
old_umask = os.umask(0o077)
os.makedirs(OPTIONS.saltdir)
os.umask(old_umask)
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
try:
os.makedirs(OPTIONS.saltdir)
finally:
os.umask(old_umask) # pylint: disable=blacklisted-function
# Verify perms on saltdir
if not is_windows():
euid = os.geteuid()
@ -158,10 +160,10 @@ def unpack_thin(thin_path):
Unpack the Salt thin archive.
'''
tfile = tarfile.TarFile.gzopen(thin_path)
old_umask = os.umask(0o077)
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
tfile.extractall(path=OPTIONS.saltdir)
tfile.close()
os.umask(old_umask)
os.umask(old_umask) # pylint: disable=blacklisted-function
try:
os.unlink(thin_path)
except OSError:
@ -189,10 +191,10 @@ def unpack_ext(ext_path):
'minion',
'extmods')
tfile = tarfile.TarFile.gzopen(ext_path)
old_umask = os.umask(0o077)
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
tfile.extractall(path=modcache)
tfile.close()
os.umask(old_umask)
os.umask(old_umask) # pylint: disable=blacklisted-function
os.unlink(ext_path)
ver_path = os.path.join(modcache, 'ext_version')
ver_dst = os.path.join(OPTIONS.saltdir, 'ext_version')
@ -299,7 +301,7 @@ def main(argv): # pylint: disable=W0613
sys.stderr.write(OPTIONS.delimiter + '\n')
sys.stderr.flush()
if OPTIONS.cmd_umask is not None:
old_umask = os.umask(OPTIONS.cmd_umask)
old_umask = os.umask(OPTIONS.cmd_umask) # pylint: disable=blacklisted-function
if OPTIONS.tty:
# Returns bytes instead of string on python 3
stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
@ -313,7 +315,7 @@ def main(argv): # pylint: disable=W0613
else:
subprocess.call(salt_argv)
if OPTIONS.cmd_umask is not None:
os.umask(old_umask)
os.umask(old_umask) # pylint: disable=blacklisted-function
if __name__ == '__main__':
sys.exit(main(sys.argv))


@ -491,17 +491,18 @@ def request(mods=None,
'kwargs': kwargs
}
})
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
msg = 'Unable to write state request file {0}. Check permission.'
log.error(msg.format(notify_path))
os.umask(cumask)
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
log.error(
'Unable to write state request file %s. Check permission.',
notify_path
)
return ret
@ -557,17 +558,18 @@ def clear_request(name=None):
req.pop(name)
else:
return False
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
msg = 'Unable to write state request file {0}. Check permission.'
log.error(msg.format(notify_path))
os.umask(cumask)
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
log.error(
'Unable to write state request file %s. Check permission.',
notify_path
)
return True


@ -2684,14 +2684,15 @@ def create(vm_):
non_hostname_chars = compile(r'[^\w-]')
if search(non_hostname_chars, vm_name):
hostName = split(non_hostname_chars, vm_name, maxsplit=1)[0]
domainName = split(non_hostname_chars, vm_name, maxsplit=1)[-1]
else:
hostName = vm_name
domainName = hostName.split('.', 1)[-1]
domainName = domain
if 'Windows' not in object_ref.config.guestFullName:
identity = vim.vm.customization.LinuxPrep()
identity.hostName = vim.vm.customization.FixedName(name=hostName)
identity.domain = domainName if hostName != domainName else domain
identity.domain = domainName
else:
identity = vim.vm.customization.Sysprep()
identity.guiUnattended = vim.vm.customization.GuiUnattended()


@ -650,10 +650,11 @@ VALID_OPTS = {
's3fs_update_interval': int,
'svnfs_update_interval': int,
'git_pillar_base': six.string_types,
'git_pillar_branch': six.string_types,
'git_pillar_env': six.string_types,
'git_pillar_root': six.string_types,
# NOTE: git_pillar_base, git_pillar_branch, git_pillar_env, and
# git_pillar_root omitted here because their values could conceivably be
# loaded as non-string types, which is OK because git_pillar will normalize
# them to strings. But rather than include all the possible types they
# could be, we'll just skip type-checking.
'git_pillar_ssl_verify': bool,
'git_pillar_global_lock': bool,
'git_pillar_user': six.string_types,
@ -665,12 +666,11 @@ VALID_OPTS = {
'git_pillar_refspecs': list,
'git_pillar_includes': bool,
'git_pillar_verify_config': bool,
# NOTE: gitfs_base, gitfs_mountpoint, and gitfs_root omitted here because
# their values could conceivably be loaded as non-string types, which is OK
# because gitfs will normalize them to strings. But rather than include all
# the possible types they could be, we'll just skip type-checking.
'gitfs_remotes': list,
'gitfs_mountpoint': six.string_types,
'gitfs_root': six.string_types,
'gitfs_base': six.string_types,
'gitfs_user': six.string_types,
'gitfs_password': six.string_types,
'gitfs_insecure_auth': bool,
'gitfs_privkey': six.string_types,
'gitfs_pubkey': six.string_types,
@ -885,11 +885,14 @@ VALID_OPTS = {
'winrepo_dir': six.string_types,
'winrepo_dir_ng': six.string_types,
'winrepo_cachefile': six.string_types,
# NOTE: winrepo_branch omitted here because its value could conceivably be
# loaded as a non-string type, which is OK because winrepo will normalize
# them to strings. But rather than include all the possible types it could
# be, we'll just skip type-checking.
'winrepo_cache_expire_max': int,
'winrepo_cache_expire_min': int,
'winrepo_remotes': list,
'winrepo_remotes_ng': list,
'winrepo_branch': six.string_types,
'winrepo_ssl_verify': bool,
'winrepo_user': six.string_types,
'winrepo_password': six.string_types,
@ -1630,6 +1633,7 @@ DEFAULT_MASTER_OPTS = {
'eauth_acl_module': '',
'eauth_tokens': 'localfs',
'extension_modules': os.path.join(salt.syspaths.CACHE_DIR, 'master', 'extmods'),
'module_dirs': [],
'file_recv': False,
'file_recv_max_size': 100,
'file_buffer_size': 1048576,


@ -84,8 +84,7 @@ def dropfile(cachedir, user=None):
'''
dfn = os.path.join(cachedir, '.dfn')
# set a mask (to avoid a race condition on file creation) and store original.
mask = os.umask(191)
try:
with salt.utils.files.set_umask(0o277):
log.info('Rotating AES key')
if os.path.isfile(dfn):
log.info('AES key rotation already requested')
@ -103,8 +102,6 @@ def dropfile(cachedir, user=None):
os.chown(dfn, uid, -1)
except (KeyError, ImportError, OSError, IOError):
pass
finally:
os.umask(mask) # restore original umask
def gen_keys(keydir, keyname, keysize, user=None, passphrase=None):
@ -138,21 +135,23 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None):
if not os.access(keydir, os.W_OK):
raise IOError('Write access denied to "{0}" for user "{1}".'.format(os.path.abspath(keydir), getpass.getuser()))
cumask = os.umask(0o277)
if HAS_M2:
# if passphrase is empty or None use no cipher
if not passphrase:
gen.save_pem(priv, cipher=None)
with salt.utils.files.set_umask(0o277):
if HAS_M2:
# if passphrase is empty or None use no cipher
if not passphrase:
gen.save_pem(priv, cipher=None)
else:
gen.save_pem(
priv,
cipher='des_ede3_cbc',
callback=lambda x: salt.utils.stringutils.to_bytes(passphrase))
else:
gen.save_pem(priv, cipher='des_ede3_cbc', callback=lambda x: six.b(passphrase))
else:
with salt.utils.files.fopen(priv, 'wb+') as f:
f.write(gen.exportKey('PEM', passphrase))
os.umask(cumask)
with salt.utils.files.fopen(priv, 'wb') as f:
f.write(gen.exportKey('PEM', passphrase))
if HAS_M2:
gen.save_pub_key(pub)
else:
with salt.utils.files.fopen(pub, 'wb+') as f:
with salt.utils.files.fopen(pub, 'wb') as f:
f.write(gen.publickey().exportKey('PEM'))
os.chmod(priv, 0o400)
if user:


@ -202,10 +202,9 @@ def mk_key(opts, user):
os.unlink(keyfile)
key = salt.crypt.Crypticle.generate_key_string()
cumask = os.umask(191)
with salt.utils.files.fopen(keyfile, 'w+') as fp_:
fp_.write(salt.utils.stringutils.to_str(key))
os.umask(cumask)
with salt.utils.files.set_umask(0o277):
with salt.utils.files.fopen(keyfile, 'w+') as fp_:
fp_.write(salt.utils.stringutils.to_str(key))
# 600 octal: Read and write access to the owner only.
# Write access is necessary since on subsequent runs, if the file
# exists, it needs to be written to again. Windows enforces this.


@ -143,22 +143,20 @@ class Client(object):
saltenv,
path)
destdir = os.path.dirname(dest)
cumask = os.umask(63)
with salt.utils.files.set_umask(0o077):
# remove destdir if it is a regular file to avoid an OSError when
# running os.makedirs below
if os.path.isfile(destdir):
os.remove(destdir)
# remove destdir if it is a regular file to avoid an OSError when
# running os.makedirs below
if os.path.isfile(destdir):
os.remove(destdir)
# ensure destdir exists
try:
os.makedirs(destdir)
except OSError as exc:
if exc.errno != errno.EEXIST: # ignore if it was there already
raise
# ensure destdir exists
try:
os.makedirs(destdir)
except OSError as exc:
if exc.errno != errno.EEXIST: # ignore if it was there already
raise
yield dest
os.umask(cumask)
yield dest
def get_cachedir(self, cachedir=None):
if cachedir is None:


@ -1044,7 +1044,7 @@ class RaetKey(Key):
'''
Use libnacl to generate and safely save a private key
'''
import libnacl.dual # pylint: disable=3rd-party-module-not-gated
import libnacl.dual # pylint: disable=import-error,3rd-party-module-not-gated
d_key = libnacl.dual.DualSecret()
keydir, keyname, _, _ = self._get_key_attrs(keydir, keyname,
keysize, user)
@ -1440,14 +1440,13 @@ class RaetKey(Key):
keydata = {'priv': priv,
'sign': sign}
path = os.path.join(self.opts['pki_dir'], 'local.key')
c_umask = os.umask(191)
if os.path.exists(path):
#mode = os.stat(path).st_mode
os.chmod(path, stat.S_IWUSR | stat.S_IRUSR)
with salt.utils.files.fopen(path, 'w+b') as fp_:
fp_.write(self.serial.dumps(keydata))
os.chmod(path, stat.S_IRUSR)
os.umask(c_umask)
with salt.utils.files.set_umask(0o277):
if os.path.exists(path):
#mode = os.stat(path).st_mode
os.chmod(path, stat.S_IWUSR | stat.S_IRUSR)
with salt.utils.files.fopen(path, 'w+') as fp_:
fp_.write(self.serial.dumps(keydata))
os.chmod(path, stat.S_IRUSR)
def delete_local(self):
'''


@ -791,29 +791,28 @@ def grains(opts, force_refresh=False, proxy=None):
grains_data.update(opts['grains'])
# Write cache if enabled
if opts.get('grains_cache', False):
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Late import
import salt.modules.cmdmod
# Make sure cache file isn't read-only
salt.modules.cmdmod._run_quiet('attrib -R "{0}"'.format(cfn))
with salt.utils.files.fopen(cfn, 'w+b') as fp_:
try:
serial = salt.payload.Serial(opts)
serial.dump(grains_data, fp_)
except TypeError as e:
log.error('Failed to serialize grains cache: %s', e)
raise # re-throw for cleanup
except Exception as e:
log.error('Unable to write to grains cache file %s: %s', cfn, e)
# Based on the original exception, the file may or may not have been
# created. If it was, we will remove it now, as the exception means
# the serialized data is not to be trusted, no matter what the
# exception is.
if os.path.isfile(cfn):
os.unlink(cfn)
os.umask(cumask)
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Late import
import salt.modules.cmdmod
# Make sure cache file isn't read-only
salt.modules.cmdmod._run_quiet('attrib -R "{0}"'.format(cfn))
with salt.utils.files.fopen(cfn, 'w+b') as fp_:
try:
serial = salt.payload.Serial(opts)
serial.dump(grains_data, fp_)
except TypeError as e:
log.error('Failed to serialize grains cache: %s', e)
raise # re-throw for cleanup
except Exception as e:
log.error('Unable to write to grains cache file %s: %s', cfn, e)
# Based on the original exception, the file may or may not have been
# created. If it was, we will remove it now, as the exception means
# the serialized data is not to be trusted, no matter what the
# exception is.
if os.path.isfile(cfn):
os.unlink(cfn)
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, opts['grains'])


@ -578,9 +578,8 @@ class Master(SMaster):
# Check to see if we need to create a pillar cache dir
if self.opts['pillar_cache'] and not os.path.isdir(os.path.join(self.opts['cachedir'], 'pillar_cache')):
try:
prev_umask = os.umask(0o077)
os.mkdir(os.path.join(self.opts['cachedir'], 'pillar_cache'))
os.umask(prev_umask)
with salt.utils.files.set_umask(0o077):
os.mkdir(os.path.join(self.opts['cachedir'], 'pillar_cache'))
except OSError:
pass


@ -1077,8 +1077,7 @@ def unzip(zip_file,
if not salt.utils.platform.is_windows():
perm = zfile.getinfo(target).external_attr >> 16
if perm == 0:
umask_ = os.umask(0)
os.umask(umask_)
umask_ = salt.utils.files.get_umask()
if target.endswith('/'):
perm = 0o777 & ~umask_
else:


@ -5358,30 +5358,25 @@ def manage_file(name,
# Create the file, user rw-only if mode will be set to prevent
# a small security race problem before the permissions are set
if mode:
current_umask = os.umask(0o77)
# Create a new file when test is False and source is None
if contents is None:
if not __opts__['test']:
if touch(name):
ret['changes']['new'] = 'file {0} created'.format(name)
ret['comment'] = 'Empty file'
else:
return _error(
ret, 'Empty file {0} not created'.format(name)
)
else:
if not __opts__['test']:
if touch(name):
ret['changes']['diff'] = 'New file'
else:
return _error(
ret, 'File {0} not created'.format(name)
)
if mode:
os.umask(current_umask)
with salt.utils.files.set_umask(0o077 if mode else None):
# Create a new file when test is False and source is None
if contents is None:
if not __opts__['test']:
if touch(name):
ret['changes']['new'] = 'file {0} created'.format(name)
ret['comment'] = 'Empty file'
else:
return _error(
ret, 'Empty file {0} not created'.format(name)
)
else:
if not __opts__['test']:
if touch(name):
ret['changes']['diff'] = 'New file'
else:
return _error(
ret, 'File {0} not created'.format(name)
)
if contents is not None:
# Write the static contents to a temporary file
@ -5415,8 +5410,7 @@ def manage_file(name,
# out what mode to use for the new file.
if mode is None and not salt.utils.platform.is_windows():
# Get current umask
mask = os.umask(0)
os.umask(mask)
mask = salt.utils.files.get_umask()
# Calculate the mode value that results from the umask
mode = oct((0o777 ^ mask) & 0o666)
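When no explicit mode is given on a non-Windows minion, the fallback mode above is derived from the current umask via the new get_umask() helper. Assuming the common default umask of 0o022, the arithmetic works out to the familiar 644:

    >>> mask = 0o022
    >>> oct((0o777 ^ mask) & 0o666)
    '0o644'

(the exact string returned by oct() is '0644' on Python 2 and '0o644' on Python 3).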


@ -513,7 +513,7 @@ if __name__ == '__main__':
sys.exit(1)
os.setsid()
os.umask(0)
os.umask(0o000) # pylint: disable=blacklisted-function
try:
pid = os.fork()


@ -42,8 +42,8 @@ def get(key,
Attempt to retrieve the named value from pillar, if the named value is not
available return the passed default. The default return is an empty string
except __opts__['pillar_raise_on_missing'] is set to True, in which case a
KeyError will be raised.
except ``__opts__['pillar_raise_on_missing']`` is set to True, in which
case a ``KeyError`` exception will be raised.
If the merge parameter is set to ``True``, the default will be recursively
merged into the returned pillar data.
@ -53,11 +53,18 @@ def get(key,
{'pkg': {'apache': 'httpd'}}
To retrieve the value associated with the apache key in the pkg dict this
key can be passed::
To retrieve the value associated with the ``apache`` key in the ``pkg``
dict this key can be passed as::
pkg:apache
key
The pillar key to get value from
default
If specified, return this value in case when named pillar value does
not exist.
merge : ``False``
If ``True``, the retrieved values will be merged into the passed
default. When the default and the retrieved value are both
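A minimal usage sketch for the parameters documented above, assuming it is called from a template or custom module where the __salt__ dunder is available; the key and fallback value are illustrative only:

    # With pillar data {'pkg': {'apache': 'httpd'}} this returns 'httpd';
    # if the key is missing, the supplied default is returned instead.
    __salt__['pillar.get']('pkg:apache', default='unknown')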


@ -338,6 +338,22 @@ def _process_requirements(requirements, cmd, cwd, saltenv, user):
return cleanup_requirements, None
def _format_env_vars(env_vars):
ret = {}
if env_vars:
if isinstance(env_vars, dict):
for key, val in six.iteritems(env_vars):
if not isinstance(key, six.string_types):
key = str(key) # future lint: disable=blacklisted-function
if not isinstance(val, six.string_types):
val = str(val) # future lint: disable=blacklisted-function
ret[key] = val
else:
raise CommandExecutionError(
'env_vars {0} is not a dictionary'.format(env_vars))
return ret
def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
requirements=None,
bin_env=None,
@ -811,16 +827,7 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
cmd_kwargs = dict(saltenv=saltenv, use_vt=use_vt, runas=user)
if env_vars:
if isinstance(env_vars, dict):
for key, val in six.iteritems(env_vars):
if not isinstance(key, six.string_types):
key = str(key) # future lint: disable=blacklisted-function
if not isinstance(val, six.string_types):
val = str(val) # future lint: disable=blacklisted-function
cmd_kwargs.setdefault('env', {})[key] = val
else:
raise CommandExecutionError(
'env_vars {0} is not a dictionary'.format(env_vars))
cmd_kwargs.setdefault('env', {}).update(_format_env_vars(env_vars))
try:
if cwd:
@ -974,7 +981,8 @@ def uninstall(pkgs=None,
def freeze(bin_env=None,
user=None,
cwd=None,
use_vt=False):
use_vt=False,
env_vars=None):
'''
Return a list of installed packages either globally or in the specified
virtualenv
@ -1027,6 +1035,8 @@ def freeze(bin_env=None,
cmd_kwargs = dict(runas=user, cwd=cwd, use_vt=use_vt, python_shell=False)
if bin_env and os.path.isdir(bin_env):
cmd_kwargs['env'] = {'VIRTUAL_ENV': bin_env}
if env_vars:
cmd_kwargs.setdefault('env', {}).update(_format_env_vars(env_vars))
result = __salt__['cmd.run_all'](cmd, **cmd_kwargs)
if result['retcode'] > 0:
@ -1038,7 +1048,8 @@ def freeze(bin_env=None,
def list_(prefix=None,
bin_env=None,
user=None,
cwd=None):
cwd=None,
env_vars=None):
'''
Filter list of installed apps from ``freeze`` and check to see if
``prefix`` exists in the list of packages installed.
@ -1067,7 +1078,7 @@ def list_(prefix=None,
if prefix is None or 'pip'.startswith(prefix):
packages['pip'] = version(bin_env)
for line in freeze(bin_env=bin_env, user=user, cwd=cwd):
for line in freeze(bin_env=bin_env, user=user, cwd=cwd, env_vars=env_vars):
if line.startswith('-f') or line.startswith('#'):
# ignore -f line as it contains --find-links directory
# ignore comment lines
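The new _format_env_vars() helper coerces every key and value of the env_vars dictionary to a string and rejects anything that is not a dictionary, so install(), freeze(), and list_() can merge the result straight into the environment passed to cmd.run_all. A rough sketch of its behaviour, with values chosen purely for illustration:

    _format_env_vars({'PYTHONPATH': '/opt/graphite/lib/', 'RETRIES': 3})
    # -> {'PYTHONPATH': '/opt/graphite/lib/', 'RETRIES': '3'}

    _format_env_vars(['PYTHONPATH=/opt/graphite/lib/'])
    # raises CommandExecutionError, since only a dictionary is accepted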


@ -777,19 +777,18 @@ def request(mods=None,
'kwargs': kwargs
}
})
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
log.error(
'Unable to write state request file %s. Check permission.',
notify_path
)
os.umask(cumask)
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
log.error(
'Unable to write state request file %s. Check permission.',
notify_path
)
return ret
@ -843,19 +842,18 @@ def clear_request(name=None):
req.pop(name)
else:
return False
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
log.error(
'Unable to write state request file %s. Check permission.',
notify_path
)
os.umask(cumask)
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run']('attrib -R "{0}"'.format(notify_path))
with salt.utils.files.fopen(notify_path, 'w+b') as fp_:
serial.dump(req, fp_)
except (IOError, OSError):
log.error(
'Unable to write state request file %s. Check permission.',
notify_path
)
return True
@ -1248,13 +1246,12 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
return ['Pillar failed to render with the following messages:'] + errors
orchestration_jid = kwargs.get('orchestration_jid')
umask = os.umask(0o77)
if kwargs.get('cache'):
if os.path.isfile(cfn):
with salt.utils.files.fopen(cfn, 'rb') as fp_:
high_ = serial.load(fp_)
return st_.state.call_high(high_, orchestration_jid)
os.umask(umask)
with salt.utils.files.set_umask(0o077):
if kwargs.get('cache'):
if os.path.isfile(cfn):
with salt.utils.files.fopen(cfn, 'rb') as fp_:
high_ = serial.load(fp_)
return st_.state.call_high(high_, orchestration_jid)
if isinstance(mods, six.string_types):
mods = mods.split(',')
@ -1281,36 +1278,36 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
if __salt__['config.option']('state_data', '') == 'terse' or kwargs.get('terse'):
ret = _filter_running(ret)
cache_file = os.path.join(__opts__['cachedir'], 'sls.p')
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run'](['attrib', '-R', cache_file], python_shell=False)
with salt.utils.files.fopen(cache_file, 'w+b') as fp_:
serial.dump(ret, fp_)
except (IOError, OSError):
log.error(
'Unable to write to SLS cache file %s. Check permission.',
cache_file
)
_set_retcode(ret, high_)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
__salt__['cmd.run'](['attrib', '-R', cache_file], python_shell=False)
with salt.utils.files.fopen(cache_file, 'w+b') as fp_:
serial.dump(ret, fp_)
except (IOError, OSError):
log.error(
'Unable to write to SLS cache file %s. Check permission.',
cache_file
)
_set_retcode(ret, high_)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
try:
with salt.utils.fopen(cfn, 'w+b') as fp_:
try:
serial.dump(high_, fp_)
except TypeError:
# Can't serialize pydsl
pass
except (IOError, OSError):
log.error(
'Unable to write to highstate cache file %s. Do you have permissions?',
cfn
)
try:
with salt.utils.files.fopen(cfn, 'w+b') as fp_:
try:
serial.dump(high_, fp_)
except TypeError:
# Can't serialize pydsl
pass
except (IOError, OSError):
log.error(
'Unable to write to highstate cache file %s. Do you have permissions?',
cfn
)
os.umask(cumask)
_snapper_post(opts, kwargs.get('__pub_jid', 'called localy'), snapper_pre)
return ret


@ -98,7 +98,7 @@ def _find_utmp():
result[os.stat(utmp).st_mtime] = utmp
except Exception:
pass
if result > 0:
if len(result):
return result[sorted(result).pop()]
else:
return False
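For context on this one-line fix: result is a dict mapping mtimes to candidate utmp paths, and comparing a dict to an integer raises a TypeError on Python 3 (on Python 2 the outcome has nothing to do with the dict's contents), so checking the length is the portable way to ask whether any candidate was found. A minimal illustration with a made-up entry:

    result = {1520352000.0: '/var/run/utmp'}
    if len(result):          # true when at least one candidate was found
        newest = result[sorted(result).pop()]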


@ -756,28 +756,27 @@ def write_pem(text, path, overwrite=True, pem_type=None):
"-----BEGIN CERTIFICATE-----MIIGMzCCBBugA..." \\
path=/etc/pki/mycert.crt
'''
old_umask = os.umask(0o77)
text = get_pem_entry(text, pem_type=pem_type)
_dhparams = ''
_private_key = ''
if pem_type and pem_type == 'CERTIFICATE' and os.path.isfile(path) and \
not overwrite:
_filecontents = _text_or_file(path)
try:
_dhparams = get_pem_entry(_filecontents, 'DH PARAMETERS')
except salt.exceptions.SaltInvocationError:
pass
try:
_private_key = get_pem_entry(_filecontents, '(?:RSA )?PRIVATE KEY')
except salt.exceptions.SaltInvocationError:
pass
with salt.utils.files.fopen(path, 'w') as _fp:
if pem_type and pem_type == 'CERTIFICATE' and _private_key:
_fp.write(salt.utils.stringutils.to_str(_private_key))
_fp.write(text)
if pem_type and pem_type == 'CERTIFICATE' and _dhparams:
_fp.write(salt.utils.stringutils.to_str(_dhparams))
os.umask(old_umask)
with salt.utils.files.set_umask(0o077):
text = get_pem_entry(text, pem_type=pem_type)
_dhparams = ''
_private_key = ''
if pem_type and pem_type == 'CERTIFICATE' and os.path.isfile(path) and \
not overwrite:
_filecontents = _text_or_file(path)
try:
_dhparams = get_pem_entry(_filecontents, 'DH PARAMETERS')
except salt.exceptions.SaltInvocationError:
pass
try:
_private_key = get_pem_entry(_filecontents, '(?:RSA )?PRIVATE KEY')
except salt.exceptions.SaltInvocationError:
pass
with salt.utils.files.fopen(path, 'w') as _fp:
if pem_type and pem_type == 'CERTIFICATE' and _private_key:
_fp.write(salt.utils.stringutils.to_str(_private_key))
_fp.write(text)
if pem_type and pem_type == 'CERTIFICATE' and _dhparams:
_fp.write(salt.utils.stringutils.to_str(_dhparams))
return 'PEM written to {0}'.format(path)


@ -307,9 +307,9 @@ class EventListener(object):
'''
if request not in self.request_map:
return
for tag, future in self.request_map[request]:
for tag, matcher, future in self.request_map[request]:
# timeout the future
self._timeout_future(tag, future)
self._timeout_future(tag, matcher, future)
# remove the timeout
if future in self.timeout_map:
tornado.ioloop.IOLoop.current().remove_timeout(self.timeout_map[future])
@ -317,9 +317,22 @@ class EventListener(object):
del self.request_map[request]
@staticmethod
def prefix_matcher(mtag, tag):
if mtag is None or tag is None:
raise TypeError('mtag or tag can not be None')
return mtag.startswith(tag)
@staticmethod
def exact_matcher(mtag, tag):
if mtag is None or tag is None:
raise TypeError('mtag or tag can not be None')
return mtag == tag
def get_event(self,
request,
tag='',
matcher=prefix_matcher.__func__,
callback=None,
timeout=None
):
@ -339,43 +352,52 @@ class EventListener(object):
tornado.ioloop.IOLoop.current().add_callback(callback, future)
future.add_done_callback(handle_future)
# add this tag and future to the callbacks
self.tag_map[tag].append(future)
self.request_map[request].append((tag, future))
self.tag_map[(tag, matcher)].append(future)
self.request_map[request].append((tag, matcher, future))
if timeout:
timeout_future = tornado.ioloop.IOLoop.current().call_later(timeout, self._timeout_future, tag, future)
timeout_future = tornado.ioloop.IOLoop.current().call_later(timeout, self._timeout_future, tag, matcher, future)
self.timeout_map[future] = timeout_future
return future
def _timeout_future(self, tag, future):
def _timeout_future(self, tag, matcher, future):
'''
Timeout a specific future
'''
if tag not in self.tag_map:
if (tag, matcher) not in self.tag_map:
return
if not future.done():
future.set_exception(TimeoutException())
self.tag_map[tag].remove(future)
if len(self.tag_map[tag]) == 0:
del self.tag_map[tag]
self.tag_map[(tag, matcher)].remove(future)
if len(self.tag_map[(tag, matcher)]) == 0:
del self.tag_map[(tag, matcher)]
def _handle_event_socket_recv(self, raw):
'''
Callback for events on the event sub socket
'''
mtag, data = self.event.unpack(raw, self.event.serial)
# see if we have any futures that need this info:
for tag_prefix, futures in six.iteritems(self.tag_map):
if mtag.startswith(tag_prefix):
for future in futures:
if future.done():
continue
future.set_result({'data': data, 'tag': mtag})
self.tag_map[tag_prefix].remove(future)
if future in self.timeout_map:
tornado.ioloop.IOLoop.current().remove_timeout(self.timeout_map[future])
del self.timeout_map[future]
for (tag, matcher), futures in six.iteritems(self.tag_map):
try:
is_matched = matcher(mtag, tag)
except Exception as e:
logger.error('Failed to run a matcher.', exc_info=True)
is_matched = False
if not is_matched:
continue
for future in futures:
if future.done():
continue
future.set_result({'data': data, 'tag': mtag})
self.tag_map[(tag, matcher)].remove(future)
if future in self.timeout_map:
tornado.ioloop.IOLoop.current().remove_timeout(self.timeout_map[future])
del self.timeout_map[future]
class BaseSaltAPIHandler(tornado.web.RequestHandler): # pylint: disable=W0223
@ -924,64 +946,83 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
if self.application.opts['order_masters']:
syndic_min_wait = tornado.gen.sleep(self.application.opts['syndic_wait'])
job_not_running = self.job_not_running(pub_data['jid'],
chunk['tgt'],
f_call['kwargs']['tgt_type'],
minions_remaining=minions_remaining
)
# To ensure job_not_running and all_return are terminated by each other, communicate using a future
is_finished = Future()
job_not_running_future = self.job_not_running(pub_data['jid'],
chunk['tgt'],
f_call['kwargs']['tgt_type'],
is_finished,
minions_remaining=list(minions_remaining),
)
# if we have a min_wait, do that
if syndic_min_wait is not None:
yield syndic_min_wait
# we are completed when either all minions return or the job isn't running anywhere
chunk_ret = yield self.all_returns(pub_data['jid'],
finish_futures=[job_not_running],
minions_remaining=minions_remaining,
)
raise tornado.gen.Return(chunk_ret)
all_return_future = self.all_returns(pub_data['jid'],
is_finished,
minions_remaining=list(minions_remaining),
)
yield job_not_running_future
raise tornado.gen.Return((yield all_return_future))
@tornado.gen.coroutine
def all_returns(self,
jid,
finish_futures=None,
is_finished,
minions_remaining=None,
):
'''
Return a future which will complete once all returns are completed
(according to minions_remaining), or one of the passed in "finish_futures" completes
(according to minions_remaining), or one of the passed in "is_finished" completes
'''
if finish_futures is None:
finish_futures = []
if minions_remaining is None:
minions_remaining = []
ret_tag = tagify([jid, 'ret'], 'job')
chunk_ret = {}
minion_events = {}
for minion in minions_remaining:
tag = tagify([jid, 'ret', minion], 'job')
minion_event = self.application.event_listener.get_event(self,
tag=tag,
matcher=EventListener.exact_matcher,
timeout=self.application.opts['timeout'])
minion_events[minion_event] = minion
while True:
ret_event = self.application.event_listener.get_event(self,
tag=ret_tag,
)
f = yield Any([ret_event] + finish_futures)
if f in finish_futures:
raise tornado.gen.Return(chunk_ret)
event = f.result()
chunk_ret[event['data']['id']] = event['data']['return']
# its possible to get a return that wasn't in the minion_remaining list
f = yield Any(minion_events.keys() + [is_finished])
try:
minions_remaining.remove(event['data']['id'])
if f is is_finished:
for event in minion_events:
if not event.done():
event.set_result(None)
raise tornado.gen.Return(chunk_ret)
f_result = f.result()
chunk_ret[f_result['data']['id']] = f_result['data']['return']
except TimeoutException:
pass
# clear finished event future
try:
minions_remaining.remove(minion_events[f])
del minion_events[f]
except ValueError:
pass
if len(minions_remaining) == 0:
if not is_finished.done():
is_finished.set_result(True)
raise tornado.gen.Return(chunk_ret)
@tornado.gen.coroutine
def job_not_running(self,
jid,
tgt,
tgt_type,
minions_remaining=None,
):
jid,
tgt,
tgt_type,
is_finished,
minions_remaining=None,
):
'''
Return a future which will complete once jid (passed in) is no longer
running on tgt
@ -998,12 +1039,21 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
minion_running = False
while True:
try:
event = yield self.application.event_listener.get_event(self,
tag=ping_tag,
timeout=self.application.opts['gather_job_timeout'],
)
event = self.application.event_listener.get_event(self,
tag=ping_tag,
timeout=self.application.opts['gather_job_timeout'],
)
f = yield Any([event, is_finished])
# When finished entire routine, cleanup other futures and return result
if f is is_finished:
if not event.done():
event.set_result(None)
raise tornado.gen.Return(True)
event = f.result()
except TimeoutException:
if not minion_running:
if not is_finished.done():
is_finished.set_result(True)
raise tornado.gen.Return(True)
else:
ping_pub_data = yield self.saltclients['local'](tgt,
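The EventListener changes above key the tag map on (tag, matcher) pairs so a caller can ask for an exact tag match instead of the default prefix match; the per-minion return tags in all_returns() use the new exact matcher, while the prefix behaviour remains the default. A condensed sketch of the two call styles, in which the handler, tag, and opts names are illustrative:

    # Default: fires for any event whose tag starts with the given prefix
    future = event_listener.get_event(handler, tag='salt/job/')

    # New: fires only on an exact tag match, e.g. one minion's return event
    future = event_listener.get_event(handler,
                                      tag=ret_tag,
                                      matcher=EventListener.exact_matcher,
                                      timeout=opts['timeout'])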


@ -3890,24 +3890,23 @@ class BaseHighState(object):
return err
if not high:
return ret
cumask = os.umask(0o77)
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
self.state.functions['cmd.run'](
['attrib', '-R', cfn],
python_shell=False,
output_loglevel='quiet')
with salt.utils.files.fopen(cfn, 'w+b') as fp_:
try:
self.serial.dump(high, fp_)
except TypeError:
# Can't serialize pydsl
pass
except (IOError, OSError):
log.error('Unable to write to "state.highstate" cache file %s', cfn)
with salt.utils.files.set_umask(0o077):
try:
if salt.utils.platform.is_windows():
# Make sure cache file isn't read-only
self.state.functions['cmd.run'](
['attrib', '-R', cfn],
python_shell=False,
output_loglevel='quiet')
with salt.utils.files.fopen(cfn, 'w+b') as fp_:
try:
self.serial.dump(high, fp_)
except TypeError:
# Can't serialize pydsl
pass
except (IOError, OSError):
log.error('Unable to write to "state.highstate" cache file %s', cfn)
os.umask(cumask)
return self.state.call_high(high, orchestration_jid)
def compile_highstate(self):


@ -199,8 +199,7 @@ executed when the state it is watching changes. Example:
``cmd.wait`` itself does not do anything; all functionality is inside its ``mod_watch``
function, which is called by ``watch`` on changes.
``cmd.wait`` will be deprecated in future due to the confusion it causes. The
preferred format is using the :ref:`onchanges Requisite <requisites-onchanges>`, which
The preferred format is using the :ref:`onchanges Requisite <requisites-onchanges>`, which
works on ``cmd.run`` as well as on any other state. The example would then look as follows:
.. code-block:: yaml


@ -63,7 +63,7 @@ def send_message(name,
- api_url: https://hipchat.myteam.com
- api_key: peWcBiMOS9HrZG15peWcBiMOS9HrZG15
- api_version: v1
- color: green
- message_color: green
- notify: True
The following parameters are required:
@ -96,7 +96,7 @@ def send_message(name,
The api version for Hipchat to use,
if not specified in the configuration options of master or minion.
color
message_color
The color the Hipchat message should be displayed in. One of the following, default: yellow
"yellow", "red", "green", "purple", "gray", or "random".


@ -341,10 +341,12 @@ def mounted(name,
mount_invisible_keys = [
'actimeo',
'comment',
'credentials',
'direct-io-mode',
'password',
'retry',
'port',
'retry',
'secretfile',
]
if extra_mount_invisible_keys:


@ -180,8 +180,7 @@ def _check_pkg_version_format(pkg):
def _check_if_installed(prefix, state_pkg_name, version_spec,
ignore_installed, force_reinstall,
upgrade, user, cwd, bin_env):
upgrade, user, cwd, bin_env, env_vars):
# result: None means the command failed to run
# result: True means the package is installed
# result: False means the package is not installed
@ -190,7 +189,8 @@ def _check_if_installed(prefix, state_pkg_name, version_spec,
# Check if the requested package is already installed.
try:
pip_list = __salt__['pip.list'](prefix, bin_env=bin_env,
user=user, cwd=cwd)
user=user, cwd=cwd,
env_vars=env_vars)
prefix_realname = _find_key(prefix, pip_list)
except (CommandNotFoundError, CommandExecutionError) as err:
ret['result'] = None
@ -682,7 +682,7 @@ def installed(name,
version_spec = version_spec
out = _check_if_installed(prefix, state_pkg_name, version_spec,
ignore_installed, force_reinstall,
upgrade, user, cwd, bin_env)
upgrade, user, cwd, bin_env, env_vars)
# If _check_if_installed result is None, something went wrong with
# the command running. This way we keep stateful output.
if out['result'] is None:
@ -823,7 +823,8 @@ def installed(name,
# Case for packages that are not an URL
if prefix:
pipsearch = __salt__['pip.list'](prefix, bin_env,
user=user, cwd=cwd)
user=user, cwd=cwd,
env_vars=env_vars)
# If we didnt find the package in the system after
# installing it report it


@ -21,6 +21,7 @@ import errno
import salt.crypt
import salt.utils.async
import salt.utils.event
import salt.utils.files
import salt.utils.platform
import salt.utils.process
import salt.utils.verify
@ -1386,11 +1387,8 @@ class TCPPubServerChannel(salt.transport.server.PubServerChannel):
# Securely create socket
log.info('Starting the Salt Puller on %s', pull_uri)
old_umask = os.umask(0o177)
try:
with salt.utils.files.set_umask(0o177):
pull_sock.start()
finally:
os.umask(old_umask)
# run forever
try:


@ -19,6 +19,7 @@ from random import randint
import salt.auth
import salt.crypt
import salt.utils.event
import salt.utils.files
import salt.utils.minions
import salt.utils.process
import salt.utils.stringutils
@ -806,11 +807,8 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel):
# Securely create socket
log.info('Starting the Salt Puller on %s', pull_uri)
old_umask = os.umask(0o177)
try:
with salt.utils.files.set_umask(0o177):
pull_sock.bind(pull_uri)
finally:
os.umask(old_umask)
try:
while True:


@ -142,7 +142,8 @@ def yamlify_arg(arg):
return arg
if arg.strip() == '':
# Because YAML loads empty strings as None, we return the original string
# Because YAML loads empty (or all whitespace) strings as None, we
# return the original string
# >>> import yaml
# >>> yaml.load('') is None
# True
@ -151,6 +152,9 @@ def yamlify_arg(arg):
return arg
elif '_' in arg and all([x in '0123456789_' for x in arg.strip()]):
# When the stripped string includes just digits and underscores, the
# underscores are ignored and the digits are combined together and
# loaded as an int. We don't want that, so return the original value.
return arg
try:
@ -177,6 +181,14 @@ def yamlify_arg(arg):
else:
return arg
elif isinstance(arg, list):
# lists must be wrapped in brackets
if (isinstance(original_arg, six.string_types) and
not original_arg.startswith('[')):
return original_arg
else:
return arg
elif arg is None \
or isinstance(arg, (list, float, six.integer_types, six.string_types)):
# yaml.safe_load will load '|' as '', don't let it do that.


@ -75,6 +75,7 @@ import salt.payload
import salt.utils.async
import salt.utils.cache
import salt.utils.dicttrim
import salt.utils.files
import salt.utils.platform
import salt.utils.process
import salt.utils.stringutils
@ -1018,12 +1019,9 @@ class AsyncEventPublisher(object):
)
log.info('Starting pull socket on {0}'.format(epull_uri))
old_umask = os.umask(0o177)
try:
with salt.utils.files.set_umask(0o177):
self.publisher.start()
self.puller.start()
finally:
os.umask(old_umask)
def handle_publish(self, package, _):
'''
@ -1106,8 +1104,7 @@ class EventPublisher(salt.utils.process.SignalHandlingMultiprocessingProcess):
)
# Start the master event publisher
old_umask = os.umask(0o177)
try:
with salt.utils.files.set_umask(0o177):
self.publisher.start()
self.puller.start()
if (self.opts['ipc_mode'] != 'tcp' and (
@ -1115,8 +1112,6 @@ class EventPublisher(salt.utils.process.SignalHandlingMultiprocessingProcess):
self.opts['external_auth'])):
os.chmod(os.path.join(
self.opts['sock_dir'], 'master_event_pub.ipc'), 0o666)
finally:
os.umask(old_umask)
# Make sure the IO loop and respective sockets are closed and
# destroyed


@ -11,6 +11,7 @@ import shutil
# Import salt libs
import salt.fileclient
import salt.utils.files
import salt.utils.hashutils
import salt.utils.path
import salt.utils.url
@ -71,85 +72,83 @@ def sync(opts,
remote = set()
source = salt.utils.url.create('_' + form)
mod_dir = os.path.join(opts['extension_modules'], '{0}'.format(form))
cumask = os.umask(0o77)
touched = False
try:
if not os.path.isdir(mod_dir):
log.info('Creating module dir \'%s\'', mod_dir)
try:
os.makedirs(mod_dir)
except (IOError, OSError):
log.error(
'Cannot create cache module directory %s. Check '
'permissions.', mod_dir
)
fileclient = salt.fileclient.get_file_client(opts)
for sub_env in saltenv:
log.info(
'Syncing %s for environment \'%s\'', form, sub_env
)
cache = []
log.info(
'Loading cache from {0}, for {1})'.format(source, sub_env)
)
# Grab only the desired files (.py, .pyx, .so)
cache.extend(
fileclient.cache_dir(
source, sub_env, include_empty=False,
include_pat=r'E@\.(pyx?|so|zip)$', exclude_pat=None
)
)
local_cache_dir = os.path.join(
opts['cachedir'],
'files',
sub_env,
'_{0}'.format(form)
with salt.utils.files.set_umask(0o077):
try:
if not os.path.isdir(mod_dir):
log.info('Creating module dir \'%s\'', mod_dir)
try:
os.makedirs(mod_dir)
except (IOError, OSError):
log.error(
'Cannot create cache module directory %s. Check '
'permissions.', mod_dir
)
log.debug('Local cache dir: \'%s\'', local_cache_dir)
for fn_ in cache:
relpath = os.path.relpath(fn_, local_cache_dir)
relname = os.path.splitext(relpath)[0].replace(os.sep, '.')
if extmod_whitelist and form in extmod_whitelist and relname not in extmod_whitelist[form]:
continue
if extmod_blacklist and form in extmod_blacklist and relname in extmod_blacklist[form]:
continue
remote.add(relpath)
dest = os.path.join(mod_dir, relpath)
log.info('Copying \'%s\' to \'%s\'', fn_, dest)
if os.path.isfile(dest):
# The file is present, if the sum differs replace it
hash_type = opts.get('hash_type', 'md5')
src_digest = salt.utils.hashutils.get_hash(fn_, hash_type)
dst_digest = salt.utils.hashutils.get_hash(dest, hash_type)
if src_digest != dst_digest:
# The downloaded file differs, replace!
fileclient = salt.fileclient.get_file_client(opts)
for sub_env in saltenv:
log.info(
'Syncing %s for environment \'%s\'', form, sub_env
)
cache = []
log.info(
'Loading cache from {0}, for {1})'.format(source, sub_env)
)
# Grab only the desired files (.py, .pyx, .so)
cache.extend(
fileclient.cache_dir(
source, sub_env, include_empty=False,
include_pat=r'E@\.(pyx?|so|zip)$', exclude_pat=None
)
)
local_cache_dir = os.path.join(
opts['cachedir'],
'files',
sub_env,
'_{0}'.format(form)
)
log.debug('Local cache dir: \'%s\'', local_cache_dir)
for fn_ in cache:
relpath = os.path.relpath(fn_, local_cache_dir)
relname = os.path.splitext(relpath)[0].replace(os.sep, '.')
if extmod_whitelist and form in extmod_whitelist and relname not in extmod_whitelist[form]:
continue
if extmod_blacklist and form in extmod_blacklist and relname in extmod_blacklist[form]:
continue
remote.add(relpath)
dest = os.path.join(mod_dir, relpath)
log.info('Copying \'%s\' to \'%s\'', fn_, dest)
if os.path.isfile(dest):
# The file is present, if the sum differs replace it
hash_type = opts.get('hash_type', 'md5')
src_digest = salt.utils.hashutils.get_hash(fn_, hash_type)
dst_digest = salt.utils.hashutils.get_hash(dest, hash_type)
if src_digest != dst_digest:
# The downloaded file differs, replace!
shutil.copyfile(fn_, dest)
ret.append('{0}.{1}'.format(form, relname))
else:
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(fn_, dest)
ret.append('{0}.{1}'.format(form, relname))
else:
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(fn_, dest)
ret.append('{0}.{1}'.format(form, relname))
touched = bool(ret)
if opts['clean_dynamic_modules'] is True:
current = set(_listdir_recursively(mod_dir))
for fn_ in current - remote:
full = os.path.join(mod_dir, fn_)
if os.path.isfile(full):
touched = True
os.remove(full)
# Cleanup empty dirs
while True:
emptydirs = _list_emptydirs(mod_dir)
if not emptydirs:
break
for emptydir in emptydirs:
touched = True
shutil.rmtree(emptydir, ignore_errors=True)
except Exception as exc:
log.error('Failed to sync %s module: %s', form, exc)
finally:
os.umask(cumask)
touched = bool(ret)
if opts['clean_dynamic_modules'] is True:
current = set(_listdir_recursively(mod_dir))
for fn_ in current - remote:
full = os.path.join(mod_dir, fn_)
if os.path.isfile(full):
touched = True
os.remove(full)
# Cleanup empty dirs
while True:
emptydirs = _list_emptydirs(mod_dir)
if not emptydirs:
break
for emptydir in emptydirs:
touched = True
shutil.rmtree(emptydir, ignore_errors=True)
except Exception as exc:
log.error('Failed to sync %s module: %s', form, exc)
return ret, touched


@ -299,20 +299,29 @@ def wait_lock(path, lock_fn=None, timeout=5, sleep=0.1, time_start=None):
log.trace('Write lock for %s (%s) released', path, lock_fn)
def get_umask():
'''
Returns the current umask
'''
ret = os.umask(0) # pylint: disable=blacklisted-function
os.umask(ret) # pylint: disable=blacklisted-function
return ret
@contextlib.contextmanager
def set_umask(mask):
'''
Temporarily set the umask and restore once the contextmanager exits
'''
if salt.utils.platform.is_windows():
# Don't attempt on Windows
if mask is None or salt.utils.platform.is_windows():
# Don't attempt on Windows, or if no mask was passed
yield
else:
try:
orig_mask = os.umask(mask)
orig_mask = os.umask(mask) # pylint: disable=blacklisted-function
yield
finally:
os.umask(orig_mask)
os.umask(orig_mask) # pylint: disable=blacklisted-function
def fopen(*args, **kwargs):
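Two details of the updated helpers above are worth noting: get_umask() reads the current umask without permanently changing it (it sets and immediately restores), and set_umask(None) now yields without touching the umask at all, which is also the Windows behaviour. That pass-through is what lets callers such as manage_file() write a single with-block instead of branching. A small sketch; the mask values and the create_the_file() placeholder are examples only:

    current = salt.utils.files.get_umask()        # read-only peek at the umask

    with salt.utils.files.set_umask(0o077 if mode else None):
        create_the_file()                         # placeholder for the guarded work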


@ -715,9 +715,8 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
# verify the default
if logfile is not None and not logfile.startswith(('tcp://', 'udp://', 'file://')):
# Logfile is not using Syslog, verify
current_umask = os.umask(0o027)
verify_files([logfile], self.config['user'])
os.umask(current_umask)
with salt.utils.files.set_umask(0o027):
verify_files([logfile], self.config['user'])
if logfile is None:
# Use the default setting if the logfile wasn't explicity set


@ -80,7 +80,7 @@ def daemonize(redirect_out=True):
os.chdir('/')
# noinspection PyArgumentList
os.setsid()
os.umask(18)
os.umask(0o022) # pylint: disable=blacklisted-function
# do second fork
try:


@ -225,7 +225,7 @@ def chugid_and_umask(runas, umask):
if runas is not None and runas != getpass.getuser():
chugid(runas)
if umask is not None:
os.umask(umask)
os.umask(umask) # pylint: disable=blacklisted-function
def get_default_group(user):


@ -230,12 +230,11 @@ def verify_env(
continue
if not os.path.isdir(dir_):
try:
cumask = os.umask(18) # 077
os.makedirs(dir_)
with salt.utils.files.set_umask(0o022):
os.makedirs(dir_)
# If starting the process as root, chown the new dirs
if os.getuid() == 0:
os.chown(dir_, uid, gid)
os.umask(cumask)
except OSError as err:
msg = 'Failed to create directory path "{0}" - {1}\n'
sys.stderr.write(msg.format(dir_, err))


@ -1,6 +1,6 @@
profitbricks-config:
username: ''
password: ''
username: 'foo'
password: 'bar'
datacenter_id: 74d65326-d9b7-41c3-9f51-73ffe0fcd16d
driver: profitbricks
ssh_public_key: ~/.ssh/id_rsa.pub


@ -0,0 +1,38 @@
{%- set virtualenv_base = salt['runtests_helpers.get_salt_temp_dir_for_path']('virtualenv-12-base-1') -%}
{%- set virtualenv_test = salt['runtests_helpers.get_salt_temp_dir_for_path']('issue-46127-pip-env-vars') -%}
{{ virtualenv_base }}:
virtualenv.managed:
- system_site_packages: False
- distribute: True
install_older_venv_1:
pip.installed:
- name: 'virtualenv < 13.0'
- bin_env: {{ virtualenv_base }}
- require:
- virtualenv: {{ virtualenv_base }}
# For this test we need to make sure that the virtualenv used in the
# 'issue-46127-setup' pip.installed state below was created using
# virtualenv < 13.0. virtualenvs created using later versions make
# packages with custom setuptools prefixes relative to the virtualenv
# itself, which makes the use of env_vars obsolete.
# Thus, the two states above ensure that the 'base' venv has
# a version old enough to exhibit the behavior we want to test.
setup_test_virtualenv_1:
cmd.run:
- name: {{ virtualenv_base }}/bin/virtualenv {{ virtualenv_test }}
- onchanges:
- pip: install_older_venv_1
issue-46127-setup:
pip.installed:
- name: 'carbon < 1.3'
- no_deps: True
- env_vars:
PYTHONPATH: "/opt/graphite/lib/:/opt/graphite/webapp/"
- bin_env: {{ virtualenv_test }}
- require:
- cmd: setup_test_virtualenv_1


@ -363,58 +363,56 @@ class CallTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin
def test_issue_14979_output_file_permissions(self):
output_file = os.path.join(TMP, 'issue-14979')
current_umask = os.umask(0o077)
try:
# Let's create an initial output file with some data
self.run_script(
'salt-call',
'-c {0} --output-file={1} -g'.format(
self.get_config_dir(),
output_file
),
catch_stderr=True,
with_retcode=True
)
stat1 = os.stat(output_file)
with salt.utils.files.set_umask(0o077):
try:
# Let's create an initial output file with some data
self.run_script(
'salt-call',
'-c {0} --output-file={1} -g'.format(
self.get_config_dir(),
output_file
),
catch_stderr=True,
with_retcode=True
)
stat1 = os.stat(output_file)
# Let's change umask
os.umask(0o777)
# Let's change umask
os.umask(0o777) # pylint: disable=blacklisted-function
self.run_script(
'salt-call',
'-c {0} --output-file={1} --output-file-append -g'.format(
self.get_config_dir(),
output_file
),
catch_stderr=True,
with_retcode=True
)
stat2 = os.stat(output_file)
self.assertEqual(stat1.st_mode, stat2.st_mode)
# Data was appended to file
self.assertTrue(stat1.st_size < stat2.st_size)
self.run_script(
'salt-call',
'-c {0} --output-file={1} --output-file-append -g'.format(
self.get_config_dir(),
output_file
),
catch_stderr=True,
with_retcode=True
)
stat2 = os.stat(output_file)
self.assertEqual(stat1.st_mode, stat2.st_mode)
# Data was appended to file
self.assertTrue(stat1.st_size < stat2.st_size)
# Let's remove the output file
os.unlink(output_file)
# Not appending data
self.run_script(
'salt-call',
'-c {0} --output-file={1} -g'.format(
self.get_config_dir(),
output_file
),
catch_stderr=True,
with_retcode=True
)
stat3 = os.stat(output_file)
# Mode must have changed since we're creating a new log file
self.assertNotEqual(stat1.st_mode, stat3.st_mode)
finally:
if os.path.exists(output_file):
# Let's remove the output file
os.unlink(output_file)
# Restore umask
os.umask(current_umask)
# Not appending data
self.run_script(
'salt-call',
'-c {0} --output-file={1} -g'.format(
self.get_config_dir(),
output_file
),
catch_stderr=True,
with_retcode=True
)
stat3 = os.stat(output_file)
# Mode must have changed since we're creating a new log file
self.assertNotEqual(stat1.st_mode, stat3.st_mode)
finally:
if os.path.exists(output_file):
os.unlink(output_file)
@skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
def test_42116_cli_pillar_override(self):


@ -524,3 +524,78 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
finally:
if os.path.isdir(venv_dir):
shutil.rmtree(venv_dir)
def test_46127_pip_env_vars(self):
'''
Test that checks if env_vars passed to pip.installed are also passed
to pip.freeze while checking for existing installations
'''
# This issue is most easily checked while installing carbon
# Much of the code here comes from the test_weird_install function above
ographite = '/opt/graphite'
if os.path.isdir(ographite):
self.skipTest(
'You already have \'{0}\'. This test would overwrite this '
'directory'.format(ographite)
)
try:
os.makedirs(ographite)
except OSError as err:
if err.errno == errno.EACCES:
# Permission denied
self.skipTest(
'You don\'t have the required permissions to run this test'
)
finally:
if os.path.isdir(ographite):
shutil.rmtree(ographite)
venv_dir = os.path.join(RUNTIME_VARS.TMP, 'issue-46127-pip-env-vars')
try:
# We may be able to remove this, I had to add it because the custom
# modules from the test suite weren't available in the jinja
# context when running the call to state.sls that comes after.
self.run_function('saltutil.sync_modules')
# Since we don't have the virtualenv created, pip.installed will
# throw an error.
ret = self.run_function(
'state.sls', mods='issue-46127-pip-env-vars'
)
self.assertSaltTrueReturn(ret)
for key in six.iterkeys(ret):
self.assertTrue(ret[key]['result'])
if ret[key]['name'] != 'carbon < 1.3':
continue
self.assertEqual(
ret[key]['comment'],
'All packages were successfully installed'
)
break
else:
raise Exception('Expected state did not run')
# Run the state again. Now the already installed message should
# appear
ret = self.run_function(
'state.sls', mods='issue-46127-pip-env-vars'
)
self.assertSaltTrueReturn(ret)
# We cannot use assertInSaltComment here because we need to skip
# some of the state return parts
for key in six.iterkeys(ret):
self.assertTrue(ret[key]['result'])
# As we are re-running the formula, some states will not be run
# and "name" may or may not be present, so we use .get() pattern
if ret[key].get('name', '') != 'carbon < 1.3':
continue
self.assertEqual(
ret[key]['comment'],
('Python package carbon < 1.3 was already installed\n'
'All packages were successfully installed'))
break
else:
raise Exception('Expected state did not run')
finally:
if os.path.isdir(ographite):
shutil.rmtree(ographite)
if os.path.isdir(venv_dir):
shutil.rmtree(venv_dir)


@ -937,6 +937,27 @@ class PipTestCase(TestCase, LoaderModuleMockMixin):
)
self.assertEqual(ret, eggs)
mock = MagicMock(
return_value={
'retcode': 0,
'stdout': '\n'.join(eggs)
}
)
# Passing env_vars passes them to underlying command?
with patch.dict(pip.__salt__, {'cmd.run_all': mock}):
with patch('salt.modules.pip.version',
MagicMock(return_value='6.1.1')):
ret = pip.freeze(env_vars={"foo": "bar"})
mock.assert_called_once_with(
['pip', 'freeze'],
cwd=None,
runas=None,
use_vt=False,
python_shell=False,
env={"foo": "bar"}
)
self.assertEqual(ret, eggs)
# Non zero returncode raises exception?
mock = MagicMock(return_value={'retcode': 1, 'stderr': 'CABOOOOMMM!'})
with patch.dict(pip.__salt__, {'cmd.run_all': mock}):


@ -91,7 +91,7 @@ class TestEventListener(AsyncTestCase):
{'sock_dir': SOCK_DIR,
'transport': 'zeromq'})
self._finished = False # fit to event_listener's behavior
event_future = event_listener.get_event(self, 'evt1', self.stop) # get an event future
event_future = event_listener.get_event(self, 'evt1', callback=self.stop) # get an event future
me.fire_event({'data': 'foo2'}, 'evt2') # fire an event we don't want
me.fire_event({'data': 'foo1'}, 'evt1') # fire an event we do want
self.wait() # wait for the future


@ -3,6 +3,7 @@
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
from collections import namedtuple
import logging
# Import Salt Libs
from salt.exceptions import SaltInvocationError
@ -19,6 +20,8 @@ from tests.support.mock import (
patch
)
log = logging.getLogger(__name__)
class ArgsTestCase(TestCase):
'''
@ -202,3 +205,63 @@ class ArgsTestCase(TestCase):
self.assertEqual(fun, 'amod.afunc')
self.assertEqual(args, ['double " single \'', 'double " single \''])
self.assertEqual(kwargs, {'kw1': 'equal=equal', 'kw2': 'val2'})
def test_yamlify_arg(self):
'''
Test that we properly yamlify CLI input. In several of the tests below
assertIs is used instead of assertEqual. This is because we want to
confirm that the return value is not a copy of the original, but the
same instance as the original.
'''
def _yamlify_arg(item):
log.debug('Testing yamlify_arg with %r', item)
return salt.utils.args.yamlify_arg(item)
# Make sure non-strings are just returned back
for item in (True, False, None, 123, 45.67, ['foo'], {'foo': 'bar'}):
self.assertIs(_yamlify_arg(item), item)
# Make sure whitespace-only isn't loaded as None
for item in ('', '\t', ' '):
self.assertIs(_yamlify_arg(item), item)
# This value would be loaded as an int (123), the underscores would be
# ignored. Test that we identify this case and return the original
# value.
item = '1_2_3'
self.assertIs(_yamlify_arg(item), item)
# The '#' is treated as a comment when not part of a data structure, we
# don't want that behavior
for item in ('# hash at beginning', 'Hello world! # hash elsewhere'):
self.assertIs(_yamlify_arg(item), item)
# However we _do_ want the # to be intact if it _is_ within a data
# structure.
item = '["foo", "bar", "###"]'
self.assertEqual(_yamlify_arg(item), ["foo", "bar", "###"])
item = '{"foo": "###"}'
self.assertEqual(_yamlify_arg(item), {"foo": "###"})
# The string "None" should load _as_ None
self.assertIs(_yamlify_arg('None'), None)
# Leading dashes, or strings containing colons, will result in lists
# and dicts, and we only want to load lists and dicts when the strings
# look like data structures.
for item in ('- foo', 'foo: bar'):
self.assertIs(_yamlify_arg(item), item)
# Make sure we don't load '|' as ''
item = '|'
self.assertIs(_yamlify_arg(item), item)
# Make sure we load ints, floats, and strings correctly
self.assertEqual(_yamlify_arg('123'), 123)
self.assertEqual(_yamlify_arg('45.67'), 45.67)
self.assertEqual(_yamlify_arg('foo'), 'foo')
# We tested list/dict loading above, but there is separate logic when
# the string contains a '#', so we need to test again here.
self.assertEqual(_yamlify_arg('["foo", "bar"]'), ["foo", "bar"])
self.assertEqual(_yamlify_arg('{"foo": "bar"}'), {"foo": "bar"})