mirror of https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00

Merge branch '2016.3' into 'develop'

Conflicts:
- salt/modules/blockdev.py
- salt/modules/ddns.py
- salt/modules/inspectlib/collector.py
- salt/modules/linux_sysctl.py
- salt/modules/status.py
- salt/modules/x509.py
- salt/utils/process.py
- salt/utils/verify.py
- tests/integration/__init__.py
- tests/integration/modules/state.py
- tests/integration/states/cmd.py

This commit is contained in: commit 1fe0b69a6c

66 changed files with 899 additions and 757 deletions
@@ -11,6 +11,44 @@ document</topics/development/logging>`, if all you're after is salt's logging

configurations, please continue reading.

.. conf_log:: log_levels

Log Levels
==========

The log levels are ordered numerically such that setting the log level to a
specific level will record all log statements at that level and higher. For
example, setting ``log_level: error`` will log statements at ``error``,
``critical``, and ``quiet`` levels, although nothing *should* be logged at
``quiet`` level.
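
For example, the numeric ordering above means that the following minimal
configuration records only ``error``, ``critical``, and ``quiet`` messages:

.. code-block:: yaml

    log_level: error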

Most of the logging levels are defined by default in Python's logging library
and can be found in the official `Python documentation
<https://docs.python.org/library/logging.html#levels>`_. Salt uses some more
levels in addition to the standard levels. All levels available in salt are
shown in the table below.

.. note::

    Python dependencies used by salt may define and use additional logging
    levels. For example, the Python 2 version of the ``multiprocessing``
    standard Python library `uses the levels
    <https://docs.python.org/2/library/multiprocessing.html#logging>`_
    ``subwarning``, 25 and ``subdebug``, 5.

======== ============= ========================================================================
Level    Numeric value Description
======== ============= ========================================================================
quiet    1000          Nothing should be logged at this level
critical 50            Critical errors
error    40            Errors
warning  30            Warnings
info     20            Normal log information
profile  15            Profiling information on salt performance
debug    10            Information useful for debugging both salt implementations and salt code
trace    5             More detailed code debugging information
garbage  1             Even more debugging information
all      0             Everything
======== ============= ========================================================================

Available Configuration Settings
================================


@@ -19,30 +57,29 @@ Available Configuration Settings

``log_file``
------------

The log records can be sent to a regular file, local path name, or network location.
Remote logging works best when configured to use rsyslogd(8) (e.g.: ``file:///dev/log``),
with rsyslogd(8) configured for network logging. The format for remote addresses is:
``<file|udp|tcp>://<host|socketpath>:<port-if-required>/<log-facility>``. Where ``log-facility`` is the symbolic name of a syslog facility as defined in the :ref:`SysLogHandler documentation <python2:logging.handlers.SysLogHandler.encodePriority>` . It defaults to ``LOG_USER``.

Default: Dependent of the binary being executed, for example, for ``salt-master``,
``/var/log/salt/master``.


The log records can be sent to a regular file, local path name, or network
location. Remote logging works best when configured to use rsyslogd(8) (e.g.:
``file:///dev/log``), with rsyslogd(8) configured for network logging. The
format for remote addresses is:
``<file|udp|tcp>://<host|socketpath>:<port-if-required>/<log-facility>``. Where
``log-facility`` is the symbolic name of a syslog facility as defined in the
:ref:`SysLogHandler documentation
<python2:logging.handlers.SysLogHandler.encodePriority>`. It defaults to
``LOG_USER``.

Default: Dependent on the binary being executed, for example, for
``salt-master``, ``/var/log/salt/master``.

Examples:

.. code-block:: yaml

    log_file: /var/log/salt/master

.. code-block:: yaml

    log_file: /var/log/salt/minion

.. code-block:: yaml

    log_file: file:///dev/log

@@ -55,8 +92,6 @@ Examples:

    log_file: udp://loghost:10514
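
A remote target can also name an explicit syslog facility. The example below is
an illustrative sketch only; the host name ``loghost`` is a placeholder, and the
``LOG_USER`` facility is simply the documented default spelled out:

.. code-block:: yaml

    log_file: udp://loghost:10514/LOG_USER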

.. conf_log:: log_level

``log_level``

@@ -73,11 +108,10 @@ The level of log record messages to send to the console. One of ``all``,

    log_level: warning

.. note::
    Add ``log_level: quiet```in salt configuration file to completely disable
    logging. In case of running salt in command line use``--log-level=quiet``
    Add ``log_level: quiet`` in salt configuration file to completely disable
    logging. In case of running salt in command line use ``--log-level=quiet``
    instead.


.. conf_log:: log_level_logfile

``log_level_logfile``

@@ -93,8 +127,6 @@ The level of messages to send to the log file. One of ``all``, ``garbage``,

    log_level_logfile: warning


.. conf_log:: log_datefmt

``log_datefmt``

@@ -109,8 +141,6 @@ formatting can be seen on :func:`time.strftime <python2:time.strftime>`.

    log_datefmt: '%H:%M:%S'


.. conf_log:: log_datefmt_logfile

``log_datefmt_logfile``

@@ -125,8 +155,6 @@ formatting can be seen on :func:`time.strftime <python2:time.strftime>`.

    log_datefmt_logfile: '%Y-%m-%d %H:%M:%S'


.. conf_log:: log_fmt_console

``log_fmt_console``

@@ -155,8 +183,6 @@ also provides these custom LogRecord attributes to colorize console log output:

    log_fmt_console: '[%(levelname)-8s] %(message)s'
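
A colorized console format is also possible with the custom attributes mentioned
above (the ``colorlevel`` attribute is set by ``SaltColorLogRecord``, which this
same diff touches below). The exact format string here is only an illustrative
sketch, not part of this change:

.. code-block:: yaml

    log_fmt_console: '%(colorlevel)s %(message)s'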

.. conf_log:: log_fmt_logfile

``log_fmt_logfile``

@@ -179,8 +205,6 @@ enclosing brackets ``[`` and ``]``:

    log_fmt_logfile: '%(asctime)s,%(msecs)03d [%(name)-17s][%(levelname)-8s] %(message)s'


.. conf_log:: log_granular_levels

``log_granular_levels``

@@ -198,7 +222,6 @@ at the ``debug`` level:

    'salt': 'warning'
    'salt.modules': 'debug'
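
As an illustrative sketch only (the ``requests`` logger name below is an assumed
third-party example, not part of this change), granular levels can also quiet
chatty libraries while keeping salt modules verbose:

.. code-block:: yaml

    log_granular_levels:
      'salt': 'warning'
      'salt.modules': 'debug'
      'requests': 'error'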

External Logging Handlers
-------------------------

@@ -334,7 +334,7 @@ class SSH(object):
        '''
        if not isinstance(ret[host], dict) or self.opts.get('ssh_key_deploy'):
            target = self.targets[host]
            if 'passwd' in target or self.opts['ssh_passwd']:
            if target.get('passwd', False) or self.opts['ssh_passwd']:
                self._key_deploy_run(host, target, False)
            return ret
        if ret[host].get('stderr', '').count('Permission denied'):
@@ -873,6 +873,12 @@ class Single(object):
                minion_opts=self.minion_opts,
                **self.target)
            opts_pkg = pre_wrapper['test.opts_pkg']()  # pylint: disable=E1102
            if '_error' in opts_pkg:
                #Refresh failed
                retcode = opts_pkg['retcode']
                ret = json.dumps({'local': opts_pkg})
                return ret, retcode

            opts_pkg['file_roots'] = self.opts['file_roots']
            opts_pkg['pillar_roots'] = self.opts['pillar_roots']
            opts_pkg['ext_pillar'] = self.opts['ext_pillar']

@@ -888,12 +894,6 @@ class Single(object):

            retcode = 0

            if '_error' in opts_pkg:
                #Refresh failed
                retcode = opts_pkg['retcode']
                ret = json.dumps({'local': opts_pkg['_error']})
                return ret, retcode

            pillar = salt.pillar.Pillar(
                    opts_pkg,
                    opts_pkg['grains'],
@@ -231,7 +231,8 @@ class MasterKeys(dict):
            self.sig_path = os.path.join(self.opts['pki_dir'],
                                         opts['master_pubkey_signature'])
            if os.path.isfile(self.sig_path):
                self.pub_signature = salt.utils.fopen(self.sig_path).read()
                with salt.utils.fopen(self.sig_path) as fp_:
                    self.pub_signature = fp_.read()
                log.info('Read {0}\'s signature from {1}'
                         ''.format(os.path.basename(self.pub_path),
                                   self.opts['master_pubkey_signature']))
@@ -589,6 +589,8 @@ class Client(object):
                result.append(chunk)
            else:
                dest_tmp = "{0}.part".format(dest)
                # We need an open filehandle to use in the on_chunk callback,
                # that's why we're not using a with clause here.
                destfp = salt.utils.fopen(dest_tmp, 'wb')

                def on_chunk(chunk):

@@ -1083,6 +1085,8 @@ class RemoteClient(Client):
                    os.makedirs(destdir)
                else:
                    return False
            # We need an open filehandle here, that's why we're not using a
            # with clause:
            fn_ = salt.utils.fopen(dest, 'wb+')
        else:
            log.debug('No dest file found')
@@ -184,7 +184,7 @@ class SaltColorLogRecord(SaltLogRecord):
                                          reset)
        self.colorlevel = '%s[%-8s]%s' % (clevel,
                                          self.levelname,
                                          TextFormat('reset'))
                                          reset)
        self.colorprocess = '%s[%5s]%s' % (LOG_COLORS['process'],
                                           self.process,
                                           reset)
@ -1,119 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Module for managing block devices
|
||||
|
||||
.. versionadded:: 2014.7.0
|
||||
.. deprecated:: Carbon
|
||||
Merged to `disk` module
|
||||
|
||||
'''
|
||||
from __future__ import absolute_import
|
||||
|
||||
# Import python libs
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__func_alias__ = {
|
||||
'format_': 'format'
|
||||
}
|
||||
|
||||
__virtualname__ = 'blockdev'
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
Only load this module if the blockdev utility is available
|
||||
'''
|
||||
if salt.utils.is_windows():
|
||||
return (False, ('The {0} execution module '
|
||||
'is not supported on windows'.format(__virtualname__)))
|
||||
elif not salt.utils.which('blockdev'):
|
||||
return (False, ('Cannot load the {0} execution module: '
|
||||
'blockdev utility not found'.format(__virtualname__)))
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def format_(device, fs_type='ext4',
|
||||
inode_size=None, lazy_itable_init=None, force=False):
|
||||
'''
|
||||
Format a filesystem onto a block device
|
||||
|
||||
.. versionadded:: 2015.8.2
|
||||
|
||||
.. deprecated:: Carbon
|
||||
|
||||
device
|
||||
The block device in which to create the new filesystem
|
||||
|
||||
fs_type
|
||||
The type of filesystem to create
|
||||
|
||||
inode_size
|
||||
Size of the inodes
|
||||
|
||||
This option is only enabled for ext and xfs filesystems
|
||||
|
||||
lazy_itable_init
|
||||
If enabled and the uninit_bg feature is enabled, the inode table will
|
||||
not be fully initialized by mke2fs. This speeds up filesystem
|
||||
initialization noticeably, but it requires the kernel to finish
|
||||
initializing the filesystem in the background when the filesystem
|
||||
is first mounted. If the option value is omitted, it defaults to 1 to
|
||||
enable lazy inode table zeroing.
|
||||
|
||||
This option is only enabled for ext filesystems
|
||||
|
||||
force
|
||||
Force mke2fs to create a filesystem, even if the specified device is
|
||||
not a partition on a block special device. This option is only enabled
|
||||
for ext and xfs filesystems
|
||||
|
||||
This option is dangerous, use it with caution.
|
||||
|
||||
.. versionadded:: Carbon
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' blockdev.format /dev/sdX1
|
||||
'''
|
||||
salt.utils.warn_until(
|
||||
'Oxygen',
|
||||
'The blockdev module has been merged with the disk module,'
|
||||
'and will disappear in Oxygen. Use the disk.format_ function instead.'
|
||||
)
|
||||
return __salt__['disk.format_'](device,
|
||||
fs_type=fs_type,
|
||||
inode_size=inode_size,
|
||||
lazy_itable_init=lazy_itable_init,
|
||||
force=force)
|
||||
|
||||
|
||||
def fstype(device):
|
||||
'''
|
||||
Return the filesystem name of a block device
|
||||
|
||||
.. versionadded:: 2015.8.2
|
||||
|
||||
.. deprecated:: Carbon
|
||||
|
||||
device
|
||||
The name of the block device
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' blockdev.fstype /dev/sdX1
|
||||
'''
|
||||
salt.utils.warn_until(
|
||||
'Oxygen',
|
||||
'The blockdev module has been merged with the disk module,'
|
||||
'and will disappear in Oxygen. Use the disk.fstype function instead.'
|
||||
)
|
||||
return __salt__['disk.fstype'](device)
|
|
@@ -74,7 +74,8 @@ def recv(files, dest):
            return 'Destination unavailable'

        try:
            salt.utils.fopen(final, 'w+').write(data)
            with salt.utils.fopen(final, 'w+') as fp_:
                fp_.write(data)
            ret[final] = True
        except IOError:
            ret[final] = False
|
|
@@ -52,6 +52,7 @@ def load():

    try:
        datastore_path = os.path.join(__opts__['cachedir'], 'datastore')
        # serial.load() will close the filehandle, no need for a "with" block
        fn_ = salt.utils.fopen(datastore_path, 'rb')
        return serial.load(fn_)
    except (IOError, OSError, NameError):
|
|
|
@@ -191,6 +191,8 @@ def update(zone, name, ttl, rdtype, data, nameserver='127.0.0.1', timeout=5,
        dns_update.replace(name, ttl, rdata)
    elif not is_exist:
        dns_update.add(name, ttl, rdata)
    else:
        return None
    answer = dns.query.udp(dns_update, nameserver, timeout, port)
    if answer.rcode() > 0:
        return False
|
||||
|
|
|
@@ -99,20 +99,18 @@ def hosts_remove(hostsfile='/etc/hosts', entries=None):
        hosts = fp_.read()

    host_list = entries.split(',')
    out_file = salt.utils.fopen(hostsfile, 'w')
    for line in hosts.splitlines():
        if not line or line.strip().startswith('#'):
            out_file.write('{0}\n'.format(line))
            continue
        comps = line.split()
        for host in host_list:
            if host in comps[1:]:
                comps.remove(host)
        if len(comps) > 1:
            out_file.write(' '.join(comps))
            out_file.write('\n')

    out_file.close()
    with salt.utils.fopen(hostsfile, 'w') as out_file:
        for line in hosts.splitlines():
            if not line or line.strip().startswith('#'):
                out_file.write('{0}\n'.format(line))
                continue
            comps = line.split()
            for host in host_list:
                if host in comps[1:]:
                    comps.remove(host)
            if len(comps) > 1:
                out_file.write(' '.join(comps))
                out_file.write('\n')


def parse_zone(zonefile=None, zone=None):
|
||||
|
|
|
@ -3437,7 +3437,8 @@ def export(name,
|
|||
if compression != 'gzip':
|
||||
# gzip doesn't use a Compressor object, it uses a .open() method to
|
||||
# open the filehandle. If not using gzip, we need to open the
|
||||
# filehandle here.
|
||||
# filehandle here. We make sure to close it in the "finally" block
|
||||
# below.
|
||||
out = salt.utils.fopen(path, 'wb')
|
||||
response = _client_wrapper('export', name)
|
||||
buf = None
|
||||
|
|
|
@ -315,9 +315,10 @@ def _get_pkg_license(pkg):
|
|||
licenses = set()
|
||||
cpr = "/usr/share/doc/{0}/copyright".format(pkg)
|
||||
if os.path.exists(cpr):
|
||||
for line in open(cpr).read().split(os.linesep):
|
||||
if line.startswith("License:"):
|
||||
licenses.add(line.split(":", 1)[1].strip())
|
||||
with salt.utils.fopen(cpr) as fp_:
|
||||
for line in fp_.read().split(os.linesep):
|
||||
if line.startswith("License:"):
|
||||
licenses.add(line.split(":", 1)[1].strip())
|
||||
|
||||
return ", ".join(sorted(licenses))
|
||||
|
||||
|
@ -352,17 +353,18 @@ def _get_pkg_ds_avail():
|
|||
ret = dict()
|
||||
pkg_mrk = "Package:"
|
||||
pkg_name = "package"
|
||||
for pkg_info in open(avail).read().split(pkg_mrk):
|
||||
nfo = dict()
|
||||
for line in (pkg_mrk + pkg_info).split(os.linesep):
|
||||
line = line.split(": ", 1)
|
||||
if len(line) != 2:
|
||||
continue
|
||||
key, value = line
|
||||
if value.strip():
|
||||
nfo[key.lower()] = value
|
||||
if nfo.get(pkg_name):
|
||||
ret[nfo[pkg_name]] = nfo
|
||||
with salt.utils.fopen(avail) as fp_:
|
||||
for pkg_info in fp_.read().split(pkg_mrk):
|
||||
nfo = dict()
|
||||
for line in (pkg_mrk + pkg_info).split(os.linesep):
|
||||
line = line.split(": ", 1)
|
||||
if len(line) != 2:
|
||||
continue
|
||||
key, value = line
|
||||
if value.strip():
|
||||
nfo[key.lower()] = value
|
||||
if nfo.get(pkg_name):
|
||||
ret[nfo[pkg_name]] = nfo
|
||||
|
||||
return ret
|
||||
|
||||
|
|
|
@ -39,7 +39,7 @@ def fire_master(data, tag, preload=None):
|
|||
|
||||
salt '*' event.fire_master '{"data":"my event data"}' 'tag'
|
||||
'''
|
||||
if __opts__.get('local', None):
|
||||
if (__opts__.get('local', None) or __opts__.get('file_client', None) == 'local') and not __opts__.get('use_master_when_local', False):
|
||||
# We can't send an event if we're in masterless mode
|
||||
log.warning('Local mode detected. Event with tag {0} will NOT be sent.'.format(tag))
|
||||
return False
|
||||
|
|
|
@ -1499,7 +1499,8 @@ def line(path, content, match=None, mode=None, location=None,
|
|||
if before is None and after is None and not match:
|
||||
match = content
|
||||
|
||||
body = salt.utils.fopen(path, mode='r').read()
|
||||
with salt.utils.fopen(path, mode='r') as fp_:
|
||||
body = fp_.read()
|
||||
body_before = hashlib.sha256(salt.utils.to_bytes(body)).hexdigest()
|
||||
after = _regex_to_static(body, after)
|
||||
before = _regex_to_static(body, before)
|
||||
|
@ -1640,7 +1641,9 @@ def line(path, content, match=None, mode=None, location=None,
|
|||
|
||||
if changed:
|
||||
if show_changes:
|
||||
changes_diff = ''.join(difflib.unified_diff(salt.utils.fopen(path, 'r').read().splitlines(), body.splitlines()))
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
path_content = fp_.read().splitlines()
|
||||
changes_diff = ''.join(difflib.unified_diff(path_content, body.splitlines()))
|
||||
if __opts__['test'] is False:
|
||||
fh_ = None
|
||||
try:
|
||||
|
|
|
@ -345,8 +345,9 @@ def append(key, val, convert=False, delimiter=DEFAULT_TARGET_DELIM):
|
|||
salt '*' grains.append key val
|
||||
'''
|
||||
grains = get(key, [], delimiter)
|
||||
if not isinstance(grains, list) and convert is True:
|
||||
grains = [grains]
|
||||
if convert:
|
||||
if not isinstance(grains, list):
|
||||
grains = [] if grains is None else [grains]
|
||||
if not isinstance(grains, list):
|
||||
return 'The key {0} is not a valid list'.format(key)
|
||||
if val in grains:
|
||||
|
|
|
@ -33,7 +33,8 @@ def _get_or_create_hostfile():
|
|||
if hfn is None:
|
||||
hfn = ''
|
||||
if not os.path.exists(hfn):
|
||||
salt.utils.fopen(hfn, 'w').close()
|
||||
with salt.utils.fopen(hfn, 'w'):
|
||||
pass
|
||||
return hfn
|
||||
|
||||
|
||||
|
@ -160,7 +161,8 @@ def set_host(ip, alias):
|
|||
if not alias.strip():
|
||||
line_to_add = ''
|
||||
|
||||
lines = salt.utils.fopen(hfn).readlines()
|
||||
with salt.utils.fopen(hfn) as fp_:
|
||||
lines = fp_.readlines()
|
||||
for ind, line in enumerate(lines):
|
||||
tmpline = line.strip()
|
||||
if not tmpline:
|
||||
|
@ -198,7 +200,8 @@ def rm_host(ip, alias):
|
|||
if not has_pair(ip, alias):
|
||||
return True
|
||||
hfn = _get_or_create_hostfile()
|
||||
lines = salt.utils.fopen(hfn).readlines()
|
||||
with salt.utils.fopen(hfn) as fp_:
|
||||
lines = fp_.readlines()
|
||||
for ind in range(len(lines)):
|
||||
tmpline = lines[ind].strip()
|
||||
if not tmpline:
|
||||
|
|
|
@ -120,9 +120,8 @@ def _write_file(folder, filename, data):
|
|||
msg = msg.format(filename, folder)
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
fout = salt.utils.fopen(path, 'w')
|
||||
fout.write(data)
|
||||
fout.close()
|
||||
with salt.utils.fopen(path, 'w') as fp_:
|
||||
fp_.write(data)
|
||||
|
||||
return 0
|
||||
|
||||
|
|
|
@ -475,7 +475,8 @@ def is_alive(pidfile):
|
|||
Check if PID is still alive.
|
||||
'''
|
||||
try:
|
||||
os.kill(int(open(pidfile).read().strip()), 0)
|
||||
with salt.utils.fopen(pidfile) as fp_:
|
||||
os.kill(int(fp_.read().strip()), 0)
|
||||
return True
|
||||
except Exception as ex:
|
||||
if os.access(pidfile, os.W_OK) and os.path.isfile(pidfile):
|
||||
|
@ -516,9 +517,8 @@ if __name__ == '__main__':
|
|||
pid = os.fork()
|
||||
if pid > 0:
|
||||
reinit_crypto()
|
||||
fpid = open(os.path.join(pidfile, EnvLoader.PID_FILE), "w")
|
||||
fpid.write("{0}\n".format(pid))
|
||||
fpid.close()
|
||||
with salt.utils.fopen(os.path.join(pidfile, EnvLoader.PID_FILE), 'w') as fp_:
|
||||
fp_.write('{0}\n'.format(pid))
|
||||
sys.exit(0)
|
||||
except OSError as ex:
|
||||
sys.exit(1)
|
||||
|
|
|
@ -42,7 +42,8 @@ def _check_systemd_salt_config():
|
|||
if not os.path.exists(sysctl_dir):
|
||||
os.makedirs(sysctl_dir)
|
||||
try:
|
||||
salt.utils.fopen(conf, 'w').close()
|
||||
with salt.utils.fopen(conf, 'w'):
|
||||
pass
|
||||
except (IOError, OSError):
|
||||
msg = 'Could not create file: {0}'
|
||||
raise CommandExecutionError(msg.format(conf))
|
||||
|
@ -84,16 +85,17 @@ def show(config_file=False):
|
|||
ret = {}
|
||||
if config_file:
|
||||
try:
|
||||
for line in salt.utils.fopen(config_file):
|
||||
if not line.startswith('#') and '=' in line:
|
||||
# search if we have some '=' instead of ' = ' separators
|
||||
SPLIT = ' = '
|
||||
if SPLIT not in line:
|
||||
SPLIT = SPLIT.strip()
|
||||
key, value = line.split(SPLIT, 1)
|
||||
key = key.strip()
|
||||
value = value.lstrip()
|
||||
ret[key] = value
|
||||
with salt.utils.fopen(config_file) as fp_:
|
||||
for line in fp_:
|
||||
if not line.startswith('#') and '=' in line:
|
||||
# search if we have some '=' instead of ' = ' separators
|
||||
SPLIT = ' = '
|
||||
if SPLIT not in line:
|
||||
SPLIT = SPLIT.strip()
|
||||
key, value = line.split(SPLIT, 1)
|
||||
key = key.strip()
|
||||
value = value.lstrip()
|
||||
ret[key] = value
|
||||
except (OSError, IOError):
|
||||
log.error('Could not open sysctl file')
|
||||
return None
|
||||
|
|
|
@ -117,7 +117,8 @@ def persist(name, value, config='/etc/sysctl.conf'):
|
|||
# create /etc/sysctl.conf if not present
|
||||
if not os.path.isfile(config):
|
||||
try:
|
||||
salt.utils.fopen(config, 'w+').close()
|
||||
with salt.utils.fopen(config, 'w+'):
|
||||
pass
|
||||
except (IOError, OSError):
|
||||
msg = 'Could not create {0}'
|
||||
raise CommandExecutionError(msg.format(config))
|
||||
|
|
|
@ -385,13 +385,22 @@ def _netstat_route_freebsd():
|
|||
out = __salt__['cmd.run'](cmd, python_shell=True)
|
||||
for line in out.splitlines():
|
||||
comps = line.split()
|
||||
ret.append({
|
||||
'addr_family': 'inet',
|
||||
'destination': comps[0],
|
||||
'gateway': comps[1],
|
||||
'netmask': comps[2],
|
||||
'flags': comps[3],
|
||||
'interface': comps[5]})
|
||||
if __grains__['os'] == 'FreeBSD' and __grains__.get('osmajorrelease', 0) < 10:
|
||||
ret.append({
|
||||
'addr_family': 'inet',
|
||||
'destination': comps[0],
|
||||
'gateway': comps[1],
|
||||
'netmask': comps[2],
|
||||
'flags': comps[3],
|
||||
'interface': comps[5]})
|
||||
else:
|
||||
ret.append({
|
||||
'addr_family': 'inet',
|
||||
'destination': comps[0],
|
||||
'gateway': comps[1],
|
||||
'netmask': '',
|
||||
'flags': comps[2],
|
||||
'interface': comps[3]})
|
||||
cmd = 'netstat -f inet6 -rn | tail -n+5'
|
||||
out = __salt__['cmd.run'](cmd, python_shell=True)
|
||||
for line in out.splitlines():
|
||||
|
@ -999,7 +1008,8 @@ def mod_hostname(hostname):
|
|||
|
||||
# Modify the /etc/hosts file to replace the old hostname with the
|
||||
# new hostname
|
||||
host_c = salt.utils.fopen('/etc/hosts', 'r').readlines()
|
||||
with salt.utils.fopen('/etc/hosts', 'r') as fp_:
|
||||
host_c = fp_.readlines()
|
||||
|
||||
with salt.utils.fopen('/etc/hosts', 'w') as fh_:
|
||||
for host in host_c:
|
||||
|
@ -1018,7 +1028,8 @@ def mod_hostname(hostname):
|
|||
# Modify the /etc/sysconfig/network configuration file to set the
|
||||
# new hostname
|
||||
if __grains__['os_family'] == 'RedHat':
|
||||
network_c = salt.utils.fopen('/etc/sysconfig/network', 'r').readlines()
|
||||
with salt.utils.fopen('/etc/sysconfig/network', 'r') as fp_:
|
||||
network_c = fp_.readlines()
|
||||
|
||||
with salt.utils.fopen('/etc/sysconfig/network', 'w') as fh_:
|
||||
for net in network_c:
|
||||
|
|
|
@ -260,7 +260,8 @@ def get_saved_rules(conf_file=None, family='ipv4'):
|
|||
if _conf() and not conf_file:
|
||||
conf_file = _conf()
|
||||
|
||||
lines = salt.utils.fopen(conf_file).readlines()
|
||||
with salt.utils.fopen(conf_file) as fp_:
|
||||
lines = fp_.readlines()
|
||||
rules = []
|
||||
for line in lines:
|
||||
tmpline = line.strip()
|
||||
|
|
|
@ -98,7 +98,8 @@ def persist(name, value, config='/etc/sysctl.conf'):
|
|||
# create /etc/sysctl.conf if not present
|
||||
if not os.path.isfile(config):
|
||||
try:
|
||||
salt.utils.fopen(config, 'w+').close()
|
||||
with salt.utils.fopen(config, 'w+'):
|
||||
pass
|
||||
except (IOError, OSError):
|
||||
msg = 'Could not create {0}'
|
||||
raise CommandExecutionError(msg.format(config))
|
||||
|
|
|
@ -5,6 +5,7 @@ Configure ``portage(5)``
|
|||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
|
@ -36,6 +37,8 @@ BASE_PATH = '/etc/portage/package.{0}'
|
|||
SUPPORTED_CONFS = ('accept_keywords', 'env', 'license', 'mask', 'properties',
|
||||
'unmask', 'use')
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
|
@ -216,7 +219,8 @@ def _package_conf_ordering(conf, clean=True, keep_backup=False):
|
|||
backup_files.append(file_path + '.bak')
|
||||
|
||||
if cp[0] == '/' or cp.split('/') > 2:
|
||||
rearrange.extend(list(salt.utils.fopen(file_path)))
|
||||
with salt.utils.fopen(file_path) as fp_:
|
||||
rearrange.extend(fp_.readlines())
|
||||
os.remove(file_path)
|
||||
else:
|
||||
new_contents = ''
|
||||
|
@ -365,43 +369,49 @@ def append_to_package_conf(conf, atom='', flags=None, string='', overwrite=False
|
|||
new_contents = ''
|
||||
added = False
|
||||
|
||||
for l in file_handler:
|
||||
l_strip = l.strip()
|
||||
if l_strip == '':
|
||||
new_contents += '\n'
|
||||
elif l_strip[0] == '#':
|
||||
new_contents += l
|
||||
elif l_strip.split()[0] == atom:
|
||||
if l_strip in to_delete_if_empty:
|
||||
continue
|
||||
if overwrite:
|
||||
new_contents += string.strip() + '\n'
|
||||
added = True
|
||||
else:
|
||||
old_flags = [flag for flag in l_strip.split(' ') if flag][1:]
|
||||
if conf == 'accept_keywords':
|
||||
if not old_flags:
|
||||
new_contents += l
|
||||
if not new_flags:
|
||||
added = True
|
||||
continue
|
||||
elif not new_flags:
|
||||
continue
|
||||
merged_flags = _merge_flags(new_flags, old_flags, conf)
|
||||
if merged_flags:
|
||||
new_contents += '{0} {1}\n'.format(
|
||||
atom, ' '.join(merged_flags))
|
||||
try:
|
||||
for l in file_handler:
|
||||
l_strip = l.strip()
|
||||
if l_strip == '':
|
||||
new_contents += '\n'
|
||||
elif l_strip[0] == '#':
|
||||
new_contents += l
|
||||
elif l_strip.split()[0] == atom:
|
||||
if l_strip in to_delete_if_empty:
|
||||
continue
|
||||
if overwrite:
|
||||
new_contents += string.strip() + '\n'
|
||||
added = True
|
||||
else:
|
||||
new_contents += '{0}\n'.format(atom)
|
||||
added = True
|
||||
else:
|
||||
new_contents += l
|
||||
if not added:
|
||||
new_contents += string.strip() + '\n'
|
||||
file_handler.seek(0)
|
||||
file_handler.truncate(len(new_contents))
|
||||
file_handler.write(new_contents)
|
||||
file_handler.close()
|
||||
old_flags = [flag for flag in l_strip.split(' ') if flag][1:]
|
||||
if conf == 'accept_keywords':
|
||||
if not old_flags:
|
||||
new_contents += l
|
||||
if not new_flags:
|
||||
added = True
|
||||
continue
|
||||
elif not new_flags:
|
||||
continue
|
||||
merged_flags = _merge_flags(new_flags, old_flags, conf)
|
||||
if merged_flags:
|
||||
new_contents += '{0} {1}\n'.format(
|
||||
atom, ' '.join(merged_flags))
|
||||
else:
|
||||
new_contents += '{0}\n'.format(atom)
|
||||
added = True
|
||||
else:
|
||||
new_contents += l
|
||||
if not added:
|
||||
new_contents += string.strip() + '\n'
|
||||
except Exception as exc:
|
||||
log.error('Failed to write to %s: %s', complete_file_path, exc)
|
||||
else:
|
||||
file_handler.seek(0)
|
||||
file_handler.truncate(len(new_contents))
|
||||
file_handler.write(new_contents)
|
||||
finally:
|
||||
file_handler.close()
|
||||
|
||||
try:
|
||||
os.remove(complete_file_path + '.bak')
|
||||
except OSError:
|
||||
|
@ -455,28 +465,27 @@ def get_flags_from_package_conf(conf, atom):
|
|||
|
||||
flags = []
|
||||
try:
|
||||
file_handler = salt.utils.fopen(package_file)
|
||||
with salt.utils.fopen(package_file) as fp_:
|
||||
for line in fp_:
|
||||
line = line.strip()
|
||||
line_package = line.split()[0]
|
||||
|
||||
if has_wildcard:
|
||||
found_match = line_package == atom
|
||||
else:
|
||||
line_list = _porttree().dbapi.xmatch("match-all", line_package)
|
||||
found_match = match_list.issubset(line_list)
|
||||
|
||||
if found_match:
|
||||
f_tmp = [flag for flag in line.strip().split(' ') if flag][1:]
|
||||
if f_tmp:
|
||||
flags.extend(f_tmp)
|
||||
else:
|
||||
flags.append('~ARCH')
|
||||
|
||||
return _merge_flags(flags)
|
||||
except IOError:
|
||||
return []
|
||||
else:
|
||||
for line in file_handler:
|
||||
line = line.strip()
|
||||
line_package = line.split()[0]
|
||||
|
||||
found_match = False
|
||||
if has_wildcard:
|
||||
found_match = line_package == atom
|
||||
else:
|
||||
line_list = _porttree().dbapi.xmatch("match-all", line_package)
|
||||
found_match = match_list.issubset(line_list)
|
||||
|
||||
if found_match:
|
||||
f_tmp = [flag for flag in line.strip().split(' ') if flag][1:]
|
||||
if f_tmp:
|
||||
flags.extend(f_tmp)
|
||||
else:
|
||||
flags.append('~ARCH')
|
||||
return _merge_flags(flags)
|
||||
|
||||
|
||||
def has_flag(conf, atom, flag):
|
||||
|
@ -554,22 +563,21 @@ def is_present(conf, atom):
|
|||
match_list = set(_porttree().dbapi.xmatch("match-all", atom))
|
||||
|
||||
try:
|
||||
file_handler = salt.utils.fopen(package_file)
|
||||
except IOError:
|
||||
return False
|
||||
else:
|
||||
for line in file_handler:
|
||||
line = line.strip()
|
||||
line_package = line.split()[0]
|
||||
with salt.utils.fopen(package_file) as fp_:
|
||||
for line in fp_:
|
||||
line = line.strip()
|
||||
line_package = line.split()[0]
|
||||
|
||||
if has_wildcard:
|
||||
if line_package == str(atom):
|
||||
return True
|
||||
else:
|
||||
line_list = _porttree().dbapi.xmatch("match-all", line_package)
|
||||
if match_list.issubset(line_list):
|
||||
return True
|
||||
return False
|
||||
if has_wildcard:
|
||||
if line_package == str(atom):
|
||||
return True
|
||||
else:
|
||||
line_list = _porttree().dbapi.xmatch("match-all", line_package)
|
||||
if match_list.issubset(line_list):
|
||||
return True
|
||||
except IOError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def get_iuse(cp):
|
||||
|
|
|
@ -880,18 +880,16 @@ def _write_file_iface(iface, data, folder, pattern):
|
|||
msg = msg.format(filename, folder)
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
fout = salt.utils.fopen(filename, 'w')
|
||||
fout.write(data)
|
||||
fout.close()
|
||||
with salt.utils.fopen(filename, 'w') as fp_:
|
||||
fp_.write(data)
|
||||
|
||||
|
||||
def _write_file_network(data, filename):
|
||||
'''
|
||||
Writes a file to disk
|
||||
'''
|
||||
fout = salt.utils.fopen(filename, 'w')
|
||||
fout.write(data)
|
||||
fout.close()
|
||||
with salt.utils.fopen(filename, 'w') as fp_:
|
||||
fp_.write(data)
|
||||
|
||||
|
||||
def _read_temp(data):
|
||||
|
|
|
@ -221,32 +221,34 @@ def cpustats():
|
|||
'''
|
||||
linux specific implementation of cpustats
|
||||
'''
|
||||
procf = '/proc/stat'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
if comps[0] == 'cpu':
|
||||
ret[comps[0]] = {'idle': _number(comps[4]),
|
||||
'iowait': _number(comps[5]),
|
||||
'irq': _number(comps[6]),
|
||||
'nice': _number(comps[2]),
|
||||
'softirq': _number(comps[7]),
|
||||
'steal': _number(comps[8]),
|
||||
'system': _number(comps[3]),
|
||||
'user': _number(comps[1])}
|
||||
elif comps[0] == 'intr':
|
||||
ret[comps[0]] = {'total': _number(comps[1]),
|
||||
'irqs': [_number(x) for x in comps[2:]]}
|
||||
elif comps[0] == 'softirq':
|
||||
ret[comps[0]] = {'total': _number(comps[1]),
|
||||
'softirqs': [_number(x) for x in comps[2:]]}
|
||||
else:
|
||||
ret[comps[0]] = _number(comps[1])
|
||||
try:
|
||||
with salt.utils.fopen('/proc/stat', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
if comps[0] == 'cpu':
|
||||
ret[comps[0]] = {'idle': _number(comps[4]),
|
||||
'iowait': _number(comps[5]),
|
||||
'irq': _number(comps[6]),
|
||||
'nice': _number(comps[2]),
|
||||
'softirq': _number(comps[7]),
|
||||
'steal': _number(comps[8]),
|
||||
'system': _number(comps[3]),
|
||||
'user': _number(comps[1])}
|
||||
elif comps[0] == 'intr':
|
||||
ret[comps[0]] = {'total': _number(comps[1]),
|
||||
'irqs': [_number(x) for x in comps[2:]]}
|
||||
elif comps[0] == 'softirq':
|
||||
ret[comps[0]] = {'total': _number(comps[1]),
|
||||
'softirqs': [_number(x) for x in comps[2:]]}
|
||||
else:
|
||||
ret[comps[0]] = _number(comps[1])
|
||||
return ret
|
||||
|
||||
def freebsd_cpustats():
|
||||
|
@ -309,21 +311,23 @@ def meminfo():
|
|||
'''
|
||||
linux specific implementation of meminfo
|
||||
'''
|
||||
procf = '/proc/meminfo'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
comps[0] = comps[0].replace(':', '')
|
||||
ret[comps[0]] = {
|
||||
'value': comps[1],
|
||||
}
|
||||
if len(comps) > 2:
|
||||
ret[comps[0]]['unit'] = comps[2]
|
||||
try:
|
||||
with salt.utils.fopen('/proc/meminfo', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
comps[0] = comps[0].replace(':', '')
|
||||
ret[comps[0]] = {
|
||||
'value': comps[1],
|
||||
}
|
||||
if len(comps) > 2:
|
||||
ret[comps[0]]['unit'] = comps[2]
|
||||
return ret
|
||||
|
||||
def freebsd_meminfo():
|
||||
|
@ -369,20 +373,22 @@ def cpuinfo():
|
|||
'''
|
||||
linux specific cpuinfo implementation
|
||||
'''
|
||||
procf = '/proc/cpuinfo'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split(':')
|
||||
comps[0] = comps[0].strip()
|
||||
if comps[0] == 'flags':
|
||||
ret[comps[0]] = comps[1].split()
|
||||
else:
|
||||
ret[comps[0]] = comps[1].strip()
|
||||
try:
|
||||
with salt.utils.fopen('/proc/cpuinfo', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split(':')
|
||||
comps[0] = comps[0].strip()
|
||||
if comps[0] == 'flags':
|
||||
ret[comps[0]] = comps[1].split()
|
||||
else:
|
||||
ret[comps[0]] = comps[1].strip()
|
||||
return ret
|
||||
|
||||
def bsd_cpuinfo():
|
||||
|
@ -493,29 +499,33 @@ def diskstats():
|
|||
'''
|
||||
linux specific implementation of diskstats
|
||||
'''
|
||||
procf = '/proc/diskstats'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
ret[comps[2]] = {'major': _number(comps[0]),
|
||||
'minor': _number(comps[1]),
|
||||
'device': _number(comps[2]),
|
||||
'reads_issued': _number(comps[3]),
|
||||
'reads_merged': _number(comps[4]),
|
||||
'sectors_read': _number(comps[5]),
|
||||
'ms_spent_reading': _number(comps[6]),
|
||||
'writes_completed': _number(comps[7]),
|
||||
'writes_merged': _number(comps[8]),
|
||||
'sectors_written': _number(comps[9]),
|
||||
'ms_spent_writing': _number(comps[10]),
|
||||
'io_in_progress': _number(comps[11]),
|
||||
'ms_spent_in_io': _number(comps[12]),
|
||||
'weighted_ms_spent_in_io': _number(comps[13])}
|
||||
try:
|
||||
with salt.utils.fopen('/proc/diskstats', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
ret[comps[2]] = {
|
||||
'major': _number(comps[0]),
|
||||
'minor': _number(comps[1]),
|
||||
'device': _number(comps[2]),
|
||||
'reads_issued': _number(comps[3]),
|
||||
'reads_merged': _number(comps[4]),
|
||||
'sectors_read': _number(comps[5]),
|
||||
'ms_spent_reading': _number(comps[6]),
|
||||
'writes_completed': _number(comps[7]),
|
||||
'writes_merged': _number(comps[8]),
|
||||
'sectors_written': _number(comps[9]),
|
||||
'ms_spent_writing': _number(comps[10]),
|
||||
'io_in_progress': _number(comps[11]),
|
||||
'ms_spent_in_io': _number(comps[12]),
|
||||
'weighted_ms_spent_in_io': _number(comps[13])
|
||||
}
|
||||
return ret
|
||||
|
||||
def generic_diskstats():
|
||||
|
@ -584,8 +594,10 @@ def diskusage(*args):
|
|||
)
|
||||
# ifile source of data varies with OS, otherwise all the same
|
||||
if __grains__['kernel'] == 'Linux':
|
||||
procf = '/proc/mounts'
|
||||
if not os.path.isfile(procf):
|
||||
try:
|
||||
with salt.utils.fopen('/proc/mounts', 'r') as fp_:
|
||||
ifile = fp_.read().splitlines()
|
||||
except OSError:
|
||||
return {}
|
||||
ifile = salt.utils.fopen(procf, 'r').readlines()
|
||||
elif __grains__['kernel'] in ('FreeBSD', 'SunOS'):
|
||||
|
@ -634,16 +646,18 @@ def vmstats():
|
|||
'''
|
||||
linux specific implementation of vmstats
|
||||
'''
|
||||
procf = '/proc/vmstat'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
ret[comps[0]] = _number(comps[1])
|
||||
try:
|
||||
with salt.utils.fopen('/proc/vmstat', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
ret[comps[0]] = _number(comps[1])
|
||||
return ret
|
||||
|
||||
def generic_vmstats():
|
||||
|
@ -699,28 +713,30 @@ def netstats():
|
|||
'''
|
||||
freebsd specific netstats implementation
|
||||
'''
|
||||
procf = '/proc/net/netstat'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
headers = ['']
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
if comps[0] == headers[0]:
|
||||
index = len(headers) - 1
|
||||
row = {}
|
||||
for field in range(index):
|
||||
if field < 1:
|
||||
continue
|
||||
else:
|
||||
row[headers[field]] = _number(comps[field])
|
||||
rowname = headers[0].replace(':', '')
|
||||
ret[rowname] = row
|
||||
else:
|
||||
headers = comps
|
||||
try:
|
||||
with salt.utils.fopen('/proc/net/netstat', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
headers = ['']
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
comps = line.split()
|
||||
if comps[0] == headers[0]:
|
||||
index = len(headers) - 1
|
||||
row = {}
|
||||
for field in range(index):
|
||||
if field < 1:
|
||||
continue
|
||||
else:
|
||||
row[headers[field]] = _number(comps[field])
|
||||
rowname = headers[0].replace(':', '')
|
||||
ret[rowname] = row
|
||||
else:
|
||||
headers = comps
|
||||
return ret
|
||||
|
||||
def freebsd_netstats():
|
||||
|
@ -786,38 +802,40 @@ def netdev():
|
|||
'''
|
||||
linux specific implementation of netdev
|
||||
'''
|
||||
procf = '/proc/net/dev'
|
||||
if not os.path.isfile(procf):
|
||||
return {}
|
||||
stats = salt.utils.fopen(procf, 'r').read().splitlines()
|
||||
ret = {}
|
||||
for line in stats:
|
||||
if not line:
|
||||
continue
|
||||
if line.find(':') < 0:
|
||||
continue
|
||||
comps = line.split()
|
||||
# Fix lines like eth0:9999..'
|
||||
comps[0] = line.split(':')[0].strip()
|
||||
# Support lines both like eth0:999 and eth0: 9999
|
||||
comps.insert(1, line.split(':')[1].strip().split()[0])
|
||||
ret[comps[0]] = {'iface': comps[0],
|
||||
'rx_bytes': _number(comps[1]),
|
||||
'rx_compressed': _number(comps[7]),
|
||||
'rx_drop': _number(comps[4]),
|
||||
'rx_errs': _number(comps[3]),
|
||||
'rx_fifo': _number(comps[5]),
|
||||
'rx_frame': _number(comps[6]),
|
||||
'rx_multicast': _number(comps[8]),
|
||||
'rx_packets': _number(comps[2]),
|
||||
'tx_bytes': _number(comps[9]),
|
||||
'tx_carrier': _number(comps[15]),
|
||||
'tx_colls': _number(comps[14]),
|
||||
'tx_compressed': _number(comps[16]),
|
||||
'tx_drop': _number(comps[12]),
|
||||
'tx_errs': _number(comps[11]),
|
||||
'tx_fifo': _number(comps[13]),
|
||||
'tx_packets': _number(comps[10])}
|
||||
try:
|
||||
with salt.utils.fopen('/proc/net/dev', 'r') as fp_:
|
||||
stats = fp_.read()
|
||||
except IOError:
|
||||
pass
|
||||
else:
|
||||
for line in stats.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
if line.find(':') < 0:
|
||||
continue
|
||||
comps = line.split()
|
||||
# Fix lines like eth0:9999..'
|
||||
comps[0] = line.split(':')[0].strip()
|
||||
# Support lines both like eth0:999 and eth0: 9999
|
||||
comps.insert(1, line.split(':')[1].strip().split()[0])
|
||||
ret[comps[0]] = {'iface': comps[0],
|
||||
'rx_bytes': _number(comps[1]),
|
||||
'rx_compressed': _number(comps[7]),
|
||||
'rx_drop': _number(comps[4]),
|
||||
'rx_errs': _number(comps[3]),
|
||||
'rx_fifo': _number(comps[5]),
|
||||
'rx_frame': _number(comps[6]),
|
||||
'rx_multicast': _number(comps[8]),
|
||||
'rx_packets': _number(comps[2]),
|
||||
'tx_bytes': _number(comps[9]),
|
||||
'tx_carrier': _number(comps[15]),
|
||||
'tx_colls': _number(comps[14]),
|
||||
'tx_compressed': _number(comps[16]),
|
||||
'tx_drop': _number(comps[12]),
|
||||
'tx_errs': _number(comps[11]),
|
||||
'tx_fifo': _number(comps[13]),
|
||||
'tx_packets': _number(comps[10])}
|
||||
return ret
|
||||
|
||||
def freebsd_netdev():
|
||||
|
@ -976,10 +994,11 @@ def version():
|
|||
'''
|
||||
linux specific implementation of version
|
||||
'''
|
||||
procf = '/proc/version'
|
||||
if not os.path.isfile(procf):
|
||||
try:
|
||||
with salt.utils.fopen('/proc/version', 'r') as fp_:
|
||||
return fp_.read().strip()
|
||||
except IOError:
|
||||
return {}
|
||||
return salt.utils.fopen(procf, 'r').read().strip()
|
||||
|
||||
# dict that returns a function that does the right thing per platform
|
||||
get_version = {
|
||||
|
|
|
@ -1638,21 +1638,22 @@ def create_empty_crl(
|
|||
return 'CRL "{0}" already exists'.format(crl_file)
|
||||
|
||||
try:
|
||||
ca_cert = OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
salt.utils.fopen('{0}/{1}/{2}.crt'.format(
|
||||
with salt.utils.fopen('{0}/{1}/{2}.crt'.format(
|
||||
cert_base_path(),
|
||||
ca_name,
|
||||
ca_filename
|
||||
)).read()
|
||||
)
|
||||
ca_key = OpenSSL.crypto.load_privatekey(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
salt.utils.fopen('{0}/{1}/{2}.key'.format(
|
||||
ca_filename)) as fp_:
|
||||
ca_cert = OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
fp_.read()
|
||||
)
|
||||
with salt.utils.fopen('{0}/{1}/{2}.key'.format(
|
||||
cert_base_path(),
|
||||
ca_name,
|
||||
ca_filename)).read()
|
||||
)
|
||||
ca_filename)) as fp_:
|
||||
ca_key = OpenSSL.crypto.load_privatekey(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
fp_.read()
|
||||
)
|
||||
except IOError:
|
||||
return 'There is no CA named "{0}"'.format(ca_name)
|
||||
|
||||
|
@ -1723,21 +1724,22 @@ def revoke_cert(
|
|||
cert_filename = '{0}'.format(CN)
|
||||
|
||||
try:
|
||||
ca_cert = OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
salt.utils.fopen('{0}/{1}/{2}.crt'.format(
|
||||
with salt.utils.fopen('{0}/{1}/{2}.crt'.format(
|
||||
cert_base_path(),
|
||||
ca_name,
|
||||
ca_filename
|
||||
)).read()
|
||||
)
|
||||
ca_key = OpenSSL.crypto.load_privatekey(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
salt.utils.fopen('{0}/{1}/{2}.key'.format(
|
||||
ca_filename)) as fp_:
|
||||
ca_cert = OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
fp_.read()
|
||||
)
|
||||
with salt.utils.fopen('{0}/{1}/{2}.key'.format(
|
||||
cert_base_path(),
|
||||
ca_name,
|
||||
ca_filename)).read()
|
||||
)
|
||||
ca_filename)) as fp_:
|
||||
ca_key = OpenSSL.crypto.load_privatekey(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
fp_.read()
|
||||
)
|
||||
except IOError:
|
||||
return 'There is no CA named "{0}"'.format(ca_name)
|
||||
|
||||
|
@ -1773,8 +1775,8 @@ def revoke_cert(
|
|||
index_serial_subject)
|
||||
|
||||
ret = {}
|
||||
with salt.utils.fopen(index_file) as f:
|
||||
for line in f:
|
||||
with salt.utils.fopen(index_file) as fp_:
|
||||
for line in fp_:
|
||||
if index_r_data_pattern.match(line):
|
||||
revoke_date = line.split('\t')[2]
|
||||
try:
|
||||
|
@ -1802,8 +1804,8 @@ def revoke_cert(
|
|||
|
||||
crl = OpenSSL.crypto.CRL()
|
||||
|
||||
with salt.utils.fopen(index_file) as f:
|
||||
for line in f:
|
||||
with salt.utils.fopen(index_file) as fp_:
|
||||
for line in fp_:
|
||||
if line.startswith('R'):
|
||||
fields = line.split('\t')
|
||||
revoked = OpenSSL.crypto.Revoked()
|
||||
|
@ -1828,8 +1830,8 @@ def revoke_cert(
|
|||
crl_file)
|
||||
return ret
|
||||
|
||||
with salt.utils.fopen(crl_file, 'w') as f:
|
||||
f.write(crl_text)
|
||||
with salt.utils.fopen(crl_file, 'w') as fp_:
|
||||
fp_.write(crl_text)
|
||||
|
||||
return ('Revoked Certificate: "{0}/{1}.crt", '
|
||||
'serial number: {2}').format(
|
||||
|
|
|
@ -1320,9 +1320,11 @@ def create_xml_path(path):
|
|||
|
||||
salt '*' virt.create_xml_path <path to XML file on the node>
|
||||
'''
|
||||
if not os.path.isfile(path):
|
||||
try:
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
return create_xml_str(fp_.read())
|
||||
except (OSError, IOError):
|
||||
return False
|
||||
return create_xml_str(salt.utils.fopen(path, 'r').read())
|
||||
|
||||
|
||||
def define_xml_str(xml):
|
||||
|
@ -1350,9 +1352,11 @@ def define_xml_path(path):
|
|||
salt '*' virt.define_xml_path <path to XML file on the node>
|
||||
|
||||
'''
|
||||
if not os.path.isfile(path):
|
||||
try:
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
return define_xml_str(fp_.read())
|
||||
except (OSError, IOError):
|
||||
return False
|
||||
return define_xml_str(salt.utils.fopen(path, 'r').read())
|
||||
|
||||
|
||||
def define_vol_xml_str(xml):
|
||||
|
@ -1382,9 +1386,11 @@ def define_vol_xml_path(path):
|
|||
salt '*' virt.define_vol_xml_path <path to XML file on the node>
|
||||
|
||||
'''
|
||||
if not os.path.isfile(path):
|
||||
try:
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
return define_vol_xml_str(fp_.read())
|
||||
except (OSError, IOError):
|
||||
return False
|
||||
return define_vol_xml_str(salt.utils.fopen(path, 'r').read())
|
||||
|
||||
|
||||
def migrate_non_shared(vm_, target, ssh=False):
|
||||
|
@ -1574,8 +1580,9 @@ def is_kvm_hyper():
|
|||
salt '*' virt.is_kvm_hyper
|
||||
'''
|
||||
try:
|
||||
if 'kvm_' not in salt.utils.fopen('/proc/modules').read():
|
||||
return False
|
||||
with salt.utils.fopen('/proc/modules') as fp_:
|
||||
if 'kvm_' not in fp_.read():
|
||||
return False
|
||||
except IOError:
|
||||
# No /proc/modules? Are we on Windows? Or Solaris?
|
||||
return False
|
||||
|
@ -1599,9 +1606,10 @@ def is_xen_hyper():
|
|||
# virtual_subtype isn't set everywhere.
|
||||
return False
|
||||
try:
|
||||
if 'xen_' not in salt.utils.fopen('/proc/modules').read():
|
||||
return False
|
||||
except IOError:
|
||||
with salt.utils.fopen('/proc/modules') as fp_:
|
||||
if 'xen_' not in fp_.read():
|
||||
return False
|
||||
except (OSError, IOError):
|
||||
# No /proc/modules? Are we on Windows? Or Solaris?
|
||||
return False
|
||||
return 'libvirtd' in __salt__['cmd.run'](__grains__['ps'])
|
||||
|
|
|
@ -310,7 +310,8 @@ def _text_or_file(input_):
|
|||
content to be parsed.
|
||||
'''
|
||||
if os.path.isfile(input_):
|
||||
return salt.utils.fopen(input_).read()
|
||||
with salt.utils.fopen(input_) as fp_:
|
||||
return fp_.read()
|
||||
else:
|
||||
return input_
|
||||
|
||||
|
|
|
@ -773,9 +773,10 @@ def is_hyper():
|
|||
# virtual_subtype isn't set everywhere.
|
||||
return False
|
||||
try:
|
||||
if 'xen_' not in salt.utils.fopen('/proc/modules').read():
|
||||
return False
|
||||
except IOError:
|
||||
with salt.utils.fopen('/proc/modules') as fp_:
|
||||
if 'xen_' not in fp_.read():
|
||||
return False
|
||||
except (OSError, IOError):
|
||||
return False
|
||||
# there must be a smarter way...
|
||||
return 'xenstore' in __salt__['cmd.run'](__grains__['ps'])
|
||||
|
|
|
@ -518,9 +518,10 @@ def upgrade_bootstrap(directory='.',
|
|||
if not os.path.isdir(dbuild):
|
||||
os.makedirs(dbuild)
|
||||
# only try to download once per buildout checkout
|
||||
salt.utils.fopen(os.path.join(
|
||||
dbuild,
|
||||
'{0}.updated_bootstrap'.format(buildout_ver)))
|
||||
with salt.utils.fopen(os.path.join(
|
||||
dbuild,
|
||||
'{0}.updated_bootstrap'.format(buildout_ver))):
|
||||
pass
|
||||
except (OSError, IOError):
|
||||
LOG.info('Bootstrap updated from repository')
|
||||
data = _urlopen(booturl).read()
|
||||
|
|
|
@ -81,6 +81,8 @@ def _walk_through(job_dir):
|
|||
if not os.path.isfile(load_path):
|
||||
continue
|
||||
|
||||
# serial.load() closes the filehandle, no need to enclose this in a
|
||||
# "with" block.
|
||||
job = serial.load(salt.utils.fopen(load_path, 'rb'))
|
||||
jid = job['jid']
|
||||
yield jid, job, t_path, final
|
||||
|
|
|
@ -1876,11 +1876,14 @@ class State(object):
|
|||
found = True
|
||||
reqs[r_state].append(chunk)
|
||||
continue
|
||||
if (fnmatch.fnmatch(chunk['name'], req_val) or
|
||||
fnmatch.fnmatch(chunk['__id__'], req_val)):
|
||||
if req_key == 'id' or chunk['state'] == req_key:
|
||||
found = True
|
||||
reqs[r_state].append(chunk)
|
||||
try:
|
||||
if (fnmatch.fnmatch(chunk['name'], req_val) or
|
||||
fnmatch.fnmatch(chunk['__id__'], req_val)):
|
||||
if req_key == 'id' or chunk['state'] == req_key:
|
||||
found = True
|
||||
reqs[r_state].append(chunk)
|
||||
except KeyError:
|
||||
raise SaltRenderError('Could not locate requisite of [{0}] present in state with name [{1}]'.format(req_key, chunk['name']))
|
||||
if not found:
|
||||
return 'unmet', ()
|
||||
fun_stats = set()
|
||||
|
|
|
@ -435,7 +435,7 @@ def _clean_dir(root, keep, exclude_pat):
|
|||
while True:
|
||||
fn_ = os.path.dirname(fn_)
|
||||
real_keep.add(fn_)
|
||||
if fn_ in ['/', ''.join([os.path.splitdrive(fn_)[0], '\\'])]:
|
||||
if fn_ in ['/', ''.join([os.path.splitdrive(fn_)[0], '\\\\'])]:
|
||||
break
|
||||
|
||||
def _delete_not_kept(nfn):
|
||||
|
@ -1269,9 +1269,15 @@ def managed(name,
|
|||
incompatible with the ``contents`` options.
|
||||
|
||||
template
|
||||
If this setting is applied then the named templating engine will be
|
||||
used to render the downloaded file, currently jinja, mako, and wempy
|
||||
are supported
|
||||
If this setting is applied, the named templating engine will be used to
|
||||
render the downloaded file. The following templates are supported:
|
||||
|
||||
- :mod:`cheetah<salt.renderers.cheetah>`
|
||||
- :mod:`genshi<salt.renderers.genshi>`
|
||||
- :mod:`jinja<salt.renderers.jinja>`
|
||||
- :mod:`mako<salt.renderers.mako>`
|
||||
- :mod:`py<salt.renderers.py>`
|
||||
- :mod:`wempy<salt.renderers.wempy>`
|
||||
|
||||
makedirs : False
|
||||
If set to ``True``, then the parent directories will be created to
|
||||
|
@ -2543,8 +2549,7 @@ def recurse(name,
|
|||
return _error(
|
||||
ret, 'The path {0} exists and is not a directory'.format(name))
|
||||
if not __opts__['test']:
|
||||
__salt__['file.makedirs_perms'](
|
||||
name, user, group, int(str(dir_mode), 8) if dir_mode else None)
|
||||
__salt__['file.makedirs_perms'](name, user, group, dir_mode)
|
||||
|
||||
def add_comment(path, comment):
|
||||
comments = ret['comment'].setdefault(path, [])
|
||||
|
@ -2612,7 +2617,7 @@ def recurse(name,
|
|||
merge_ret(path, _ret)
|
||||
return
|
||||
else:
|
||||
os.remove(path)
|
||||
__salt__['file.remove'](path)
|
||||
_ret['changes'] = {'diff': 'Replaced file with a directory'}
|
||||
merge_ret(path, _ret)
|
||||
|
||||
|
|
|
@ -2565,7 +2565,8 @@ def mod_init(low):
|
|||
if low['fun'] == 'installed' or low['fun'] == 'latest':
|
||||
rtag = __gen_rtag()
|
||||
if not os.path.exists(rtag):
|
||||
salt.utils.fopen(rtag, 'w+').write('')
|
||||
with salt.utils.fopen(rtag, 'w+'):
|
||||
pass
|
||||
return ret
|
||||
return False
|
||||
|
||||
|
|
|
@ -397,7 +397,8 @@ def managed(name, ppa=None, **kwargs):
|
|||
|
||||
# empty file before configure
|
||||
if kwargs.get('clean_file', False):
|
||||
salt.utils.fopen(kwargs['file'], 'w').close()
|
||||
with salt.utils.fopen(kwargs['file'], 'w'):
|
||||
pass
|
||||
|
||||
try:
|
||||
if __grains__['os_family'] == 'Debian':
|
||||
|
|
|
@ -112,7 +112,8 @@ def present(
|
|||
result = __salt__['ssh.check_known_host'](user, name,
|
||||
key=key,
|
||||
fingerprint=fingerprint,
|
||||
config=config)
|
||||
config=config,
|
||||
port=port)
|
||||
except CommandNotFoundError as err:
|
||||
ret['result'] = False
|
||||
ret['comment'] = 'ssh.check_known_host error: {0}'.format(err)
|
||||
|
|
|
@ -627,10 +627,11 @@ def pem_managed(name,
|
|||
|
||||
new = __salt__['x509.get_pem_entry'](text=text)
|
||||
|
||||
if os.path.isfile(name):
|
||||
current = salt.utils.fopen(name).read()
|
||||
else:
|
||||
current = '{0} does not exist.'.format(name)
|
||||
try:
|
||||
with salt.utils.fopen(name) as fp_:
|
||||
current = fp_.read()
|
||||
except (OSError, IOError):
|
||||
current = '{0} does not exist or is unreadable'.format(name)
|
||||
|
||||
if new == current:
|
||||
ret['result'] = True
|
||||
|
|
|
@ -231,21 +231,22 @@ class AESReqServerMixin(object):
|
|||
|
||||
elif os.path.isfile(pubfn):
|
||||
# The key has been accepted, check it
|
||||
if salt.utils.fopen(pubfn, 'r').read().strip() != load['pub'].strip():
|
||||
log.error(
|
||||
'Authentication attempt from {id} failed, the public '
|
||||
'keys did not match. This may be an attempt to compromise '
|
||||
'the Salt cluster.'.format(**load)
|
||||
)
|
||||
# put denied minion key into minions_denied
|
||||
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
|
||||
fp_.write(load['pub'])
|
||||
eload = {'result': False,
|
||||
'id': load['id'],
|
||||
'pub': load['pub']}
|
||||
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
|
||||
return {'enc': 'clear',
|
||||
'load': {'ret': False}}
|
||||
with salt.utils.fopen(pubfn, 'r') as pubfn_handle:
|
||||
if pubfn_handle.read().strip() != load['pub'].strip():
|
||||
log.error(
|
||||
'Authentication attempt from {id} failed, the public '
|
||||
'keys did not match. This may be an attempt to compromise '
|
||||
'the Salt cluster.'.format(**load)
|
||||
)
|
||||
# put denied minion key into minions_denied
|
||||
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
|
||||
fp_.write(load['pub'])
|
||||
eload = {'result': False,
|
||||
'id': load['id'],
|
||||
'pub': load['pub']}
|
||||
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
|
||||
return {'enc': 'clear',
|
||||
'load': {'ret': False}}
|
||||
|
||||
elif not os.path.isfile(pubfn_pend):
|
||||
# The key has not been accepted, this is a new minion
|
||||
|
@@ -317,58 +318,60 @@ class AESReqServerMixin(object):
                # Check if the keys are the same and error out if this is the
                # case. Otherwise log the fact that the minion is still
                # pending.
                if salt.utils.fopen(pubfn_pend, 'r').read() != load['pub']:
                    log.error(
                        'Authentication attempt from {id} failed, the public '
                        'key in pending did not match. This may be an '
                        'attempt to compromise the Salt cluster.'
                        .format(**load)
                    )
                    # put denied minion key into minions_denied
                    with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
                        fp_.write(load['pub'])
                    eload = {'result': False,
                             'id': load['id'],
                             'pub': load['pub']}
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear',
                            'load': {'ret': False}}
                else:
                    log.info(
                        'Authentication failed from host {id}, the key is in '
                        'pending and needs to be accepted with salt-key '
                        '-a {id}'.format(**load)
                    )
                    eload = {'result': True,
                             'act': 'pend',
                             'id': load['id'],
                             'pub': load['pub']}
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear',
                            'load': {'ret': True}}
                with salt.utils.fopen(pubfn_pend, 'r') as pubfn_handle:
                    if pubfn_handle.read() != load['pub']:
                        log.error(
                            'Authentication attempt from {id} failed, the public '
                            'key in pending did not match. This may be an '
                            'attempt to compromise the Salt cluster.'
                            .format(**load)
                        )
                        # put denied minion key into minions_denied
                        with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
                            fp_.write(load['pub'])
                        eload = {'result': False,
                                 'id': load['id'],
                                 'pub': load['pub']}
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                        return {'enc': 'clear',
                                'load': {'ret': False}}
                    else:
                        log.info(
                            'Authentication failed from host {id}, the key is in '
                            'pending and needs to be accepted with salt-key '
                            '-a {id}'.format(**load)
                        )
                        eload = {'result': True,
                                 'act': 'pend',
                                 'id': load['id'],
                                 'pub': load['pub']}
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                        return {'enc': 'clear',
                                'load': {'ret': True}}
            else:
                # This key is in pending and has been configured to be
                # auto-signed. Check to see if it is the same key, and if
                # so, pass on doing anything here, and let it get automatically
                # accepted below.
                if salt.utils.fopen(pubfn_pend, 'r').read() != load['pub']:
                    log.error(
                        'Authentication attempt from {id} failed, the public '
                        'keys in pending did not match. This may be an '
                        'attempt to compromise the Salt cluster.'
                        .format(**load)
                    )
                    # put denied minion key into minions_denied
                    with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
                        fp_.write(load['pub'])
                    eload = {'result': False,
                             'id': load['id'],
                             'pub': load['pub']}
                    self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                    return {'enc': 'clear',
                            'load': {'ret': False}}
                else:
                    pass
                with salt.utils.fopen(pubfn_pend, 'r') as pubfn_handle:
                    if pubfn_handle.read() != load['pub']:
                        log.error(
                            'Authentication attempt from {id} failed, the public '
                            'keys in pending did not match. This may be an '
                            'attempt to compromise the Salt cluster.'
                            .format(**load)
                        )
                        # put denied minion key into minions_denied
                        with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
                            fp_.write(load['pub'])
                        eload = {'result': False,
                                 'id': load['id'],
                                 'pub': load['pub']}
                        self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
                        return {'enc': 'clear',
                                'load': {'ret': False}}
                    else:
                        pass

        else:
            # Something happened that I have not accounted for, FAIL!

@@ -641,7 +641,7 @@ def output_profile(pr, stats_path='/tmp/stats', stop=False, id_=None):
            ficn = os.path.join(stats_path, '{0}.{1}.stats'.format(id_, date))
            if not os.path.exists(ficp):
                pr.dump_stats(ficp)
                with open(ficn, 'w') as fic:
                with fopen(ficn, 'w') as fic:
                    pstats.Stats(pr, stream=fic).sort_stats('cumulative')
            log.info('PROFILING: {0} generated'.format(ficp))
            log.info('PROFILING (cumulative): {0} generated'.format(ficn))

@@ -2407,7 +2407,8 @@ def lock_file(filename, interval=.5, timeout=15):
        else:
            break

    salt.utils.fopen(lock, 'a').close()
    with salt.utils.fopen(lock, 'a'):
        pass


def unlock_file(filename):

@@ -46,7 +46,8 @@ def cache_jobs(opts, jid, ret):
    jdir = os.path.dirname(fn_)
    if not os.path.isdir(jdir):
        os.makedirs(jdir)
    salt.utils.fopen(fn_, 'w+b').write(serial.dumps(ret))
    with salt.utils.fopen(fn_, 'w+b') as fp_:
        fp_.write(serial.dumps(ret))


def _read_proc_file(path, opts):

@@ -646,8 +646,8 @@ class SaltNova(object):
        '''
        nt_ks = self.compute_conn
        if pubfile:
            ifile = salt.utils.fopen(pubfile, 'r')
            pubkey = ifile.read()
            with salt.utils.fopen(pubfile, 'r') as fp_:
                pubkey = fp_.read()
        if not pubkey:
            return False
        nt_ks.keypairs.create(name, public_key=pubkey)

@@ -8,7 +8,7 @@ from __future__ import absolute_import

# Import python libs
import logging
from sys import stdout
import sys
from os import makedirs
from os.path import dirname, isdir
from errno import EEXIST

@@ -172,7 +172,7 @@ class SaltSwift(object):
            headers, body = self.conn.get_object(cont, obj, resp_chunk_size=65536)

            if return_bin is True:
                fp = stdout
                fp = sys.stdout
            else:
                dirpath = dirname(local_file)
                if dirpath and not isdir(dirpath):

@@ -446,7 +446,7 @@ class ProcessManager(object):
                # call 'taskkill', it will leave a 'taskkill' zombie process.
                # We want to avoid this.
                return
            with open(os.devnull, 'wb') as devnull:
            with salt.utils.fopen(os.devnull, 'wb') as devnull:
                for pid, p_map in six.iteritems(self._process_map):
                    # On Windows, we need to explicitly terminate sub-processes
                    # because the processes don't have a sigterm handler.

@@ -278,8 +278,10 @@ def _get_jinja_error(trace, context=None):
    if add_log:
        if template_path:
            out = '\n{0}\n'.format(msg.splitlines()[0])
            with salt.utils.fopen(template_path) as fp_:
                template_contents = fp_.read()
            out += salt.utils.get_context(
                salt.utils.fopen(template_path).read(),
                template_contents,
                line,
                marker=' <======================')
        else:

@@ -23,6 +23,7 @@ else:

# Import salt libs
from salt.log import is_console_configured
from salt.log.setup import LOG_LEVELS
from salt.exceptions import SaltClientError, SaltSystemExit, \
    CommandExecutionError
import salt.defaults.exitcodes

@@ -515,8 +516,10 @@ def verify_log(opts):
    '''
    If an insecure logging configuration is found, show a warning
    '''
    if opts.get('log_level') in ('garbage', 'trace', 'debug'):
        log.warning('Insecure logging configuration detected! Sensitive data may be logged.')
    level = LOG_LEVELS.get(opts.get('log_level').lower(), logging.NOTSET)

    if level < logging.INFO:
        log.warn('Insecure logging configuration detected! Sensitive data may be logged.')


def win_verify_env(dirs, user, permissive=False, pki_dir='', skip_extra=False):

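The rewritten verify_log check maps the configured log_level name to its numeric value through salt's LOG_LEVELS mapping and warns whenever that value falls below logging.INFO (20). That covers debug, trace and garbage without enumerating them, and unknown names fall back to NOTSET (0), which also triggers the warning. A rough standalone sketch of the comparison, using an assumed subset of the LOG_LEVELS mapping rather than the real one:

    import logging

    # Assumed subset of salt.log.setup.LOG_LEVELS; the real mapping defines more names.
    LOG_LEVELS = {
        'garbage': 1,
        'trace': 5,
        'debug': logging.DEBUG,      # 10
        'info': logging.INFO,        # 20
        'warning': logging.WARNING,  # 30
    }

    def is_insecure(log_level):
        # Unknown names fall back to NOTSET (0), which also counts as insecure.
        level = LOG_LEVELS.get(log_level.lower(), logging.NOTSET)
        return level < logging.INFO

    print(is_insecure('trace'))       # True
    print(is_insecure('info'))        # False
    print(is_insecure('cheeseshop'))  # True, matching the new unit test later in this diff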
@@ -34,10 +34,16 @@ class VirtKey(object):
        Accept the provided key
        '''
        try:
            expiry = int(salt.utils.fopen(self.path, 'r').read())
        except IOError:
            log.error('Request to sign key for minion "{0}" on hyper "{1}" denied: '
                      'no authorization'.format(self.id, self.hyper))
            with salt.utils.fopen(self.path, 'r') as fp_:
                expiry = int(fp_.read())
        except (OSError, IOError):
            log.error(
                'Request to sign key for minion \'%s\' on hyper \'%s\' '
                'denied: no authorization', self.id, self.hyper
            )
            return False
        except ValueError:
            log.error('Invalid expiry data in %s', self.path)
            return False

        # Limit acceptance window to 10 minutes

@@ -119,10 +119,9 @@ class Terminal(object):
        # Let's avoid Zombies!!!
        _cleanup()

        if not args and not executable and not shell:
        if not args and not executable:
            raise TerminalException(
                'You need to pass at least one of \'args\', \'executable\' '
                'or \'shell=True\''
                'You need to pass at least one of "args", "executable" '
            )

        self.args = args

@ -1153,9 +1153,8 @@ class TestDaemon(object):
|
|||
|
||||
for entry in ('master', 'minion', 'sub_minion', 'syndic', 'syndic_master'):
|
||||
computed_config = copy.deepcopy(locals()['{0}_opts'.format(entry)])
|
||||
salt.utils.fopen(os.path.join(TMP_CONF_DIR, entry), 'w').write(
|
||||
yaml.dump(computed_config, default_flow_style=False)
|
||||
)
|
||||
with salt.utils.fopen(os.path.join(TMP_CONF_DIR, entry), 'w') as fp_:
|
||||
fp_.write(yaml.dump(computed_config, default_flow_style=False))
|
||||
sub_minion_computed_config = copy.deepcopy(sub_minion_opts)
|
||||
salt.utils.fopen(os.path.join(TMP_SUB_MINION_CONF_DIR, 'minion'), 'w').write(
|
||||
yaml.dump(sub_minion_computed_config, default_flow_style=False)
|
||||
|
|
|
@ -64,7 +64,8 @@ class GrainsTargetingTest(integration.ShellCase):
|
|||
# Create a minion key, but do not start the "fake" minion. This mimics a
|
||||
# disconnected minion.
|
||||
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected')
|
||||
salt.utils.fopen(key_file, 'a').close()
|
||||
with salt.utils.fopen(key_file, 'a'):
|
||||
pass
|
||||
|
||||
# ping disconnected minion and ensure it times out and returns with correct message
|
||||
try:
|
||||
|
|
|
@ -45,7 +45,8 @@ class StdTest(integration.ModuleCase):
|
|||
# create fake minion
|
||||
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'footest')
|
||||
# touch the file
|
||||
salt.utils.fopen(key_file, 'a').close()
|
||||
with salt.utils.fopen(key_file, 'a'):
|
||||
pass
|
||||
# ping that minion and ensure it times out
|
||||
try:
|
||||
cmd_iter = self.client.cmd_cli(
|
||||
|
@ -125,7 +126,8 @@ class StdTest(integration.ModuleCase):
|
|||
# Create a minion key, but do not start the "fake" minion. This mimics
|
||||
# a disconnected minion.
|
||||
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected')
|
||||
salt.utils.fopen(key_file, 'a').close()
|
||||
with salt.utils.fopen(key_file, 'a'):
|
||||
pass
|
||||
|
||||
# ping disconnected minion and ensure it times out and returns with correct message
|
||||
try:
|
||||
|
|
|
@@ -0,0 +1 @@
Hello world!

@ -168,8 +168,8 @@ class HostsModuleTest(integration.ModuleCase):
|
|||
# use an empty one so we can prove the syntax of the entries
|
||||
# being added by the hosts module
|
||||
self.__clear_hosts()
|
||||
f = salt.utils.fopen(HFN, 'w')
|
||||
f.close()
|
||||
with salt.utils.fopen(HFN, 'w'):
|
||||
pass
|
||||
|
||||
self.assertTrue(
|
||||
self.run_function(
|
||||
|
@ -207,7 +207,8 @@ class HostsModuleTest(integration.ModuleCase):
|
|||
)
|
||||
|
||||
# now read the lines and ensure they're formatted correctly
|
||||
lines = salt.utils.fopen(HFN, 'r').read().splitlines()
|
||||
with salt.utils.fopen(HFN, 'r') as fp_:
|
||||
lines = fp_.read().splitlines()
|
||||
self.assertEqual(lines, [
|
||||
'192.168.1.3\t\thost3.fqdn.com',
|
||||
'192.168.1.1\t\thost1.fqdn.com host1 host1-reorder',
|
||||
|
|
|
@ -160,6 +160,9 @@ class StateModuleTest(integration.ModuleCase,
|
|||
ret = self.run_function('state.sls', mods='testappend.step-2')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
with salt.utils.fopen(testfile, 'r') as fp_:
|
||||
testfile_contents = fp_.read()
|
||||
|
||||
contents = textwrap.dedent('''\
|
||||
# set variable identifying the chroot you work in (used in the prompt below)
|
||||
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
|
||||
|
@ -183,7 +186,7 @@ class StateModuleTest(integration.ModuleCase,
|
|||
contents += os.linesep
|
||||
|
||||
self.assertMultiLineEqual(
|
||||
contents, salt.utils.fopen(testfile, 'r').read())
|
||||
contents, testfile_contents)
|
||||
|
||||
# Re-append switching order
|
||||
ret = self.run_function('state.sls', mods='testappend.step-2')
|
||||
|
@ -192,8 +195,10 @@ class StateModuleTest(integration.ModuleCase,
|
|||
ret = self.run_function('state.sls', mods='testappend.step-1')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
self.assertMultiLineEqual(
|
||||
contents, salt.utils.fopen(testfile, 'r').read())
|
||||
with salt.utils.fopen(testfile, 'r') as fp_:
|
||||
testfile_contents = fp_.read()
|
||||
|
||||
self.assertMultiLineEqual(contents, testfile_contents)
|
||||
|
||||
def test_issue_1876_syntax_error(self):
|
||||
'''
|
||||
|
@ -218,7 +223,7 @@ class StateModuleTest(integration.ModuleCase,
|
|||
)
|
||||
|
||||
def test_issue_1879_too_simple_contains_check(self):
|
||||
contents = textwrap.dedent('''\
|
||||
expected = textwrap.dedent('''\
|
||||
# set variable identifying the chroot you work in (used in the prompt below)
|
||||
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
|
||||
debian_chroot=$(cat /etc/debian_chroot)
|
||||
|
@ -257,10 +262,9 @@ class StateModuleTest(integration.ModuleCase,
|
|||
|
||||
# Does it match?
|
||||
try:
|
||||
self.assertMultiLineEqual(
|
||||
contents,
|
||||
salt.utils.fopen(testfile, 'r').read()
|
||||
)
|
||||
with salt.utils.fopen(testfile, 'r') as fp_:
|
||||
contents = fp_.read()
|
||||
self.assertMultiLineEqual(expected, contents)
|
||||
# Make sure we don't re-append existing text
|
||||
ret = self.run_function(
|
||||
'state.sls', mods='issue-1879.step-1', timeout=120
|
||||
|
@ -271,10 +275,10 @@ class StateModuleTest(integration.ModuleCase,
|
|||
'state.sls', mods='issue-1879.step-2', timeout=120
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertMultiLineEqual(
|
||||
contents,
|
||||
salt.utils.fopen(testfile, 'r').read()
|
||||
)
|
||||
|
||||
with salt.utils.fopen(testfile, 'r') as fp_:
|
||||
contents = fp_.read()
|
||||
self.assertMultiLineEqual(expected, contents)
|
||||
except Exception:
|
||||
if os.path.exists(testfile):
|
||||
shutil.copy(testfile, testfile + '.bak')
|
||||
|
@ -346,7 +350,8 @@ class StateModuleTest(integration.ModuleCase,
|
|||
'files', 'file', 'base', 'issue-2068-template-str-no-dot.sls'
|
||||
)
|
||||
|
||||
template = salt.utils.fopen(template_path, 'r').read()
|
||||
with salt.utils.fopen(template_path, 'r') as fp_:
|
||||
template = fp_.read()
|
||||
try:
|
||||
ret = self.run_function(
|
||||
'state.template_str', [template], timeout=120
|
||||
|
@ -387,7 +392,8 @@ class StateModuleTest(integration.ModuleCase,
|
|||
'files', 'file', 'base', 'issue-2068-template-str.sls'
|
||||
)
|
||||
|
||||
template = salt.utils.fopen(template_path, 'r').read()
|
||||
with salt.utils.fopen(template_path, 'r') as fp_:
|
||||
template = fp_.read()
|
||||
try:
|
||||
ret = self.run_function(
|
||||
'state.template_str', [template], timeout=120
|
||||
|
|
|
@ -56,7 +56,8 @@ class EnabledTest(integration.ModuleCase):
|
|||
ret_key = 'test_|-shell_enabled_|-{0}_|-configurable_test_state'.format(enabled_ret)
|
||||
|
||||
try:
|
||||
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
{{% set shell_enabled = salt['cmd.run']("{0}").strip() %}}
|
||||
|
||||
shell_enabled:
|
||||
|
@ -83,7 +84,8 @@ class EnabledTest(integration.ModuleCase):
|
|||
ret_key = 'test_|-shell_enabled_|-{0}_|-configurable_test_state'.format(disabled_ret)
|
||||
|
||||
try:
|
||||
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
{{% set shell_disabled = salt['cmd.run']("{0}", python_shell=False) %}}
|
||||
|
||||
shell_enabled:
|
||||
|
|
|
@ -63,7 +63,8 @@ class CMDRunRedirectTest(integration.ModuleCase,
|
|||
test cmd.run unless
|
||||
'''
|
||||
state_key = 'cmd_|-/var/log/messages_|-/var/log/messages_|-run'
|
||||
salt.utils.fopen(self.state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(self.state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
/var/log/messages:
|
||||
cmd.run:
|
||||
- unless: echo cheese > {0}
|
||||
|
@ -77,7 +78,8 @@ class CMDRunRedirectTest(integration.ModuleCase,
|
|||
test cmd.run creates already there
|
||||
'''
|
||||
state_key = 'cmd_|-touch {0}_|-touch {0}_|-run'.format(self.test_file)
|
||||
salt.utils.fopen(self.state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(self.state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
touch {0}:
|
||||
cmd.run:
|
||||
- creates: {0}
|
||||
|
@ -93,7 +95,8 @@ class CMDRunRedirectTest(integration.ModuleCase,
|
|||
'''
|
||||
os.remove(self.test_file)
|
||||
state_key = 'cmd_|-touch {0}_|-touch {0}_|-run'.format(self.test_file)
|
||||
salt.utils.fopen(self.state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(self.state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
touch {0}:
|
||||
cmd.run:
|
||||
- creates: {0}
|
||||
|
@ -108,7 +111,8 @@ class CMDRunRedirectTest(integration.ModuleCase,
|
|||
test cmd.run with shell redirect
|
||||
'''
|
||||
state_key = 'cmd_|-date > {0}_|-date > {0}_|-run'.format(self.test_file)
|
||||
salt.utils.fopen(self.state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(self.state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
date > {0}:
|
||||
cmd.run
|
||||
'''.format(self.test_file)))
|
||||
|
@ -139,7 +143,8 @@ class CMDRunWatchTest(integration.ModuleCase,
|
|||
saltines_key = 'cmd_|-saltines_|-echo_|-run'
|
||||
biscuits_key = 'cmd_|-biscuits_|-echo hello_|-wait'
|
||||
|
||||
salt.utils.fopen(self.state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(self.state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
saltines:
|
||||
cmd.run:
|
||||
- name: echo
|
||||
|
|
|
@ -407,8 +407,10 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
state_key = 'file_|-{0}_|-{0}_|-managed'.format(funny_file)
|
||||
|
||||
try:
|
||||
salt.utils.fopen(funny_url_path, 'w').close()
|
||||
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(funny_url_path, 'w'):
|
||||
pass
|
||||
with salt.utils.fopen(state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
{0}:
|
||||
file.managed:
|
||||
- source: {1}
|
||||
|
@ -438,7 +440,8 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
managed_files[typ] = tempfile.mkstemp()[1]
|
||||
state_keys[typ] = 'file_|-{0} file_|-{1}_|-managed'.format(typ, managed_files[typ])
|
||||
try:
|
||||
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
|
||||
with salt.utils.fopen(state_file, 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
bool file:
|
||||
file.managed:
|
||||
- name: {bool}
|
||||
|
@ -550,13 +553,15 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
os.makedirs(name)
|
||||
|
||||
strayfile = os.path.join(name, 'strayfile')
|
||||
salt.utils.fopen(strayfile, 'w').close()
|
||||
with salt.utils.fopen(strayfile, 'w'):
|
||||
pass
|
||||
|
||||
straydir = os.path.join(name, 'straydir')
|
||||
if not os.path.isdir(straydir):
|
||||
os.makedirs(straydir)
|
||||
|
||||
salt.utils.fopen(os.path.join(straydir, 'strayfile2'), 'w').close()
|
||||
with salt.utils.fopen(os.path.join(straydir, 'strayfile2'), 'w'):
|
||||
pass
|
||||
|
||||
ret = self.run_state('file.directory', name=name, clean=True)
|
||||
try:
|
||||
|
@ -576,17 +581,20 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
os.makedirs(name)
|
||||
|
||||
strayfile = os.path.join(name, 'strayfile')
|
||||
salt.utils.fopen(strayfile, 'w').close()
|
||||
with salt.utils.fopen(strayfile, 'w'):
|
||||
pass
|
||||
|
||||
straydir = os.path.join(name, 'straydir')
|
||||
if not os.path.isdir(straydir):
|
||||
os.makedirs(straydir)
|
||||
|
||||
strayfile2 = os.path.join(straydir, 'strayfile2')
|
||||
salt.utils.fopen(strayfile2, 'w').close()
|
||||
with salt.utils.fopen(strayfile2, 'w'):
|
||||
pass
|
||||
|
||||
keepfile = os.path.join(straydir, 'keepfile')
|
||||
salt.utils.fopen(keepfile, 'w').close()
|
||||
with salt.utils.fopen(keepfile, 'w'):
|
||||
pass
|
||||
|
||||
ret = self.run_state('file.directory',
|
||||
name=name,
|
||||
|
@ -610,17 +618,20 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
os.makedirs(name)
|
||||
|
||||
strayfile = os.path.join(name, 'strayfile')
|
||||
salt.utils.fopen(strayfile, 'w').close()
|
||||
with salt.utils.fopen(strayfile, 'w'):
|
||||
pass
|
||||
|
||||
straydir = os.path.join(name, 'straydir')
|
||||
if not os.path.isdir(straydir):
|
||||
os.makedirs(straydir)
|
||||
|
||||
strayfile2 = os.path.join(straydir, 'strayfile2')
|
||||
salt.utils.fopen(strayfile2, 'w').close()
|
||||
with salt.utils.fopen(strayfile2, 'w'):
|
||||
pass
|
||||
|
||||
keepfile = os.path.join(straydir, 'keepfile')
|
||||
salt.utils.fopen(keepfile, 'w').close()
|
||||
with salt.utils.fopen(keepfile, 'w'):
|
||||
pass
|
||||
|
||||
ret = self.run_state('file.directory',
|
||||
test=True,
|
||||
|
@ -853,10 +864,12 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
if not os.path.isdir(name):
|
||||
os.makedirs(name)
|
||||
strayfile = os.path.join(name, 'strayfile')
|
||||
salt.utils.fopen(strayfile, 'w').close()
|
||||
with salt.utils.fopen(strayfile, 'w'):
|
||||
pass
|
||||
|
||||
# Corner cases: replacing file with a directory and vice versa
|
||||
salt.utils.fopen(os.path.join(name, '36'), 'w').close()
|
||||
with salt.utils.fopen(os.path.join(name, '36'), 'w'):
|
||||
pass
|
||||
os.makedirs(os.path.join(name, 'scene33'))
|
||||
ret = self.run_state(
|
||||
'file.recurse', name=name, source='salt://grail', clean=True)
|
||||
|
@ -876,10 +889,12 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
if not os.path.isdir(name):
|
||||
os.makedirs(name)
|
||||
strayfile = os.path.join(name, 'strayfile')
|
||||
salt.utils.fopen(strayfile, 'w').close()
|
||||
with salt.utils.fopen(strayfile, 'w'):
|
||||
pass
|
||||
|
||||
# Corner cases: replacing file with a directory and vice versa
|
||||
salt.utils.fopen(os.path.join(name, '32'), 'w').close()
|
||||
with salt.utils.fopen(os.path.join(name, '32'), 'w'):
|
||||
pass
|
||||
os.makedirs(os.path.join(name, 'scene34'))
|
||||
ret = self.run_state('file.recurse',
|
||||
name=name,
|
||||
|
@@ -894,6 +909,36 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
        finally:
            shutil.rmtree(name, ignore_errors=True)

    def test_recurse_issue_34945(self):
        '''
        This tests the case where the source dir for the file.recurse state
        does not contain any files (only subdirectories), and the dir_mode is
        being managed. For a long time, this corner case resulted in the top
        level of the destination directory being created with the wrong initial
        permissions, a problem that would be corrected later on in the
        file.recurse state via running state.directory. However, the
        file.directory state only gets called when there are files to be
        managed in that directory, and when the source directory contains only
        subdirectories, the incorrectly-set initial perms would not be
        repaired.

        This was fixed in https://github.com/saltstack/salt/pull/35309
        '''
        dir_mode = '2775'
        issue_dir = 'issue-34945'
        name = os.path.join(integration.TMP, issue_dir)

        try:
            ret = self.run_state('file.recurse',
                                 name=name,
                                 source='salt://' + issue_dir,
                                 dir_mode=dir_mode)
            self.assertSaltTrueReturn(ret)
            actual_dir_mode = oct(stat.S_IMODE(os.stat(name).st_mode))[-4:]
            self.assertEqual(dir_mode, actual_dir_mode)
        finally:
            shutil.rmtree(name, ignore_errors=True)

    def test_replace(self):
        '''
        file.replace

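The assertion in the new test relies on a small trick: stat.S_IMODE strips the file-type bits, and slicing the last four characters of oct(...) yields the permission digits whether the prefix is '0' (Python 2) or '0o' (Python 3). A standalone sketch of that comparison (the directory and mode below are illustrative, not taken from the test suite):

    import os
    import stat
    import tempfile

    expected_mode = '2775'        # setgid + rwxrwxr-x, the same mode the test manages
    name = tempfile.mkdtemp()     # illustrative scratch directory
    os.chmod(name, int(expected_mode, 8))

    # oct() gives '02775' on Python 2 and '0o2775' on Python 3; [-4:] works for both.
    actual_mode = oct(stat.S_IMODE(os.stat(name).st_mode))[-4:]
    assert actual_mode == expected_mode
    os.rmdir(name)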
@ -1573,7 +1618,8 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
# Get a path to the temporary file
|
||||
tmp_file = os.path.join(integration.TMP, 'issue-2041-comment.txt')
|
||||
# Write some data to it
|
||||
salt.utils.fopen(tmp_file, 'w').write('hello\nworld\n')
|
||||
with salt.utils.fopen(tmp_file, 'w') as fp_:
|
||||
fp_.write('hello\nworld\n')
|
||||
# create the sls template
|
||||
template_lines = [
|
||||
'{0}:'.format(tmp_file),
|
||||
|
@ -1607,11 +1653,12 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
|
|||
# Get a path to the temporary file
|
||||
tmp_file = os.path.join(integration.TMP, 'issue-2379-file-append.txt')
|
||||
# Write some data to it
|
||||
salt.utils.fopen(tmp_file, 'w').write(
|
||||
'hello\nworld\n' + # Some junk
|
||||
'#PermitRootLogin yes\n' + # Commented text
|
||||
'# PermitRootLogin yes\n' # Commented text with space
|
||||
)
|
||||
with salt.utils.fopen(tmp_file, 'w') as fp_:
|
||||
fp_.write(
|
||||
'hello\nworld\n' # Some junk
|
||||
'#PermitRootLogin yes\n' # Commented text
|
||||
'# PermitRootLogin yes\n' # Commented text with space
|
||||
)
|
||||
# create the sls template
|
||||
template_lines = [
|
||||
'{0}:'.format(tmp_file),
|
||||
|
|
|
@ -35,12 +35,13 @@ class StateMatchTest(integration.ModuleCase):
|
|||
top_filename = 'issue-2167-ipcidr-match.sls'
|
||||
top_file = os.path.join(STATE_DIR, top_filename)
|
||||
try:
|
||||
salt.utils.fopen(top_file, 'w').write(
|
||||
'base:\n'
|
||||
' {0}:\n'
|
||||
' - match: ipcidr\n'
|
||||
' - test\n'.format(subnets[0])
|
||||
)
|
||||
with salt.utils.fopen(top_file, 'w') as fp_:
|
||||
fp_.write(
|
||||
'base:\n'
|
||||
' {0}:\n'
|
||||
' - match: ipcidr\n'
|
||||
' - test\n'.format(subnets[0])
|
||||
)
|
||||
ret = self.run_function('state.top', [top_filename])
|
||||
self.assertNotIn(
|
||||
'AttributeError: \'Matcher\' object has no attribute '
|
||||
|
|
|
@ -69,10 +69,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
def test_proper_path_joining(self):
|
||||
fpath = tempfile.mktemp()
|
||||
try:
|
||||
salt.utils.fopen(fpath, 'w').write(
|
||||
"root_dir: /\n"
|
||||
"key_logfile: key\n"
|
||||
)
|
||||
with salt.utils.fopen(fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: /\n'
|
||||
'key_logfile: key\n'
|
||||
)
|
||||
config = sconfig.master_config(fpath)
|
||||
# os.path.join behavior
|
||||
self.assertEqual(config['key_logfile'], os.path.join('/', 'key'))
|
||||
|
@ -88,10 +89,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
root_dir = os.path.join(tempdir, 'foo', 'bar')
|
||||
os.makedirs(root_dir)
|
||||
fpath = os.path.join(root_dir, 'config')
|
||||
salt.utils.fopen(fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
with salt.utils.fopen(fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
config = sconfig.master_config(fpath)
|
||||
self.assertEqual(config['log_file'], fpath)
|
||||
finally:
|
||||
|
@ -107,10 +109,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
os.makedirs(env_root_dir)
|
||||
env_fpath = os.path.join(env_root_dir, 'config-env')
|
||||
|
||||
salt.utils.fopen(env_fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
with salt.utils.fopen(env_fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
|
||||
os.environ['SALT_MASTER_CONFIG'] = env_fpath
|
||||
# Should load from env variable, not the default configuration file.
|
||||
|
@ -122,10 +125,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
root_dir = os.path.join(tempdir, 'foo', 'bar')
|
||||
os.makedirs(root_dir)
|
||||
fpath = os.path.join(root_dir, 'config')
|
||||
salt.utils.fopen(fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
with salt.utils.fopen(fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
# Let's set the environment variable, yet, since the configuration
|
||||
# file path is not the default one, i.e., the user has passed an
|
||||
# alternative configuration file form the CLI parser, the
|
||||
|
@ -149,10 +153,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
os.makedirs(env_root_dir)
|
||||
env_fpath = os.path.join(env_root_dir, 'config-env')
|
||||
|
||||
salt.utils.fopen(env_fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
with salt.utils.fopen(env_fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
|
||||
os.environ['SALT_MINION_CONFIG'] = env_fpath
|
||||
# Should load from env variable, not the default configuration file
|
||||
|
@ -164,10 +169,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
root_dir = os.path.join(tempdir, 'foo', 'bar')
|
||||
os.makedirs(root_dir)
|
||||
fpath = os.path.join(root_dir, 'config')
|
||||
salt.utils.fopen(fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
with salt.utils.fopen(fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
# Let's set the environment variable, yet, since the configuration
|
||||
# file path is not the default one, i.e., the user has passed an
|
||||
# alternative configuration file form the CLI parser, the
|
||||
|
@ -193,19 +199,21 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
# configuration settings using the provided client configuration
|
||||
# file
|
||||
master_config = os.path.join(env_root_dir, 'master')
|
||||
salt.utils.fopen(master_config, 'w').write(
|
||||
'blah: true\n'
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, master_config)
|
||||
)
|
||||
with salt.utils.fopen(master_config, 'w') as fp_:
|
||||
fp_.write(
|
||||
'blah: true\n'
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, master_config)
|
||||
)
|
||||
os.environ['SALT_MASTER_CONFIG'] = master_config
|
||||
|
||||
# Now the client configuration file
|
||||
env_fpath = os.path.join(env_root_dir, 'config-env')
|
||||
salt.utils.fopen(env_fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
with salt.utils.fopen(env_fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
|
||||
os.environ['SALT_CLIENT_CONFIG'] = env_fpath
|
||||
# Should load from env variable, not the default configuration file
|
||||
|
@ -218,10 +226,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
root_dir = os.path.join(tempdir, 'foo', 'bar')
|
||||
os.makedirs(root_dir)
|
||||
fpath = os.path.join(root_dir, 'config')
|
||||
salt.utils.fopen(fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
with salt.utils.fopen(fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
# Let's set the environment variable, yet, since the configuration
|
||||
# file path is not the default one, i.e., the user has passed an
|
||||
# alternative configuration file form the CLI parser, the
|
||||
|
@ -245,11 +254,12 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
|
||||
# Let's populate a minion configuration file with some basic
|
||||
# settings
|
||||
salt.utils.fopen(minion_config, 'w').write(
|
||||
'blah: false\n'
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(tempdir, minion_config)
|
||||
)
|
||||
with salt.utils.fopen(minion_config, 'w') as fp_:
|
||||
fp_.write(
|
||||
'blah: false\n'
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(tempdir, minion_config)
|
||||
)
|
||||
|
||||
# Now, let's populate an extra configuration file under minion.d
|
||||
# Notice that above we've set blah as False and below as True.
|
||||
|
@ -257,9 +267,8 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
# file so overrides can happen, the final value of blah should be
|
||||
# True.
|
||||
extra_config = os.path.join(minion_confd, 'extra.conf')
|
||||
salt.utils.fopen(extra_config, 'w').write(
|
||||
'blah: true\n'
|
||||
)
|
||||
with salt.utils.fopen(extra_config, 'w') as fp_:
|
||||
fp_.write('blah: true\n')
|
||||
|
||||
# Let's load the configuration
|
||||
config = sconfig.minion_config(minion_config)
|
||||
|
@ -280,11 +289,12 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
|
||||
# Let's populate a master configuration file with some basic
|
||||
# settings
|
||||
salt.utils.fopen(master_config, 'w').write(
|
||||
'blah: false\n'
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(tempdir, master_config)
|
||||
)
|
||||
with salt.utils.fopen(master_config, 'w') as fp_:
|
||||
fp_.write(
|
||||
'blah: false\n'
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(tempdir, master_config)
|
||||
)
|
||||
|
||||
# Now, let's populate an extra configuration file under master.d
|
||||
# Notice that above we've set blah as False and below as True.
|
||||
|
@ -292,9 +302,8 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
# file so overrides can happen, the final value of blah should be
|
||||
# True.
|
||||
extra_config = os.path.join(master_confd, 'extra.conf')
|
||||
salt.utils.fopen(extra_config, 'w').write(
|
||||
'blah: true\n'
|
||||
)
|
||||
with salt.utils.fopen(extra_config, 'w') as fp_:
|
||||
fp_.write('blah: true\n')
|
||||
|
||||
# Let's load the configuration
|
||||
config = sconfig.master_config(master_config)
|
||||
|
@ -791,10 +800,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
os.makedirs(env_root_dir)
|
||||
env_fpath = os.path.join(env_root_dir, 'config-env')
|
||||
|
||||
salt.utils.fopen(env_fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
with salt.utils.fopen(env_fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(env_root_dir, env_fpath)
|
||||
)
|
||||
|
||||
os.environ['SALT_CLOUD_CONFIG'] = env_fpath
|
||||
# Should load from env variable, not the default configuration file
|
||||
|
@ -806,10 +816,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
|
|||
root_dir = os.path.join(tempdir, 'foo', 'bar')
|
||||
os.makedirs(root_dir)
|
||||
fpath = os.path.join(root_dir, 'config')
|
||||
salt.utils.fopen(fpath, 'w').write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
with salt.utils.fopen(fpath, 'w') as fp_:
|
||||
fp_.write(
|
||||
'root_dir: {0}\n'
|
||||
'log_file: {1}\n'.format(root_dir, fpath)
|
||||
)
|
||||
# Let's set the environment variable, yet, since the configuration
|
||||
# file path is not the default one, i.e., the user has passed an
|
||||
# alternative configuration file form the CLI parser, the
|
||||
|
|
|
@ -64,8 +64,7 @@ if NO_MOCK is False:
|
|||
call.write('::1 localhost'),
|
||||
call.write('\n'),
|
||||
call.write('fe80::1%lo0 localhost'),
|
||||
call.write('\n'),
|
||||
call.close()]
|
||||
call.write('\n')]
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
|
|
|
@ -5,19 +5,26 @@
|
|||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Import Salt Testing libs
|
||||
from integration import TMP_CONF_DIR
|
||||
from salttesting import TestCase, skipIf
|
||||
from salttesting.helpers import ensure_in_syspath
|
||||
from salttesting.mock import (
|
||||
NO_MOCK,
|
||||
NO_MOCK_REASON,
|
||||
patch
|
||||
)
|
||||
|
||||
|
||||
ensure_in_syspath('../')
|
||||
|
||||
# Import Salt libs
|
||||
from salt import state
|
||||
import salt.state
|
||||
import salt.exceptions
|
||||
from salt.utils.odict import OrderedDict
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
|
@ -38,7 +45,21 @@ class StateCompilerTestCase(TestCase):
|
|||
ret = {'changes': {u'Français': {'old': 'something old',
|
||||
'new': 'something new'}},
|
||||
'result': True}
|
||||
state.format_log(ret)
|
||||
salt.state.format_log(ret)
|
||||
|
||||
@skipIf(sys.version_info < (2, 7), 'Context manager in assertEquals only available in > Py2.7')
|
||||
@patch('salt.state.State._gather_pillar')
|
||||
def test_render_error_on_invalid_requisite(self, state_patch):
|
||||
'''
|
||||
Test that the state compiler correctly deliver a rendering
|
||||
exception when a requisite cannot be resolved
|
||||
'''
|
||||
high_data = {'git': OrderedDict([('pkg', [OrderedDict([('require', [OrderedDict([('file', OrderedDict([('test1', 'test')]))])])]), 'installed', {'order': 10000}]), ('__sls__', u'issue_35226'), ('__env__', 'base')])}
|
||||
minion_opts = salt.config.minion_config(os.path.join(TMP_CONF_DIR, 'minion'))
|
||||
minion_opts['pillar'] = {'git': OrderedDict([('test1', 'test')])}
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
with self.assertRaises(salt.exceptions.SaltRenderError):
|
||||
state_obj.call_high(high_data)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@ -172,8 +172,8 @@ class TestGetTemplate(TestCase):
|
|||
fn_ = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_simple')
|
||||
with salt.utils.fopen(fn_) as fp_:
|
||||
out = render_jinja_tmpl(
|
||||
fp_.read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
fp_.read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
self.assertEqual(out, 'world\n')
|
||||
|
||||
def test_fallback_noloader(self):
|
||||
|
@ -182,8 +182,9 @@ class TestGetTemplate(TestCase):
|
|||
if the file is not contained in the searchpath
|
||||
'''
|
||||
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
|
||||
out = render_jinja_tmpl(
|
||||
salt.utils.fopen(filename).read(),
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
out = render_jinja_tmpl(
|
||||
fp_.read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
self.assertEqual(out, 'Hey world !a b !\n')
|
||||
|
||||
|
@ -199,8 +200,9 @@ class TestGetTemplate(TestCase):
|
|||
_fc = SaltCacheLoader.file_client
|
||||
SaltCacheLoader.file_client = lambda loader: fc
|
||||
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
|
||||
out = render_jinja_tmpl(
|
||||
salt.utils.fopen(filename).read(),
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
out = render_jinja_tmpl(
|
||||
fp_.read(),
|
||||
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
|
||||
'file_roots': self.local_opts['file_roots'],
|
||||
'pillar_roots': self.local_opts['pillar_roots']},
|
||||
|
@ -226,12 +228,13 @@ class TestGetTemplate(TestCase):
|
|||
fc = MockFileClient()
|
||||
_fc = SaltCacheLoader.file_client
|
||||
SaltCacheLoader.file_client = lambda loader: fc
|
||||
self.assertRaisesRegexp(
|
||||
SaltRenderError,
|
||||
expected,
|
||||
render_jinja_tmpl,
|
||||
salt.utils.fopen(filename).read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
self.assertRaisesRegexp(
|
||||
SaltRenderError,
|
||||
expected,
|
||||
render_jinja_tmpl,
|
||||
fp_.read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
SaltCacheLoader.file_client = _fc
|
||||
|
||||
def test_macro_additional_log_for_undefined(self):
|
||||
|
@ -251,12 +254,13 @@ class TestGetTemplate(TestCase):
|
|||
fc = MockFileClient()
|
||||
_fc = SaltCacheLoader.file_client
|
||||
SaltCacheLoader.file_client = lambda loader: fc
|
||||
self.assertRaisesRegexp(
|
||||
SaltRenderError,
|
||||
expected,
|
||||
render_jinja_tmpl,
|
||||
salt.utils.fopen(filename).read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
self.assertRaisesRegexp(
|
||||
SaltRenderError,
|
||||
expected,
|
||||
render_jinja_tmpl,
|
||||
fp_.read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
SaltCacheLoader.file_client = _fc
|
||||
|
||||
def test_macro_additional_log_syntaxerror(self):
|
||||
|
@ -276,12 +280,13 @@ class TestGetTemplate(TestCase):
|
|||
fc = MockFileClient()
|
||||
_fc = SaltCacheLoader.file_client
|
||||
SaltCacheLoader.file_client = lambda loader: fc
|
||||
self.assertRaisesRegexp(
|
||||
SaltRenderError,
|
||||
expected,
|
||||
render_jinja_tmpl,
|
||||
salt.utils.fopen(filename).read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
self.assertRaisesRegexp(
|
||||
SaltRenderError,
|
||||
expected,
|
||||
render_jinja_tmpl,
|
||||
fp_.read(),
|
||||
dict(opts=self.local_opts, saltenv='test'))
|
||||
SaltCacheLoader.file_client = _fc
|
||||
|
||||
def test_non_ascii_encoding(self):
|
||||
|
@ -290,8 +295,9 @@ class TestGetTemplate(TestCase):
|
|||
_fc = SaltCacheLoader.file_client
|
||||
SaltCacheLoader.file_client = lambda loader: fc
|
||||
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
|
||||
out = render_jinja_tmpl(
|
||||
salt.utils.fopen(filename).read(),
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
out = render_jinja_tmpl(
|
||||
fp_.read(),
|
||||
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
|
||||
'file_roots': self.local_opts['file_roots'],
|
||||
'pillar_roots': self.local_opts['pillar_roots']},
|
||||
|
@ -303,8 +309,9 @@ class TestGetTemplate(TestCase):
|
|||
_fc = SaltCacheLoader.file_client
|
||||
SaltCacheLoader.file_client = lambda loader: fc
|
||||
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'non_ascii')
|
||||
out = render_jinja_tmpl(
|
||||
salt.utils.fopen(filename).read(),
|
||||
with salt.utils.fopen(filename) as fp_:
|
||||
out = render_jinja_tmpl(
|
||||
fp_.read(),
|
||||
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
|
||||
'file_roots': self.local_opts['file_roots'],
|
||||
'pillar_roots': self.local_opts['pillar_roots']},
|
||||
|
|
|
@ -306,9 +306,8 @@ class TestGrepOption(TestCase):
|
|||
|
||||
def test_grep_option_match_regular_file(self):
|
||||
hello_file = os.path.join(self.tmpdir, 'hello.txt')
|
||||
fd = salt.utils.fopen(hello_file, 'w')
|
||||
fd.write("foo")
|
||||
fd.close()
|
||||
with salt.utils.fopen(hello_file, 'w') as fp_:
|
||||
fp_.write('foo')
|
||||
option = salt.utils.find.GrepOption('grep', 'foo')
|
||||
self.assertEqual(
|
||||
option.match(self.tmpdir, 'hello.txt', os.stat(hello_file)),
|
||||
|
@ -366,9 +365,8 @@ class TestPrintOption(TestCase):
|
|||
|
||||
def test_print_option_execute(self):
|
||||
hello_file = os.path.join(self.tmpdir, 'hello.txt')
|
||||
fd = salt.utils.fopen(hello_file, 'w')
|
||||
fd.write("foo")
|
||||
fd.close()
|
||||
with salt.utils.fopen(hello_file, 'w') as fp_:
|
||||
fp_.write('foo')
|
||||
|
||||
option = salt.utils.find.PrintOption('print', '')
|
||||
self.assertEqual(option.execute('', [0] * 9), '')
|
||||
|
@ -556,9 +554,8 @@ class TestFinder(TestCase):
|
|||
|
||||
def test_find(self):
|
||||
hello_file = os.path.join(self.tmpdir, 'hello.txt')
|
||||
fd = salt.utils.fopen(hello_file, 'w')
|
||||
fd.write("foo")
|
||||
fd.close()
|
||||
with salt.utils.fopen(hello_file, 'w') as fp_:
|
||||
fp_.write('foo')
|
||||
|
||||
finder = salt.utils.find.Finder({})
|
||||
self.assertEqual(list(finder.find(self.tmpdir)), [self.tmpdir, hello_file])
|
||||
|
|
|
@ -21,6 +21,12 @@ from salttesting.helpers import (
|
|||
requires_network,
|
||||
TestsLoggingHandler
|
||||
)
|
||||
from salttesting.mock import (
|
||||
MagicMock,
|
||||
patch,
|
||||
NO_MOCK,
|
||||
NO_MOCK_REASON
|
||||
)
|
||||
ensure_in_syspath('../../')
|
||||
|
||||
# Import salt libs
|
||||
|
@ -32,7 +38,9 @@ from salt.utils.verify import (
|
|||
verify_socket,
|
||||
zmq_version,
|
||||
check_max_open_files,
|
||||
valid_id
|
||||
valid_id,
|
||||
log,
|
||||
verify_log,
|
||||
)
|
||||
|
||||
# Import 3rd-party libs
|
||||
|
@ -114,7 +122,6 @@ class TestVerify(TestCase):
|
|||
|
||||
@skipIf(True, 'Skipping until we can find why Jenkins is bailing out')
|
||||
def test_max_open_files(self):
|
||||
|
||||
with TestsLoggingHandler() as handler:
|
||||
logmsg_dbg = (
|
||||
'DEBUG:This salt-master instance has accepted {0} minion keys.'
|
||||
|
@@ -215,6 +222,28 @@ class TestVerify(TestCase):
            shutil.rmtree(tempdir)
            resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h))

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_verify_log(self):
        '''
        Test that verify_log works as expected
        '''
        message = 'Insecure logging configuration detected! Sensitive data may be logged.'

        mock_cheese = MagicMock()
        with patch.object(log, 'warn', mock_cheese):
            verify_log({'log_level': 'cheeseshop'})
            mock_cheese.assert_called_once_with(message)

        mock_trace = MagicMock()
        with patch.object(log, 'warn', mock_trace):
            verify_log({'log_level': 'trace'})
            mock_trace.assert_called_once_with(message)

        mock_info = MagicMock()
        with patch.object(log, 'warn', mock_info):
            verify_log({'log_level': 'info'})
            mock_info.assert_not_called()


if __name__ == '__main__':
    from integration import run_tests

@ -23,7 +23,8 @@ from salttesting.helpers import ensure_in_syspath
|
|||
ensure_in_syspath('../../')
|
||||
|
||||
# Import salt libs
|
||||
from salt.utils import fopen, is_darwin, vt
|
||||
import salt.utils
|
||||
import salt.utils.vt
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
|
||||
|
@ -37,7 +38,7 @@ class VTTestCase(TestCase):
|
|||
if not sys.stdin.isatty():
|
||||
self.skipTest('Not attached to a TTY. The test would fail.')
|
||||
cols = random.choice(range(80, 250))
|
||||
terminal = vt.Terminal(
|
||||
terminal = salt.utils.vt.Terminal(
|
||||
'echo "Foo!"',
|
||||
shell=True,
|
||||
cols=cols,
|
||||
|
@ -61,7 +62,7 @@ class VTTestCase(TestCase):
|
|||
# Get current number of PTY's
|
||||
try:
|
||||
if os.path.exists('/proc/sys/kernel/pty/nr'):
|
||||
with fopen('/proc/sys/kernel/pty/nr') as fh_:
|
||||
with salt.utils.fopen('/proc/sys/kernel/pty/nr') as fh_:
|
||||
return int(fh_.read().strip())
|
||||
|
||||
proc = subprocess.Popen(
|
||||
|
@ -72,7 +73,7 @@ class VTTestCase(TestCase):
|
|||
stdout, _ = proc.communicate()
|
||||
return int(stdout.strip())
|
||||
except (ValueError, OSError, IOError):
|
||||
if is_darwin():
|
||||
if salt.utils.is_darwin():
|
||||
# We're unable to findout how many PTY's are open
|
||||
self.skipTest(
|
||||
'Unable to find out how many PTY\'s are open on Darwin - '
|
||||
|
@ -85,7 +86,7 @@ class VTTestCase(TestCase):
|
|||
# Using context manager's
|
||||
for idx in range(0, nr_ptys + n_executions):
|
||||
try:
|
||||
with vt.Terminal('echo "Run {0}"'.format(idx),
|
||||
with salt.utils.vt.Terminal('echo "Run {0}"'.format(idx),
|
||||
shell=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False) as terminal:
|
||||
|
@ -105,7 +106,7 @@ class VTTestCase(TestCase):
|
|||
# Not using context manager's
|
||||
for idx in range(0, nr_ptys + n_executions):
|
||||
try:
|
||||
terminal = vt.Terminal('echo "Run {0}"'.format(idx),
|
||||
terminal = salt.utils.vt.Terminal('echo "Run {0}"'.format(idx),
|
||||
shell=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False)
|
||||
|
@ -125,7 +126,10 @@ class VTTestCase(TestCase):
|
|||
@skipIf(True, 'Disabled until we can figure out how to make this more reliable.')
|
||||
def test_isalive_while_theres_data_to_read(self):
|
||||
expected_data = 'Alive!\n'
|
||||
term = vt.Terminal('echo "Alive!"', shell=True, stream_stdout=False, stream_stderr=False)
|
||||
term = salt.utils.vt.Terminal('echo "Alive!"',
|
||||
shell=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False)
|
||||
buffer_o = buffer_e = ''
|
||||
try:
|
||||
while term.has_unread_data:
|
||||
|
@ -150,7 +154,10 @@ class VTTestCase(TestCase):
|
|||
term.close(terminate=True, kill=True)
|
||||
|
||||
expected_data = 'Alive!\n'
|
||||
term = vt.Terminal('echo "Alive!" 1>&2', shell=True, stream_stdout=False, stream_stderr=False)
|
||||
term = salt.utils.vt.Terminal('echo "Alive!" 1>&2',
|
||||
shell=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False)
|
||||
buffer_o = buffer_e = ''
|
||||
try:
|
||||
while term.has_unread_data:
|
||||
|
@ -175,7 +182,10 @@ class VTTestCase(TestCase):
|
|||
term.close(terminate=True, kill=True)
|
||||
|
||||
expected_data = 'Alive!\nAlive!\n'
|
||||
term = vt.Terminal('echo "Alive!"; sleep 5; echo "Alive!"', shell=True, stream_stdout=False, stream_stderr=False)
|
||||
term = salt.utils.vt.Terminal('echo "Alive!"; sleep 5; echo "Alive!"',
|
||||
shell=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False)
|
||||
buffer_o = buffer_e = ''
|
||||
try:
|
||||
while term.has_unread_data:
|
||||
|
|