Mirror of https://github.com/saltstack/salt.git, synced 2025-04-17 10:10:20 +00:00
Merge branch '2015.8' into '2016.3'

Conflicts:
- salt/grains/core.py
- salt/modules/file.py
- salt/modules/hosts.py
- salt/modules/inspectlib/collector.py
- salt/modules/portage_config.py
- salt/modules/x509.py
- salt/scripts.py
- tests/integration/shell/enabled.py
This commit is contained in commit cf038ee3fe.
52 changed files with 711 additions and 584 deletions
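The hunks below repeatedly apply the same refactor: a bare `salt.utils.fopen(...)` call whose handle is read or written and then leaked is replaced with a `with` block so the filehandle is closed deterministically, even when an exception is raised. A minimal sketch of the pattern, not taken verbatim from any one hunk (it assumes Salt is importable and that `salt.utils.fopen` behaves like the builtin `open`; `sig_path` is just an illustrative argument name):

```python
# Illustrative sketch of the recurring refactor in this merge.
# Assumes salt.utils.fopen is available and wraps the builtin open().
import salt.utils


def read_signature_old(sig_path):
    # Old style: the filehandle returned by fopen() is never explicitly
    # closed; it is only reclaimed whenever the object is garbage-collected.
    return salt.utils.fopen(sig_path).read()


def read_signature_new(sig_path):
    # New style: the context manager closes the filehandle as soon as the
    # block exits, including on error.
    with salt.utils.fopen(sig_path) as fp_:
        return fp_.read()
```
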
@@ -2,10 +2,6 @@
FreeBSD
=======

Salt was added to the FreeBSD ports tree Dec 26th, 2011 by Christer Edwards
<christer.edwards@gmail.com>. It has been tested on FreeBSD 7.4, 8.2, 9.0,
9.1, 10.0 and later releases.

Installation
============

@@ -40,9 +36,7 @@ following file to your system:

saltstack: {
url: "https://repo.saltstack.com/freebsd/${ABI}/",
mirror_type: "http",
enabled: yes
priority: 10
}

You should now be able to install Salt from this new repository:

@@ -230,7 +230,8 @@ class MasterKeys(dict):
self.sig_path = os.path.join(self.opts['pki_dir'],
opts['master_pubkey_signature'])
if os.path.isfile(self.sig_path):
self.pub_signature = salt.utils.fopen(self.sig_path).read()
with salt.utils.fopen(self.sig_path) as fp_:
self.pub_signature = fp_.read()
log.info('Read {0}\'s signature from {1}'
''.format(os.path.basename(self.pub_path),
self.opts['master_pubkey_signature']))

@@ -689,6 +689,8 @@ class Client(object):
result.append(chunk)
else:
dest_tmp = "{0}.part".format(dest)
# We need an open filehandle to use in the on_chunk callback,
# that's why we're not using a with clause here.
destfp = salt.utils.fopen(dest_tmp, 'wb')

def on_chunk(chunk):
@@ -1142,6 +1144,8 @@ class RemoteClient(Client):
os.makedirs(destdir)
else:
return False
# We need an open filehandle here, that's why we're not using a
# with clause:
fn_ = salt.utils.fopen(dest, 'wb+')
else:
log.debug('No dest file found {0}'.format(dest))

@@ -74,7 +74,8 @@ def recv(files, dest):
return 'Destination unavailable'

try:
salt.utils.fopen(final, 'w+').write(data)
with salt.utils.fopen(final, 'w+') as fp_:
fp_.write(data)
ret[final] = True
except IOError:
ret[final] = False

@@ -52,6 +52,7 @@ def load():

try:
datastore_path = os.path.join(__opts__['cachedir'], 'datastore')
# serial.load() will close the filehandle, no need for a "with" block
fn_ = salt.utils.fopen(datastore_path, 'rb')
return serial.load(fn_)
except (IOError, OSError, NameError):

@@ -99,20 +99,18 @@ def hosts_remove(hostsfile='/etc/hosts', entries=None):
hosts = fp_.read()

host_list = entries.split(',')
out_file = salt.utils.fopen(hostsfile, 'w')
for line in hosts.splitlines():
if not line or line.strip().startswith('#'):
out_file.write('{0}\n'.format(line))
continue
comps = line.split()
for host in host_list:
if host in comps[1:]:
comps.remove(host)
if len(comps) > 1:
out_file.write(' '.join(comps))
out_file.write('\n')

out_file.close()
with salt.utils.fopen(hostsfile, 'w') as out_file:
for line in hosts.splitlines():
if not line or line.strip().startswith('#'):
out_file.write('{0}\n'.format(line))
continue
comps = line.split()
for host in host_list:
if host in comps[1:]:
comps.remove(host)
if len(comps) > 1:
out_file.write(' '.join(comps))
out_file.write('\n')


def parse_zone(zonefile=None, zone=None):

@@ -3287,7 +3287,8 @@ def export(name,
if compression != 'gzip':
# gzip doesn't use a Compressor object, it uses a .open() method to
# open the filehandle. If not using gzip, we need to open the
# filehandle here.
# filehandle here. We make sure to close it in the "finally" block
# below.
out = salt.utils.fopen(path, 'wb')
response = _client_wrapper('export', name)
buf = None

@@ -315,9 +315,10 @@ def _get_pkg_license(pkg):
licenses = set()
cpr = "/usr/share/doc/{0}/copyright".format(pkg)
if os.path.exists(cpr):
for line in open(cpr).read().split(os.linesep):
if line.startswith("License:"):
licenses.add(line.split(":", 1)[1].strip())
with salt.utils.fopen(cpr) as fp_:
for line in fp_.read().split(os.linesep):
if line.startswith("License:"):
licenses.add(line.split(":", 1)[1].strip())

return ", ".join(sorted(licenses))

@@ -352,17 +353,18 @@ def _get_pkg_ds_avail():
ret = dict()
pkg_mrk = "Package:"
pkg_name = "package"
for pkg_info in open(avail).read().split(pkg_mrk):
nfo = dict()
for line in (pkg_mrk + pkg_info).split(os.linesep):
line = line.split(": ", 1)
if len(line) != 2:
continue
key, value = line
if value.strip():
nfo[key.lower()] = value
if nfo.get(pkg_name):
ret[nfo[pkg_name]] = nfo
with salt.utils.fopen(avail) as fp_:
for pkg_info in fp_.read().split(pkg_mrk):
nfo = dict()
for line in (pkg_mrk + pkg_info).split(os.linesep):
line = line.split(": ", 1)
if len(line) != 2:
continue
key, value = line
if value.strip():
nfo[key.lower()] = value
if nfo.get(pkg_name):
ret[nfo[pkg_name]] = nfo

return ret

@@ -1499,7 +1499,8 @@ def line(path, content, match=None, mode=None, location=None,
if before is None and after is None and not match:
match = content

body = salt.utils.fopen(path, mode='r').read()
with salt.utils.fopen(path, mode='r') as fp_:
body = fp_.read()
body_before = hashlib.sha256(salt.utils.to_bytes(body)).hexdigest()
after = _regex_to_static(body, after)
before = _regex_to_static(body, before)
@@ -1640,7 +1641,9 @@ def line(path, content, match=None, mode=None, location=None,

if changed:
if show_changes:
changes_diff = ''.join(difflib.unified_diff(salt.utils.fopen(path, 'r').read().splitlines(), body.splitlines()))
with salt.utils.fopen(path, 'r') as fp_:
path_content = fp_.read().splitlines()
changes_diff = ''.join(difflib.unified_diff(path_content, body.splitlines()))
if __opts__['test'] is False:
fh_ = None
try:

@@ -37,7 +37,8 @@ def _get_or_create_hostfile():
if hfn is None:
hfn = ''
if not os.path.exists(hfn):
salt.utils.fopen(hfn, 'w').close()
with salt.utils.fopen(hfn, 'w'):
pass
return hfn


@@ -164,7 +165,8 @@ def set_host(ip, alias):
if not alias.strip():
line_to_add = ''

lines = salt.utils.fopen(hfn).readlines()
with salt.utils.fopen(hfn) as fp_:
lines = fp_.readlines()
for ind, line in enumerate(lines):
tmpline = line.strip()
if not tmpline:
@@ -202,7 +204,8 @@ def rm_host(ip, alias):
if not has_pair(ip, alias):
return True
hfn = _get_or_create_hostfile()
lines = salt.utils.fopen(hfn).readlines()
with salt.utils.fopen(hfn) as fp_:
lines = fp_.readlines()
for ind in range(len(lines)):
tmpline = lines[ind].strip()
if not tmpline:

@@ -120,9 +120,8 @@ def _write_file(folder, filename, data):
msg = msg.format(filename, folder)
log.error(msg)
raise AttributeError(msg)
fout = salt.utils.fopen(path, 'w')
fout.write(data)
fout.close()
with salt.utils.fopen(path, 'w') as fp_:
fp_.write(data)

return 0

@@ -445,7 +445,8 @@ def is_alive(pidfile):
'''
# Just silencing os.kill exception if no such PID, therefore try/pass.
try:
os.kill(int(open(pidfile).read().strip()), 0)
with salt.utils.fopen(pidfile) as fp_:
os.kill(int(fp_.read().strip()), 0)
sys.exit(1)
except Exception as ex:
pass
@@ -483,9 +484,8 @@ if __name__ == '__main__':
pid = os.fork()
if pid > 0:
reinit_crypto()
fpid = open(os.path.join(pidfile, EnvLoader.PID_FILE), "w")
fpid.write("{0}\n".format(pid))
fpid.close()
with salt.utils.fopen(os.path.join(pidfile, EnvLoader.PID_FILE), 'w') as fp_:
fp_.write('{0}\n'.format(pid))
sys.exit(0)
except OSError as ex:
sys.exit(1)

@@ -41,7 +41,8 @@ def _check_systemd_salt_config():
sysctl_dir = os.path.split(conf)[0]
if not os.path.exists(sysctl_dir):
os.makedirs(sysctl_dir)
salt.utils.fopen(conf, 'w').close()
with salt.utils.fopen(conf, 'w'):
pass
return conf


@@ -80,16 +81,17 @@ def show(config_file=False):
ret = {}
if config_file:
try:
for line in salt.utils.fopen(config_file):
if not line.startswith('#') and '=' in line:
# search if we have some '=' instead of ' = ' separators
SPLIT = ' = '
if SPLIT not in line:
SPLIT = SPLIT.strip()
key, value = line.split(SPLIT, 1)
key = key.strip()
value = value.lstrip()
ret[key] = value
with salt.utils.fopen(config_file) as fp_:
for line in fp_:
if not line.startswith('#') and '=' in line:
# search if we have some '=' instead of ' = ' separators
SPLIT = ' = '
if SPLIT not in line:
SPLIT = SPLIT.strip()
key, value = line.split(SPLIT, 1)
key = key.strip()
value = value.lstrip()
ret[key] = value
except (OSError, IOError):
log.error('Could not open sysctl file')
return None

@@ -117,7 +117,8 @@ def persist(name, value, config='/etc/sysctl.conf'):
# create /etc/sysctl.conf if not present
if not os.path.isfile(config):
try:
salt.utils.fopen(config, 'w+').close()
with salt.utils.fopen(config, 'w+'):
pass
except (IOError, OSError):
msg = 'Could not create {0}'
raise CommandExecutionError(msg.format(config))

@@ -1007,7 +1007,8 @@ def mod_hostname(hostname):

# Modify the /etc/hosts file to replace the old hostname with the
# new hostname
host_c = salt.utils.fopen('/etc/hosts', 'r').readlines()
with salt.utils.fopen('/etc/hosts', 'r') as fp_:
host_c = fp_.readlines()

with salt.utils.fopen('/etc/hosts', 'w') as fh_:
for host in host_c:
@@ -1026,7 +1027,8 @@ def mod_hostname(hostname):
# Modify the /etc/sysconfig/network configuration file to set the
# new hostname
if __grains__['os_family'] == 'RedHat':
network_c = salt.utils.fopen('/etc/sysconfig/network', 'r').readlines()
with salt.utils.fopen('/etc/sysconfig/network', 'r') as fp_:
network_c = fp_.readlines()

with salt.utils.fopen('/etc/sysconfig/network', 'w') as fh_:
for net in network_c:

@@ -258,7 +258,8 @@ def get_saved_rules(conf_file=None, family='ipv4'):
if _conf() and not conf_file:
conf_file = _conf()

lines = salt.utils.fopen(conf_file).readlines()
with salt.utils.fopen(conf_file) as fp_:
lines = fp_.readlines()
rules = []
for line in lines:
tmpline = line.strip()

@@ -98,7 +98,8 @@ def persist(name, value, config='/etc/sysctl.conf'):
# create /etc/sysctl.conf if not present
if not os.path.isfile(config):
try:
salt.utils.fopen(config, 'w+').close()
with salt.utils.fopen(config, 'w+'):
pass
except (IOError, OSError):
msg = 'Could not create {0}'
raise CommandExecutionError(msg.format(config))

@@ -5,6 +5,7 @@ Configure ``portage(5)``

# Import python libs
from __future__ import absolute_import
import logging
import os
import shutil

@@ -36,6 +37,8 @@ BASE_PATH = '/etc/portage/package.{0}'
SUPPORTED_CONFS = ('accept_keywords', 'env', 'license', 'mask', 'properties',
'unmask', 'use')

log = logging.getLogger(__name__)


def __virtual__():
'''
@@ -216,7 +219,8 @@ def _package_conf_ordering(conf, clean=True, keep_backup=False):
backup_files.append(file_path + '.bak')

if cp[0] == '/' or cp.split('/') > 2:
rearrange.extend(list(salt.utils.fopen(file_path)))
with salt.utils.fopen(file_path) as fp_:
rearrange.extend(fp_.readlines())
os.remove(file_path)
else:
new_contents = ''
@@ -365,43 +369,49 @@ def append_to_package_conf(conf, atom='', flags=None, string='', overwrite=False
new_contents = ''
added = False

for l in file_handler:
l_strip = l.strip()
if l_strip == '':
new_contents += '\n'
elif l_strip[0] == '#':
new_contents += l
elif l_strip.split()[0] == atom:
if l_strip in to_delete_if_empty:
continue
if overwrite:
new_contents += string.strip() + '\n'
added = True
else:
old_flags = [flag for flag in l_strip.split(' ') if flag][1:]
if conf == 'accept_keywords':
if not old_flags:
new_contents += l
if not new_flags:
added = True
continue
elif not new_flags:
continue
merged_flags = _merge_flags(new_flags, old_flags, conf)
if merged_flags:
new_contents += '{0} {1}\n'.format(
atom, ' '.join(merged_flags))
try:
for l in file_handler:
l_strip = l.strip()
if l_strip == '':
new_contents += '\n'
elif l_strip[0] == '#':
new_contents += l
elif l_strip.split()[0] == atom:
if l_strip in to_delete_if_empty:
continue
if overwrite:
new_contents += string.strip() + '\n'
added = True
else:
new_contents += '{0}\n'.format(atom)
added = True
else:
new_contents += l
if not added:
new_contents += string.strip() + '\n'
file_handler.seek(0)
file_handler.truncate(len(new_contents))
file_handler.write(new_contents)
file_handler.close()
old_flags = [flag for flag in l_strip.split(' ') if flag][1:]
if conf == 'accept_keywords':
if not old_flags:
new_contents += l
if not new_flags:
added = True
continue
elif not new_flags:
continue
merged_flags = _merge_flags(new_flags, old_flags, conf)
if merged_flags:
new_contents += '{0} {1}\n'.format(
atom, ' '.join(merged_flags))
else:
new_contents += '{0}\n'.format(atom)
added = True
else:
new_contents += l
if not added:
new_contents += string.strip() + '\n'
except Exception as exc:
log.error('Failed to write to %s: %s', complete_file_path, exc)
else:
file_handler.seek(0)
file_handler.truncate(len(new_contents))
file_handler.write(new_contents)
finally:
file_handler.close()

try:
os.remove(complete_file_path + '.bak')
except OSError:
@@ -455,28 +465,28 @@ def get_flags_from_package_conf(conf, atom):

flags = []
try:
file_handler = salt.utils.fopen(package_file)
with salt.utils.fopen(package_file) as fp_:
for line in fp_:
line = line.strip()
line_package = line.split()[0]

found_match = False
if has_wildcard:
found_match = line_package == atom
else:
line_list = _porttree().dbapi.xmatch("match-all", line_package)
found_match = match_list.issubset(line_list)

if found_match:
f_tmp = [flag for flag in line.strip().split(' ') if flag][1:]
if f_tmp:
flags.extend(f_tmp)
else:
flags.append('~ARCH')

return _merge_flags(flags)
except IOError:
return []
else:
for line in file_handler:
line = line.strip()
line_package = line.split()[0]

found_match = False
if has_wildcard:
found_match = line_package == atom
else:
line_list = _porttree().dbapi.xmatch("match-all", line_package)
found_match = match_list.issubset(line_list)

if found_match:
f_tmp = [flag for flag in line.strip().split(' ') if flag][1:]
if f_tmp:
flags.extend(f_tmp)
else:
flags.append('~ARCH')
return _merge_flags(flags)


def has_flag(conf, atom, flag):
@@ -554,22 +564,21 @@ def is_present(conf, atom):
match_list = set(_porttree().dbapi.xmatch("match-all", atom))

try:
file_handler = salt.utils.fopen(package_file)
with salt.utils.fopen(package_file) as fp_:
for line in fp_:
line = line.strip()
line_package = line.split()[0]

if has_wildcard:
if line_package == str(atom):
return True
else:
line_list = _porttree().dbapi.xmatch("match-all", line_package)
if match_list.issubset(line_list):
return True
except IOError:
return False
else:
for line in file_handler:
line = line.strip()
line_package = line.split()[0]

if has_wildcard:
if line_package == str(atom):
return True
else:
line_list = _porttree().dbapi.xmatch("match-all", line_package)
if match_list.issubset(line_list):
return True
return False
pass
return False


def get_iuse(cp):

@@ -877,18 +877,16 @@ def _write_file_iface(iface, data, folder, pattern):
msg = msg.format(filename, folder)
log.error(msg)
raise AttributeError(msg)
fout = salt.utils.fopen(filename, 'w')
fout.write(data)
fout.close()
with salt.utils.fopen(filename, 'w') as fp_:
fp_.write(data)


def _write_file_network(data, filename):
'''
Writes a file to disk
'''
fout = salt.utils.fopen(filename, 'w')
fout.write(data)
fout.close()
with salt.utils.fopen(filename, 'w') as fp_:
fp_.write(data)


def _read_temp(data):

@@ -221,32 +221,34 @@ def cpustats():
'''
linux specific implementation of cpustats
'''
procf = '/proc/stat'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
for line in stats:
if not line:
continue
comps = line.split()
if comps[0] == 'cpu':
ret[comps[0]] = {'idle': _number(comps[4]),
'iowait': _number(comps[5]),
'irq': _number(comps[6]),
'nice': _number(comps[2]),
'softirq': _number(comps[7]),
'steal': _number(comps[8]),
'system': _number(comps[3]),
'user': _number(comps[1])}
elif comps[0] == 'intr':
ret[comps[0]] = {'total': _number(comps[1]),
'irqs': [_number(x) for x in comps[2:]]}
elif comps[0] == 'softirq':
ret[comps[0]] = {'total': _number(comps[1]),
'softirqs': [_number(x) for x in comps[2:]]}
else:
ret[comps[0]] = _number(comps[1])
try:
with salt.utils.fopen('/proc/stat', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
for line in stats.splitlines():
if not line:
continue
comps = line.split()
if comps[0] == 'cpu':
ret[comps[0]] = {'idle': _number(comps[4]),
'iowait': _number(comps[5]),
'irq': _number(comps[6]),
'nice': _number(comps[2]),
'softirq': _number(comps[7]),
'steal': _number(comps[8]),
'system': _number(comps[3]),
'user': _number(comps[1])}
elif comps[0] == 'intr':
ret[comps[0]] = {'total': _number(comps[1]),
'irqs': [_number(x) for x in comps[2:]]}
elif comps[0] == 'softirq':
ret[comps[0]] = {'total': _number(comps[1]),
'softirqs': [_number(x) for x in comps[2:]]}
else:
ret[comps[0]] = _number(comps[1])
return ret

def freebsd_cpustats():
@@ -309,21 +311,23 @@ def meminfo():
'''
linux specific implementation of meminfo
'''
procf = '/proc/meminfo'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
for line in stats:
if not line:
continue
comps = line.split()
comps[0] = comps[0].replace(':', '')
ret[comps[0]] = {
'value': comps[1],
}
if len(comps) > 2:
ret[comps[0]]['unit'] = comps[2]
try:
with salt.utils.fopen('/proc/meminfo', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
for line in stats.splitlines():
if not line:
continue
comps = line.split()
comps[0] = comps[0].replace(':', '')
ret[comps[0]] = {
'value': comps[1],
}
if len(comps) > 2:
ret[comps[0]]['unit'] = comps[2]
return ret

def freebsd_meminfo():
@@ -369,20 +373,22 @@ def cpuinfo():
'''
linux specific cpuinfo implementation
'''
procf = '/proc/cpuinfo'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
for line in stats:
if not line:
continue
comps = line.split(':')
comps[0] = comps[0].strip()
if comps[0] == 'flags':
ret[comps[0]] = comps[1].split()
else:
ret[comps[0]] = comps[1].strip()
try:
with salt.utils.fopen('/proc/cpuinfo', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
for line in stats.splitlines():
if not line:
continue
comps = line.split(':')
comps[0] = comps[0].strip()
if comps[0] == 'flags':
ret[comps[0]] = comps[1].split()
else:
ret[comps[0]] = comps[1].strip()
return ret

def bsd_cpuinfo():
@@ -493,29 +499,33 @@ def diskstats():
'''
linux specific implementation of diskstats
'''
procf = '/proc/diskstats'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
for line in stats:
if not line:
continue
comps = line.split()
ret[comps[2]] = {'major': _number(comps[0]),
'minor': _number(comps[1]),
'device': _number(comps[2]),
'reads_issued': _number(comps[3]),
'reads_merged': _number(comps[4]),
'sectors_read': _number(comps[5]),
'ms_spent_reading': _number(comps[6]),
'writes_completed': _number(comps[7]),
'writes_merged': _number(comps[8]),
'sectors_written': _number(comps[9]),
'ms_spent_writing': _number(comps[10]),
'io_in_progress': _number(comps[11]),
'ms_spent_in_io': _number(comps[12]),
'weighted_ms_spent_in_io': _number(comps[13])}
try:
with salt.utils.fopen('/proc/diskstats', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
for line in stats.splitlines():
if not line:
continue
comps = line.split()
ret[comps[2]] = {
'major': _number(comps[0]),
'minor': _number(comps[1]),
'device': _number(comps[2]),
'reads_issued': _number(comps[3]),
'reads_merged': _number(comps[4]),
'sectors_read': _number(comps[5]),
'ms_spent_reading': _number(comps[6]),
'writes_completed': _number(comps[7]),
'writes_merged': _number(comps[8]),
'sectors_written': _number(comps[9]),
'ms_spent_writing': _number(comps[10]),
'io_in_progress': _number(comps[11]),
'ms_spent_in_io': _number(comps[12]),
'weighted_ms_spent_in_io': _number(comps[13])
}
return ret

def generic_diskstats():
@@ -584,10 +594,11 @@ def diskusage(*args):
)
# ifile source of data varies with OS, otherwise all the same
if __grains__['kernel'] == 'Linux':
procf = '/proc/mounts'
if not os.path.isfile(procf):
try:
with salt.utils.fopen('/proc/mounts', 'r') as fp_:
ifile = fp_.read().splitlines()
except OSError:
return {}
ifile = salt.utils.fopen(procf, 'r').readlines()
elif __grains__['kernel'] == 'FreeBSD':
ifile = __salt__['cmd.run']('mount -p').splitlines()
elif __grains__['kernel'] == 'SunOS':
@@ -634,16 +645,18 @@ def vmstats():
'''
linux specific implementation of vmstats
'''
procf = '/proc/vmstat'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
for line in stats:
if not line:
continue
comps = line.split()
ret[comps[0]] = _number(comps[1])
try:
with salt.utils.fopen('/proc/vmstat', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
for line in stats.splitlines():
if not line:
continue
comps = line.split()
ret[comps[0]] = _number(comps[1])
return ret

def generic_vmstats():
@@ -699,28 +712,30 @@ def netstats():
'''
freebsd specific netstats implementation
'''
procf = '/proc/net/netstat'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
headers = ['']
for line in stats:
if not line:
continue
comps = line.split()
if comps[0] == headers[0]:
index = len(headers) - 1
row = {}
for field in range(index):
if field < 1:
continue
else:
row[headers[field]] = _number(comps[field])
rowname = headers[0].replace(':', '')
ret[rowname] = row
else:
headers = comps
try:
with salt.utils.fopen('/proc/net/netstat', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
headers = ['']
for line in stats.splitlines():
if not line:
continue
comps = line.split()
if comps[0] == headers[0]:
index = len(headers) - 1
row = {}
for field in range(index):
if field < 1:
continue
else:
row[headers[field]] = _number(comps[field])
rowname = headers[0].replace(':', '')
ret[rowname] = row
else:
headers = comps
return ret

def freebsd_netstats():
@@ -786,38 +801,40 @@ def netdev():
'''
linux specific implementation of netdev
'''
procf = '/proc/net/dev'
if not os.path.isfile(procf):
return {}
stats = salt.utils.fopen(procf, 'r').read().splitlines()
ret = {}
for line in stats:
if not line:
continue
if line.find(':') < 0:
continue
comps = line.split()
# Fix lines like eth0:9999..'
comps[0] = line.split(':')[0].strip()
# Support lines both like eth0:999 and eth0: 9999
comps.insert(1, line.split(':')[1].strip().split()[0])
ret[comps[0]] = {'iface': comps[0],
'rx_bytes': _number(comps[1]),
'rx_compressed': _number(comps[7]),
'rx_drop': _number(comps[4]),
'rx_errs': _number(comps[3]),
'rx_fifo': _number(comps[5]),
'rx_frame': _number(comps[6]),
'rx_multicast': _number(comps[8]),
'rx_packets': _number(comps[2]),
'tx_bytes': _number(comps[9]),
'tx_carrier': _number(comps[15]),
'tx_colls': _number(comps[14]),
'tx_compressed': _number(comps[16]),
'tx_drop': _number(comps[12]),
'tx_errs': _number(comps[11]),
'tx_fifo': _number(comps[13]),
'tx_packets': _number(comps[10])}
try:
with salt.utils.fopen('/proc/net/dev', 'r') as fp_:
stats = fp_.read()
except IOError:
pass
else:
for line in stats.splitlines():
if not line:
continue
if line.find(':') < 0:
continue
comps = line.split()
# Fix lines like eth0:9999..'
comps[0] = line.split(':')[0].strip()
# Support lines both like eth0:999 and eth0: 9999
comps.insert(1, line.split(':')[1].strip().split()[0])
ret[comps[0]] = {'iface': comps[0],
'rx_bytes': _number(comps[1]),
'rx_compressed': _number(comps[7]),
'rx_drop': _number(comps[4]),
'rx_errs': _number(comps[3]),
'rx_fifo': _number(comps[5]),
'rx_frame': _number(comps[6]),
'rx_multicast': _number(comps[8]),
'rx_packets': _number(comps[2]),
'tx_bytes': _number(comps[9]),
'tx_carrier': _number(comps[15]),
'tx_colls': _number(comps[14]),
'tx_compressed': _number(comps[16]),
'tx_drop': _number(comps[12]),
'tx_errs': _number(comps[11]),
'tx_fifo': _number(comps[13]),
'tx_packets': _number(comps[10])}
return ret

def freebsd_netdev():
@@ -976,10 +993,11 @@ def version():
'''
linux specific implementation of version
'''
procf = '/proc/version'
if not os.path.isfile(procf):
try:
with salt.utils.fopen('/proc/version', 'r') as fp_:
return fp_.read().strip()
except IOError:
return {}
return salt.utils.fopen(procf, 'r').read().strip()

# dict that returns a function that does the right thing per platform
get_version = {

@@ -1638,21 +1638,22 @@ def create_empty_crl(
return 'CRL "{0}" already exists'.format(crl_file)

try:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}.crt'.format(
with salt.utils.fopen('{0}/{1}/{2}.crt'.format(
cert_base_path(),
ca_name,
ca_filename
)).read()
)
ca_key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}.key'.format(
ca_filename)) as fp_:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
fp_.read()
)
with salt.utils.fopen('{0}/{1}/{2}.key'.format(
cert_base_path(),
ca_name,
ca_filename)).read()
)
ca_filename)) as fp_:
ca_key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
fp_.read()
)
except IOError:
return 'There is no CA named "{0}"'.format(ca_name)

@@ -1723,21 +1724,22 @@ def revoke_cert(
cert_filename = '{0}'.format(CN)

try:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}.crt'.format(
with salt.utils.fopen('{0}/{1}/{2}.crt'.format(
cert_base_path(),
ca_name,
ca_filename
)).read()
)
ca_key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}.key'.format(
ca_filename)) as fp_:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
fp_.read()
)
with salt.utils.fopen('{0}/{1}/{2}.key'.format(
cert_base_path(),
ca_name,
ca_filename)).read()
)
ca_filename)) as fp_:
ca_key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
fp_.read()
)
except IOError:
return 'There is no CA named "{0}"'.format(ca_name)

@@ -1773,8 +1775,8 @@ def revoke_cert(
index_serial_subject)

ret = {}
with salt.utils.fopen(index_file) as f:
for line in f:
with salt.utils.fopen(index_file) as fp_:
for line in fp_:
if index_r_data_pattern.match(line):
revoke_date = line.split('\t')[2]
try:
@@ -1802,8 +1804,8 @@ def revoke_cert(

crl = OpenSSL.crypto.CRL()

with salt.utils.fopen(index_file) as f:
for line in f:
with salt.utils.fopen(index_file) as fp_:
for line in fp_:
if line.startswith('R'):
fields = line.split('\t')
revoked = OpenSSL.crypto.Revoked()
@@ -1828,8 +1830,8 @@ def revoke_cert(
crl_file)
return ret

with salt.utils.fopen(crl_file, 'w') as f:
f.write(crl_text)
with salt.utils.fopen(crl_file, 'w') as fp_:
fp_.write(crl_text)

return ('Revoked Certificate: "{0}/{1}.crt", '
'serial number: {2}').format(

@@ -1320,9 +1320,11 @@ def create_xml_path(path):

salt '*' virt.create_xml_path <path to XML file on the node>
'''
if not os.path.isfile(path):
try:
with salt.utils.fopen(path, 'r') as fp_:
return create_xml_str(fp_.read())
except (OSError, IOError):
return False
return create_xml_str(salt.utils.fopen(path, 'r').read())


def define_xml_str(xml):
@@ -1350,9 +1352,11 @@ def define_xml_path(path):
salt '*' virt.define_xml_path <path to XML file on the node>

'''
if not os.path.isfile(path):
try:
with salt.utils.fopen(path, 'r') as fp_:
return define_xml_str(fp_.read())
except (OSError, IOError):
return False
return define_xml_str(salt.utils.fopen(path, 'r').read())


def define_vol_xml_str(xml):
@@ -1382,9 +1386,11 @@ def define_vol_xml_path(path):
salt '*' virt.define_vol_xml_path <path to XML file on the node>

'''
if not os.path.isfile(path):
try:
with salt.utils.fopen(path, 'r') as fp_:
return define_vol_xml_str(fp_.read())
except (OSError, IOError):
return False
return define_vol_xml_str(salt.utils.fopen(path, 'r').read())


def migrate_non_shared(vm_, target, ssh=False):
@@ -1574,8 +1580,9 @@ def is_kvm_hyper():
salt '*' virt.is_kvm_hyper
'''
try:
if 'kvm_' not in salt.utils.fopen('/proc/modules').read():
return False
with salt.utils.fopen('/proc/modules') as fp_:
if 'kvm_' not in fp_.read():
return False
except IOError:
# No /proc/modules? Are we on Windows? Or Solaris?
return False
@@ -1599,9 +1606,10 @@ def is_xen_hyper():
# virtual_subtype isn't set everywhere.
return False
try:
if 'xen_' not in salt.utils.fopen('/proc/modules').read():
return False
except IOError:
with salt.utils.fopen('/proc/modules') as fp_:
if 'xen_' not in fp_.read():
return False
except (OSError, IOError):
# No /proc/modules? Are we on Windows? Or Solaris?
return False
return 'libvirtd' in __salt__['cmd.run'](__grains__['ps'])

@@ -265,7 +265,8 @@ def _text_or_file(input_):
Determines if input is a path to a file, or a string with the content to be parsed.
'''
if os.path.isfile(input_):
return salt.utils.fopen(input_).read()
with salt.utils.fopen(input_) as fp_:
return fp_.read()
else:
return input_

@@ -626,7 +627,8 @@ def write_pem(text, path, pem_type=None):
'''
old_umask = os.umask(0o77)
text = get_pem_entry(text, pem_type=pem_type)
salt.utils.fopen(path, 'w').write(text)
with salt.utils.fopen(path, 'w') as fp_:
fp_.write(text)
os.umask(old_umask)
return 'PEM written to {0}'.format(path)

@@ -773,9 +773,10 @@ def is_hyper():
# virtual_subtype isn't set everywhere.
return False
try:
if 'xen_' not in salt.utils.fopen('/proc/modules').read():
return False
except IOError:
with salt.utils.fopen('/proc/modules') as fp_:
if 'xen_' not in fp_.read():
return False
except (OSError, IOError):
return False
# there must be a smarter way...
return 'xenstore' in __salt__['cmd.run'](__grains__['ps'])

@@ -518,9 +518,10 @@ def upgrade_bootstrap(directory='.',
if not os.path.isdir(dbuild):
os.makedirs(dbuild)
# only try to download once per buildout checkout
salt.utils.fopen(os.path.join(
dbuild,
'{0}.updated_bootstrap'.format(buildout_ver)))
with salt.utils.fopen(os.path.join(
dbuild,
'{0}.updated_bootstrap'.format(buildout_ver))):
pass
except (OSError, IOError):
LOG.info('Bootstrap updated from repository')
data = _urlopen(booturl).read()

@@ -80,6 +80,8 @@ def _walk_through(job_dir):
if not os.path.isfile(load_path):
continue

# serial.load() closes the filehandle, no need to enclose this in a
# "with" block.
job = serial.load(salt.utils.fopen(load_path, 'rb'))
jid = job['jid']
yield jid, job, t_path, final

@@ -2355,7 +2355,8 @@ def mod_init(low):
if low['fun'] == 'installed' or low['fun'] == 'latest':
rtag = __gen_rtag()
if not os.path.exists(rtag):
salt.utils.fopen(rtag, 'w+').write('')
with salt.utils.fopen(rtag, 'w+'):
pass
return ret
return False

@@ -397,7 +397,8 @@ def managed(name, ppa=None, **kwargs):

# empty file before configure
if kwargs.get('clean_file', False):
salt.utils.fopen(kwargs['file'], 'w').close()
with salt.utils.fopen(kwargs['file'], 'w'):
pass

try:
if __grains__['os_family'] == 'Debian':

@@ -617,10 +617,11 @@ def pem_managed(name,

new = __salt__['x509.get_pem_entry'](text=text)

if os.path.isfile(name):
current = salt.utils.fopen(name).read()
else:
current = '{0} does not exist.'.format(name)
try:
with salt.utils.fopen(name) as fp_:
current = fp_.read()
except (OSError, IOError):
current = '{0} does not exist or is unreadable'.format(name)

if new == current:
ret['result'] = True

@@ -226,21 +226,22 @@ class AESReqServerMixin(object):

elif os.path.isfile(pubfn):
# The key has been accepted, check it
if salt.utils.fopen(pubfn, 'r').read().strip() != load['pub'].strip():
log.error(
'Authentication attempt from {id} failed, the public '
'keys did not match. This may be an attempt to compromise '
'the Salt cluster.'.format(**load)
)
# put denied minion key into minions_denied
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
fp_.write(load['pub'])
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
with salt.utils.fopen(pubfn, 'r') as pubfn_handle:
if pubfn_handle.read().strip() != load['pub'].strip():
log.error(
'Authentication attempt from {id} failed, the public '
'keys did not match. This may be an attempt to compromise '
'the Salt cluster.'.format(**load)
)
# put denied minion key into minions_denied
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
fp_.write(load['pub'])
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}

elif not os.path.isfile(pubfn_pend):
# The key has not been accepted, this is a new minion
@@ -312,58 +313,60 @@ class AESReqServerMixin(object):
# Check if the keys are the same and error out if this is the
# case. Otherwise log the fact that the minion is still
# pending.
if salt.utils.fopen(pubfn_pend, 'r').read() != load['pub']:
log.error(
'Authentication attempt from {id} failed, the public '
'key in pending did not match. This may be an '
'attempt to compromise the Salt cluster.'
.format(**load)
)
# put denied minion key into minions_denied
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
fp_.write(load['pub'])
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
else:
log.info(
'Authentication failed from host {id}, the key is in '
'pending and needs to be accepted with salt-key '
'-a {id}'.format(**load)
)
eload = {'result': True,
'act': 'pend',
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': True}}
with salt.utils.fopen(pubfn_pend, 'r') as pubfn_handle:
if pubfn_handle.read() != load['pub']:
log.error(
'Authentication attempt from {id} failed, the public '
'key in pending did not match. This may be an '
'attempt to compromise the Salt cluster.'
.format(**load)
)
# put denied minion key into minions_denied
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
fp_.write(load['pub'])
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
else:
log.info(
'Authentication failed from host {id}, the key is in '
'pending and needs to be accepted with salt-key '
'-a {id}'.format(**load)
)
eload = {'result': True,
'act': 'pend',
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': True}}
else:
# This key is in pending and has been configured to be
# auto-signed. Check to see if it is the same key, and if
# so, pass on doing anything here, and let it get automatically
# accepted below.
if salt.utils.fopen(pubfn_pend, 'r').read() != load['pub']:
log.error(
'Authentication attempt from {id} failed, the public '
'keys in pending did not match. This may be an '
'attempt to compromise the Salt cluster.'
.format(**load)
)
# put denied minion key into minions_denied
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
fp_.write(load['pub'])
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
else:
pass
with salt.utils.fopen(pubfn_pend, 'r') as pubfn_handle:
if pubfn_handle.read() != load['pub']:
log.error(
'Authentication attempt from {id} failed, the public '
'keys in pending did not match. This may be an '
'attempt to compromise the Salt cluster.'
.format(**load)
)
# put denied minion key into minions_denied
with salt.utils.fopen(pubfn_denied, 'w+') as fp_:
fp_.write(load['pub'])
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, salt.utils.event.tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
else:
pass

else:
# Something happened that I have not accounted for, FAIL!

@@ -613,7 +613,7 @@ def output_profile(pr, stats_path='/tmp/stats', stop=False, id_=None):
ficn = os.path.join(stats_path, '{0}.{1}.stats'.format(id_, date))
if not os.path.exists(ficp):
pr.dump_stats(ficp)
with open(ficn, 'w') as fic:
with fopen(ficn, 'w') as fic:
pstats.Stats(pr, stream=fic).sort_stats('cumulative')
log.info('PROFILING: {0} generated'.format(ficp))
log.info('PROFILING (cumulative): {0} generated'.format(ficn))

@@ -2434,7 +2434,8 @@ def lock_file(filename, interval=.5, timeout=15):
else:
break

salt.utils.fopen(lock, 'a').close()
with salt.utils.fopen(lock, 'a'):
pass


def unlock_file(filename):

@@ -46,7 +46,8 @@ def cache_jobs(opts, jid, ret):
jdir = os.path.dirname(fn_)
if not os.path.isdir(jdir):
os.makedirs(jdir)
salt.utils.fopen(fn_, 'w+b').write(serial.dumps(ret))
with salt.utils.fopen(fn_, 'w+b') as fp_:
fp_.write(serial.dumps(ret))


def _read_proc_file(path, opts):

@@ -641,8 +641,8 @@ class SaltNova(object):
'''
nt_ks = self.compute_conn
if pubfile:
ifile = salt.utils.fopen(pubfile, 'r')
pubkey = ifile.read()
with salt.utils.fopen(pubfile, 'r') as fp_:
pubkey = fp_.read()
if not pubkey:
return False
nt_ks.keypairs.create(name, public_key=pubkey)

@@ -8,7 +8,7 @@ from __future__ import absolute_import

# Import python libs
import logging
from sys import stdout
import sys
from os import makedirs
from os.path import dirname, isdir
from errno import EEXIST
@@ -172,7 +172,7 @@ class SaltSwift(object):
headers, body = self.conn.get_object(cont, obj, resp_chunk_size=65536)

if return_bin is True:
fp = stdout
fp = sys.stdout
else:
dirpath = dirname(local_file)
if dirpath and not isdir(dirpath):

@@ -419,7 +419,7 @@ class ProcessManager(object):
else:
return
if salt.utils.is_windows():
with open(os.devnull, 'wb') as devnull:
with salt.utils.fopen(os.devnull, 'wb') as devnull:
for pid, p_map in six.iteritems(self._process_map):
# On Windows, we need to explicitly terminate sub-processes
# because the processes don't have a sigterm handler.

@@ -278,8 +278,10 @@ def _get_jinja_error(trace, context=None):
if add_log:
if template_path:
out = '\n{0}\n'.format(msg.splitlines()[0])
with salt.utils.fopen(template_path) as fp_:
template_contents = fp_.read()
out += salt.utils.get_context(
salt.utils.fopen(template_path).read(),
template_contents,
line,
marker=' <======================')
else:

@@ -34,10 +34,16 @@ class VirtKey(object):
Accept the provided key
'''
try:
expiry = int(salt.utils.fopen(self.path, 'r').read())
except IOError:
log.error('Request to sign key for minion "{0}" on hyper "{1}" denied: '
'no authorization'.format(self.id, self.hyper))
with salt.utils.fopen(self.path, 'r') as fp_:
expiry = int(fp_.read())
except (OSError, IOError):
log.error(
'Request to sign key for minion \'%s\' on hyper \'%s\' '
'denied: no authorization', self.id, self.hyper
)
return False
except ValueError:
log.error('Invalid expiry data in %s', self.path)
return False

# Limit acceptance window to 10 minutes

@@ -608,9 +608,8 @@ class TestDaemon(object):

for entry in ('master', 'minion', 'sub_minion', 'syndic_master'):
computed_config = copy.deepcopy(locals()['{0}_opts'.format(entry)])
salt.utils.fopen(os.path.join(TMP_CONF_DIR, entry), 'w').write(
yaml.dump(computed_config, default_flow_style=False)
)
with salt.utils.fopen(os.path.join(TMP_CONF_DIR, entry), 'w') as fp_:
fp_.write(yaml.dump(computed_config, default_flow_style=False))
# <---- Transcribe Configuration -----------------------------------------------------------------------------

# ----- Verify Environment ---------------------------------------------------------------------------------->
@@ -734,14 +733,13 @@ class TestDaemon(object):
sync_needed = self.parser.options.clean
if self.parser.options.clean is False:
def sumfile(fpath):
# Since we will be doing this for small files, it should be ok
fobj = salt.utils.fopen(fpath)
m = md5()
while True:
d = fobj.read(8096)
if not d:
break
m.update(d)
with salt.utils.fopen(fpath) as fobj:
while True:
d = fobj.read(8096)
if not d:
break
m.update(d)
return m.hexdigest()
# Since we're not cleaning up, let's see if modules are already up
# to date so we don't need to re-sync them

@@ -64,7 +64,8 @@ class GrainsTargetingTest(integration.ShellCase):
# Create a minion key, but do not start the "fake" minion. This mimics a
# disconnected minion.
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected')
salt.utils.fopen(key_file, 'a').close()
with salt.utils.fopen(key_file, 'a'):
pass

# ping disconnected minion and ensure it times out and returns with correct message
try:

@@ -45,7 +45,8 @@ class StdTest(integration.ModuleCase):
# create fake minion
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'footest')
# touch the file
salt.utils.fopen(key_file, 'a').close()
with salt.utils.fopen(key_file, 'a'):
pass
# ping that minion and ensure it times out
try:
cmd_iter = self.client.cmd_cli(
@@ -125,7 +126,8 @@ class StdTest(integration.ModuleCase):
# Create a minion key, but do not start the "fake" minion. This mimics
# a disconnected minion.
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected')
salt.utils.fopen(key_file, 'a').close()
with salt.utils.fopen(key_file, 'a'):
pass

# ping disconnected minion and ensure it times out and returns with correct message
try:

@@ -168,8 +168,8 @@ class HostsModuleTest(integration.ModuleCase):
# use an empty one so we can prove the syntax of the entries
# being added by the hosts module
self.__clear_hosts()
f = salt.utils.fopen(HFN, 'w')
f.close()
with salt.utils.fopen(HFN, 'w'):
pass

self.assertTrue(
self.run_function(
@@ -207,7 +207,8 @@ class HostsModuleTest(integration.ModuleCase):
)

# now read the lines and ensure they're formatted correctly
lines = salt.utils.fopen(HFN, 'r').read().splitlines()
with salt.utils.fopen(HFN, 'r') as fp_:
lines = fp_.read().splitlines()
self.assertEqual(lines, [
'192.168.1.3\t\thost3.fqdn.com',
'192.168.1.1\t\thost1.fqdn.com host1 host1-reorder',

@@ -145,6 +145,8 @@ class StateModuleTest(integration.ModuleCase,
ret = self.run_function('state.sls', mods='testappend.step-2')
self.assertSaltTrueReturn(ret)

with salt.utils.fopen(testfile, 'r') as fp_:
contents = fp_.read()
self.assertMultiLineEqual(textwrap.dedent('''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
@@ -155,7 +157,7 @@ class StateModuleTest(integration.ModuleCase,
if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
. /etc/bash_completion
fi
'''), salt.utils.fopen(testfile, 'r').read())
'''), contents)

# Re-append switching order
ret = self.run_function('state.sls', mods='testappend.step-2')
@@ -164,6 +166,8 @@ class StateModuleTest(integration.ModuleCase,
ret = self.run_function('state.sls', mods='testappend.step-1')
self.assertSaltTrueReturn(ret)

with salt.utils.fopen(testfile, 'r') as fp_:
contents = fp_.read()
self.assertMultiLineEqual(textwrap.dedent('''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
@@ -174,7 +178,7 @@ class StateModuleTest(integration.ModuleCase,
if [ -f /etc/bash_completion ] && ! shopt -oq posix; then
. /etc/bash_completion
fi
'''), salt.utils.fopen(testfile, 'r').read())
'''), contents)

def test_issue_1876_syntax_error(self):
'''
@@ -199,7 +203,7 @@ class StateModuleTest(integration.ModuleCase,
)

def test_issue_1879_too_simple_contains_check(self):
contents = textwrap.dedent('''\
expected = textwrap.dedent('''\
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "$debian_chroot" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
@@ -232,10 +236,9 @@ class StateModuleTest(integration.ModuleCase,

# Does it match?
try:
self.assertMultiLineEqual(
contents,
salt.utils.fopen(testfile, 'r').read()
)
with salt.utils.fopen(testfile, 'r') as fp_:
contents = fp_.read()
self.assertMultiLineEqual(expected, contents)
# Make sure we don't re-append existing text
ret = self.run_function(
'state.sls', mods='issue-1879.step-1', timeout=120
@@ -246,10 +249,10 @@ class StateModuleTest(integration.ModuleCase,
'state.sls', mods='issue-1879.step-2', timeout=120
)
self.assertSaltTrueReturn(ret)
self.assertMultiLineEqual(
contents,
salt.utils.fopen(testfile, 'r').read()
)

with salt.utils.fopen(testfile, 'r') as fp_:
contents = fp_.read()
self.assertMultiLineEqual(expected, contents)
except Exception:
if os.path.exists(testfile):
shutil.copy(testfile, testfile + '.bak')
@@ -321,7 +324,8 @@ class StateModuleTest(integration.ModuleCase,
'files', 'file', 'base', 'issue-2068-template-str-no-dot.sls'
)

template = salt.utils.fopen(template_path, 'r').read()
with salt.utils.fopen(template_path, 'r') as fp_:
template = fp_.read()
try:
ret = self.run_function(
'state.template_str', [template], timeout=120
@@ -362,7 +366,8 @@ class StateModuleTest(integration.ModuleCase,
'files', 'file', 'base', 'issue-2068-template-str.sls'
)

template = salt.utils.fopen(template_path, 'r').read()
with salt.utils.fopen(template_path, 'r') as fp_:
template = fp_.read()
try:
ret = self.run_function(
'state.template_str', [template], timeout=120

@@ -56,7 +56,8 @@ class EnabledTest(integration.ModuleCase):
ret_key = 'test_|-shell_enabled_|-{0}_|-configurable_test_state'.format(enabled_ret)

try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
{{% set shell_enabled = salt['cmd.run']("{0}").strip() %}}

shell_enabled:
@@ -83,7 +84,8 @@ class EnabledTest(integration.ModuleCase):
ret_key = 'test_|-shell_enabled_|-{0}_|-configurable_test_state'.format(disabled_ret)

try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
{{% set shell_disabled = salt['cmd.run']("{0}", python_shell=False) %}}

shell_enabled:

@@ -51,7 +51,8 @@ class CMDTest(integration.ModuleCase,
date_file = tempfile.mkstemp()[1]
state_key = 'cmd_|-date > {0}_|-date > {0}_|-run'.format(date_file)
try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
date > {0}:
cmd.run
'''.format(date_file)))
@@ -73,7 +74,8 @@ class CMDTest(integration.ModuleCase,
unless_file = tempfile.mkstemp()[1]
state_key = 'cmd_|-/var/log/messages_|-/var/log/messages_|-run'
try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
/var/log/messages:
cmd.run:
- unless: echo cheese > {0}
@@ -96,7 +98,8 @@ class CMDTest(integration.ModuleCase,
creates_file = tempfile.mkstemp()[1]
state_key = 'cmd_|-touch {0}_|-touch {0}_|-run'.format(creates_file)
try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
touch {0}:
cmd.run:
- creates: {0}
@@ -121,7 +124,8 @@ class CMDTest(integration.ModuleCase,
os.remove(creates_file)
state_key = 'cmd_|-touch {0}_|-touch {0}_|-run'.format(creates_file)
try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
touch {0}:
cmd.run:
- creates: {0}
@@ -146,7 +150,8 @@ class CMDTest(integration.ModuleCase,
biscuits_key = 'cmd_|-biscuits_|-echo hello_|-wait'

try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
saltines:
cmd.run:
- name: echo

@@ -322,8 +322,10 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
state_key = 'file_|-{0}_|-{0}_|-managed'.format(funny_file)

try:
salt.utils.fopen(funny_url_path, 'w').close()
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(funny_url_path, 'w'):
pass
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
{0}:
file.managed:
- source: {1}
@@ -353,7 +355,8 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
managed_files[typ] = tempfile.mkstemp()[1]
state_keys[typ] = 'file_|-{0} file_|-{1}_|-managed'.format(typ, managed_files[typ])
try:
salt.utils.fopen(state_file, 'w').write(textwrap.dedent('''\
with salt.utils.fopen(state_file, 'w') as fp_:
fp_.write(textwrap.dedent('''\
bool file:
file.managed:
- name: {bool}
@@ -424,13 +427,15 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
os.makedirs(name)

strayfile = os.path.join(name, 'strayfile')
salt.utils.fopen(strayfile, 'w').close()
with salt.utils.fopen(strayfile, 'w'):
pass

straydir = os.path.join(name, 'straydir')
if not os.path.isdir(straydir):
os.makedirs(straydir)

salt.utils.fopen(os.path.join(straydir, 'strayfile2'), 'w').close()
with salt.utils.fopen(os.path.join(straydir, 'strayfile2'), 'w'):
pass

ret = self.run_state('file.directory', name=name, clean=True)
try:
@@ -450,17 +455,20 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
os.makedirs(name)

strayfile = os.path.join(name, 'strayfile')
salt.utils.fopen(strayfile, 'w').close()
with salt.utils.fopen(strayfile, 'w'):
pass

straydir = os.path.join(name, 'straydir')
if not os.path.isdir(straydir):
os.makedirs(straydir)

strayfile2 = os.path.join(straydir, 'strayfile2')
salt.utils.fopen(strayfile2, 'w').close()
with salt.utils.fopen(strayfile2, 'w'):
pass

keepfile = os.path.join(straydir, 'keepfile')
salt.utils.fopen(keepfile, 'w').close()
with salt.utils.fopen(keepfile, 'w'):
pass

ret = self.run_state('file.directory',
name=name,
@@ -484,17 +492,20 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
os.makedirs(name)

strayfile = os.path.join(name, 'strayfile')
salt.utils.fopen(strayfile, 'w').close()
with salt.utils.fopen(strayfile, 'w'):
pass

straydir = os.path.join(name, 'straydir')
if not os.path.isdir(straydir):
os.makedirs(straydir)

strayfile2 = os.path.join(straydir, 'strayfile2')
salt.utils.fopen(strayfile2, 'w').close()
with salt.utils.fopen(strayfile2, 'w'):
pass

keepfile = os.path.join(straydir, 'keepfile')
salt.utils.fopen(keepfile, 'w').close()
with salt.utils.fopen(keepfile, 'w'):
pass

ret = self.run_state('file.directory',
test=True,
@@ -727,10 +738,12 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
if not os.path.isdir(name):
os.makedirs(name)
strayfile = os.path.join(name, 'strayfile')
salt.utils.fopen(strayfile, 'w').close()
with salt.utils.fopen(strayfile, 'w'):
pass

# Corner cases: replacing file with a directory and vice versa
salt.utils.fopen(os.path.join(name, '36'), 'w').close()
with salt.utils.fopen(os.path.join(name, '36'), 'w'):
pass
os.makedirs(os.path.join(name, 'scene33'))
ret = self.run_state(
'file.recurse', name=name, source='salt://grail', clean=True)
@@ -750,10 +763,12 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
if not os.path.isdir(name):
os.makedirs(name)
strayfile = os.path.join(name, 'strayfile')
salt.utils.fopen(strayfile, 'w').close()
with salt.utils.fopen(strayfile, 'w'):
pass

# Corner cases: replacing file with a directory and vice versa
salt.utils.fopen(os.path.join(name, '32'), 'w').close()
with salt.utils.fopen(os.path.join(name, '32'), 'w'):
pass
os.makedirs(os.path.join(name, 'scene34'))
ret = self.run_state('file.recurse',
name=name,
@@ -1477,7 +1492,8 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
# Get a path to the temporary file
tmp_file = os.path.join(integration.TMP, 'issue-2041-comment.txt')
# Write some data to it
salt.utils.fopen(tmp_file, 'w').write('hello\nworld\n')
with salt.utils.fopen(tmp_file, 'w') as fp_:
fp_.write('hello\nworld\n')
# create the sls template
template_lines = [
'{0}:'.format(tmp_file),
@@ -1511,11 +1527,12 @@ class FileTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
# Get a path to the temporary file
tmp_file = os.path.join(integration.TMP, 'issue-2379-file-append.txt')
# Write some data to it
salt.utils.fopen(tmp_file, 'w').write(
'hello\nworld\n' + # Some junk
'#PermitRootLogin yes\n' + # Commented text
'# PermitRootLogin yes\n' # Commented text with space
)
with salt.utils.fopen(tmp_file, 'w') as fp_:
fp_.write(
'hello\nworld\n' # Some junk
'#PermitRootLogin yes\n' # Commented text
'# PermitRootLogin yes\n' # Commented text with space
)
# create the sls template
template_lines = [
'{0}:'.format(tmp_file),

@@ -35,12 +35,13 @@ class StateMatchTest(integration.ModuleCase):
top_filename = 'issue-2167-ipcidr-match.sls'
top_file = os.path.join(STATE_DIR, top_filename)
try:
salt.utils.fopen(top_file, 'w').write(
'base:\n'
' {0}:\n'
' - match: ipcidr\n'
' - test\n'.format(subnets[0])
)
with salt.utils.fopen(top_file, 'w') as fp_:
fp_.write(
'base:\n'
' {0}:\n'
' - match: ipcidr\n'
' - test\n'.format(subnets[0])
)
ret = self.run_function('state.top', [top_filename])
self.assertNotIn(
'AttributeError: \'Matcher\' object has no attribute '

@@ -102,10 +102,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
def test_proper_path_joining(self):
fpath = tempfile.mktemp()
try:
salt.utils.fopen(fpath, 'w').write(
"root_dir: /\n"
"key_logfile: key\n"
)
with salt.utils.fopen(fpath, 'w') as fp_:
fp_.write(
'root_dir: /\n'
'key_logfile: key\n'
)
config = sconfig.master_config(fpath)
# os.path.join behavior
self.assertEqual(config['key_logfile'], os.path.join('/', 'key'))
@@ -121,10 +122,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
root_dir = os.path.join(tempdir, 'foo', 'bar')
os.makedirs(root_dir)
fpath = os.path.join(root_dir, 'config')
salt.utils.fopen(fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
with salt.utils.fopen(fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
config = sconfig.master_config(fpath)
self.assertEqual(config['log_file'], fpath)
finally:
@@ -140,10 +142,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
os.makedirs(env_root_dir)
env_fpath = os.path.join(env_root_dir, 'config-env')

salt.utils.fopen(env_fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)
with salt.utils.fopen(env_fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)

os.environ['SALT_MASTER_CONFIG'] = env_fpath
# Should load from env variable, not the default configuration file.
@@ -155,10 +158,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
root_dir = os.path.join(tempdir, 'foo', 'bar')
os.makedirs(root_dir)
fpath = os.path.join(root_dir, 'config')
salt.utils.fopen(fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
with salt.utils.fopen(fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
# Let's set the environment variable, yet, since the configuration
# file path is not the default one, i.e., the user has passed an
# alternative configuration file form the CLI parser, the
@@ -182,10 +186,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
os.makedirs(env_root_dir)
env_fpath = os.path.join(env_root_dir, 'config-env')

salt.utils.fopen(env_fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)
with salt.utils.fopen(env_fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)

os.environ['SALT_MINION_CONFIG'] = env_fpath
# Should load from env variable, not the default configuration file
@@ -197,10 +202,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
root_dir = os.path.join(tempdir, 'foo', 'bar')
os.makedirs(root_dir)
fpath = os.path.join(root_dir, 'config')
salt.utils.fopen(fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
with salt.utils.fopen(fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
# Let's set the environment variable, yet, since the configuration
# file path is not the default one, i.e., the user has passed an
# alternative configuration file form the CLI parser, the
@@ -226,19 +232,21 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
# configuration settings using the provided client configuration
# file
master_config = os.path.join(env_root_dir, 'master')
salt.utils.fopen(master_config, 'w').write(
'blah: true\n'
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, master_config)
)
with salt.utils.fopen(master_config, 'w') as fp_:
fp_.write(
'blah: true\n'
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, master_config)
)
os.environ['SALT_MASTER_CONFIG'] = master_config

# Now the client configuration file
env_fpath = os.path.join(env_root_dir, 'config-env')
salt.utils.fopen(env_fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)
with salt.utils.fopen(env_fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)

os.environ['SALT_CLIENT_CONFIG'] = env_fpath
# Should load from env variable, not the default configuration file
@@ -251,10 +259,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
root_dir = os.path.join(tempdir, 'foo', 'bar')
os.makedirs(root_dir)
fpath = os.path.join(root_dir, 'config')
salt.utils.fopen(fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
with salt.utils.fopen(fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
# Let's set the environment variable, yet, since the configuration
# file path is not the default one, i.e., the user has passed an
# alternative configuration file form the CLI parser, the
@@ -278,11 +287,12 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):

# Let's populate a minion configuration file with some basic
# settings
salt.utils.fopen(minion_config, 'w').write(
'blah: false\n'
'root_dir: {0}\n'
'log_file: {1}\n'.format(tempdir, minion_config)
)
with salt.utils.fopen(minion_config, 'w') as fp_:
fp_.write(
'blah: false\n'
'root_dir: {0}\n'
'log_file: {1}\n'.format(tempdir, minion_config)
)

# Now, let's populate an extra configuration file under minion.d
# Notice that above we've set blah as False and below as True.
@@ -290,9 +300,8 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
# file so overrides can happen, the final value of blah should be
# True.
extra_config = os.path.join(minion_confd, 'extra.conf')
salt.utils.fopen(extra_config, 'w').write(
'blah: true\n'
)
with salt.utils.fopen(extra_config, 'w') as fp_:
fp_.write('blah: true\n')

# Let's load the configuration
config = sconfig.minion_config(minion_config)
@@ -313,11 +322,12 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):

# Let's populate a master configuration file with some basic
# settings
salt.utils.fopen(master_config, 'w').write(
'blah: false\n'
'root_dir: {0}\n'
'log_file: {1}\n'.format(tempdir, master_config)
)
with salt.utils.fopen(master_config, 'w') as fp_:
fp_.write(
'blah: false\n'
'root_dir: {0}\n'
'log_file: {1}\n'.format(tempdir, master_config)
)

# Now, let's populate an extra configuration file under master.d
# Notice that above we've set blah as False and below as True.
@@ -325,9 +335,8 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
# file so overrides can happen, the final value of blah should be
# True.
extra_config = os.path.join(master_confd, 'extra.conf')
salt.utils.fopen(extra_config, 'w').write(
'blah: true\n'
)
with salt.utils.fopen(extra_config, 'w') as fp_:
fp_.write('blah: true\n')

# Let's load the configuration
config = sconfig.master_config(master_config)
@@ -846,10 +855,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
os.makedirs(env_root_dir)
env_fpath = os.path.join(env_root_dir, 'config-env')

salt.utils.fopen(env_fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)
with salt.utils.fopen(env_fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(env_root_dir, env_fpath)
)

os.environ['SALT_CLOUD_CONFIG'] = env_fpath
# Should load from env variable, not the default configuration file
@@ -861,10 +871,11 @@ class ConfigTestCase(TestCase, integration.AdaptedConfigurationTestCaseMixIn):
root_dir = os.path.join(tempdir, 'foo', 'bar')
os.makedirs(root_dir)
fpath = os.path.join(root_dir, 'config')
salt.utils.fopen(fpath, 'w').write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
with salt.utils.fopen(fpath, 'w') as fp_:
fp_.write(
'root_dir: {0}\n'
'log_file: {1}\n'.format(root_dir, fpath)
)
# Let's set the environment variable, yet, since the configuration
# file path is not the default one, i.e., the user has passed an
# alternative configuration file form the CLI parser, the

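The config test hunks above all follow one flow: write a minimal YAML config with `salt.utils.fopen`, then load it through `salt.config`, either by passing the path directly or by pointing one of the `SALT_*_CONFIG` environment variables at it. A small sketch of that flow under the same assumptions as the tests (the paths are illustrative only):

    import os
    import tempfile

    import salt.config as sconfig
    import salt.utils

    root_dir = tempfile.mkdtemp()
    fpath = os.path.join(root_dir, 'config')
    with salt.utils.fopen(fpath, 'w') as fp_:
        fp_.write(
            'root_dir: {0}\n'
            'log_file: {1}\n'.format(root_dir, fpath)
        )

    # Load the file directly...
    config = sconfig.master_config(fpath)
    assert config['log_file'] == fpath

    # ...or, as the SALT_MASTER_CONFIG tests above do, export the path so
    # that loading the default config location picks up this file instead.
    os.environ['SALT_MASTER_CONFIG'] = fpath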
@@ -64,8 +64,7 @@ if NO_MOCK is False:
call.write('::1 localhost'),
call.write('\n'),
call.write('fe80::1%lo0 localhost'),
call.write('\n'),
call.close()]
call.write('\n')]


@skipIf(NO_MOCK, NO_MOCK_REASON)

@@ -170,8 +170,8 @@ class TestGetTemplate(TestCase):
fn_ = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_simple')
with salt.utils.fopen(fn_) as fp_:
out = render_jinja_tmpl(
fp_.read(),
dict(opts=self.local_opts, saltenv='test'))
fp_.read(),
dict(opts=self.local_opts, saltenv='test'))
self.assertEqual(out, 'world\n')

def test_fallback_noloader(self):
@@ -180,8 +180,9 @@ class TestGetTemplate(TestCase):
if the file is not contained in the searchpath
'''
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
out = render_jinja_tmpl(
salt.utils.fopen(filename).read(),
with salt.utils.fopen(filename) as fp_:
out = render_jinja_tmpl(
fp_.read(),
dict(opts=self.local_opts, saltenv='test'))
self.assertEqual(out, 'Hey world !a b !\n')

@@ -197,8 +198,9 @@ class TestGetTemplate(TestCase):
_fc = SaltCacheLoader.file_client
SaltCacheLoader.file_client = lambda loader: fc
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
out = render_jinja_tmpl(
salt.utils.fopen(filename).read(),
with salt.utils.fopen(filename) as fp_:
out = render_jinja_tmpl(
fp_.read(),
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},
@@ -224,12 +226,13 @@ class TestGetTemplate(TestCase):
fc = MockFileClient()
_fc = SaltCacheLoader.file_client
SaltCacheLoader.file_client = lambda loader: fc
self.assertRaisesRegexp(
SaltRenderError,
expected,
render_jinja_tmpl,
salt.utils.fopen(filename).read(),
dict(opts=self.local_opts, saltenv='test'))
with salt.utils.fopen(filename) as fp_:
self.assertRaisesRegexp(
SaltRenderError,
expected,
render_jinja_tmpl,
fp_.read(),
dict(opts=self.local_opts, saltenv='test'))
SaltCacheLoader.file_client = _fc

def test_macro_additional_log_for_undefined(self):
@@ -249,12 +252,13 @@ class TestGetTemplate(TestCase):
fc = MockFileClient()
_fc = SaltCacheLoader.file_client
SaltCacheLoader.file_client = lambda loader: fc
self.assertRaisesRegexp(
SaltRenderError,
expected,
render_jinja_tmpl,
salt.utils.fopen(filename).read(),
dict(opts=self.local_opts, saltenv='test'))
with salt.utils.fopen(filename) as fp_:
self.assertRaisesRegexp(
SaltRenderError,
expected,
render_jinja_tmpl,
fp_.read(),
dict(opts=self.local_opts, saltenv='test'))
SaltCacheLoader.file_client = _fc

def test_macro_additional_log_syntaxerror(self):
@@ -274,12 +278,13 @@ class TestGetTemplate(TestCase):
fc = MockFileClient()
_fc = SaltCacheLoader.file_client
SaltCacheLoader.file_client = lambda loader: fc
self.assertRaisesRegexp(
SaltRenderError,
expected,
render_jinja_tmpl,
salt.utils.fopen(filename).read(),
dict(opts=self.local_opts, saltenv='test'))
with salt.utils.fopen(filename) as fp_:
self.assertRaisesRegexp(
SaltRenderError,
expected,
render_jinja_tmpl,
fp_.read(),
dict(opts=self.local_opts, saltenv='test'))
SaltCacheLoader.file_client = _fc

def test_non_ascii_encoding(self):
@@ -288,8 +293,9 @@ class TestGetTemplate(TestCase):
_fc = SaltCacheLoader.file_client
SaltCacheLoader.file_client = lambda loader: fc
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
out = render_jinja_tmpl(
salt.utils.fopen(filename).read(),
with salt.utils.fopen(filename) as fp_:
out = render_jinja_tmpl(
fp_.read(),
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},
@@ -301,8 +307,9 @@ class TestGetTemplate(TestCase):
_fc = SaltCacheLoader.file_client
SaltCacheLoader.file_client = lambda loader: fc
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'non_ascii')
out = render_jinja_tmpl(
salt.utils.fopen(filename).read(),
with salt.utils.fopen(filename) as fp_:
out = render_jinja_tmpl(
fp_.read(),
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},

@@ -306,9 +306,8 @@ class TestGrepOption(TestCase):

def test_grep_option_match_regular_file(self):
hello_file = os.path.join(self.tmpdir, 'hello.txt')
fd = salt.utils.fopen(hello_file, 'w')
fd.write("foo")
fd.close()
with salt.utils.fopen(hello_file, 'w') as fp_:
fp_.write('foo')
option = salt.utils.find.GrepOption('grep', 'foo')
self.assertEqual(
option.match(self.tmpdir, 'hello.txt', os.stat(hello_file)),
@@ -366,9 +365,8 @@ class TestPrintOption(TestCase):

def test_print_option_execute(self):
hello_file = os.path.join(self.tmpdir, 'hello.txt')
fd = salt.utils.fopen(hello_file, 'w')
fd.write("foo")
fd.close()
with salt.utils.fopen(hello_file, 'w') as fp_:
fp_.write('foo')

option = salt.utils.find.PrintOption('print', '')
self.assertEqual(option.execute('', [0] * 9), '')
@@ -556,9 +554,8 @@ class TestFinder(TestCase):

def test_find(self):
hello_file = os.path.join(self.tmpdir, 'hello.txt')
fd = salt.utils.fopen(hello_file, 'w')
fd.write("foo")
fd.close()
with salt.utils.fopen(hello_file, 'w') as fp_:
fp_.write('foo')

finder = salt.utils.find.Finder({'name': 'test_name'})
self.assertEqual(list(finder.find('')), [])

@@ -23,7 +23,8 @@ from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
from salt.utils import fopen, is_darwin, vt
import salt.utils
import salt.utils.vt

# Import 3rd-party libs
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
@@ -37,7 +38,7 @@ class VTTestCase(TestCase):
if not sys.stdin.isatty():
self.skipTest('Not attached to a TTY. The test would fail.')
cols = random.choice(range(80, 250))
terminal = vt.Terminal(
terminal = salt.utils.vt.Terminal(
'echo "Foo!"',
shell=True,
cols=cols,
@@ -61,7 +62,7 @@ class VTTestCase(TestCase):
# Get current number of PTY's
try:
if os.path.exists('/proc/sys/kernel/pty/nr'):
with fopen('/proc/sys/kernel/pty/nr') as fh_:
with salt.utils.fopen('/proc/sys/kernel/pty/nr') as fh_:
return int(fh_.read().strip())

proc = subprocess.Popen(
@@ -72,7 +73,7 @@ class VTTestCase(TestCase):
stdout, _ = proc.communicate()
return int(stdout.strip())
except (ValueError, OSError, IOError):
if is_darwin():
if salt.utils.is_darwin():
# We're unable to findout how many PTY's are open
self.skipTest(
'Unable to find out how many PTY\'s are open on Darwin - '
@@ -85,7 +86,7 @@ class VTTestCase(TestCase):
# Using context manager's
for idx in range(0, nr_ptys + n_executions):
try:
with vt.Terminal('echo "Run {0}"'.format(idx),
with salt.utils.vt.Terminal('echo "Run {0}"'.format(idx),
shell=True,
stream_stdout=False,
stream_stderr=False) as terminal:
@@ -105,7 +106,7 @@ class VTTestCase(TestCase):
# Not using context manager's
for idx in range(0, nr_ptys + n_executions):
try:
terminal = vt.Terminal('echo "Run {0}"'.format(idx),
terminal = salt.utils.vt.Terminal('echo "Run {0}"'.format(idx),
shell=True,
stream_stdout=False,
stream_stderr=False)
@@ -125,7 +126,10 @@ class VTTestCase(TestCase):
@skipIf(True, 'Disabled until we can figure out how to make this more reliable.')
def test_isalive_while_theres_data_to_read(self):
expected_data = 'Alive!\n'
term = vt.Terminal('echo "Alive!"', shell=True, stream_stdout=False, stream_stderr=False)
term = salt.utils.vt.Terminal('echo "Alive!"',
shell=True,
stream_stdout=False,
stream_stderr=False)
buffer_o = buffer_e = ''
try:
while term.has_unread_data:
@@ -150,7 +154,10 @@ class VTTestCase(TestCase):
term.close(terminate=True, kill=True)

expected_data = 'Alive!\n'
term = vt.Terminal('echo "Alive!" 1>&2', shell=True, stream_stdout=False, stream_stderr=False)
term = salt.utils.vt.Terminal('echo "Alive!" 1>&2',
shell=True,
stream_stdout=False,
stream_stderr=False)
buffer_o = buffer_e = ''
try:
while term.has_unread_data:
@@ -175,7 +182,10 @@ class VTTestCase(TestCase):
term.close(terminate=True, kill=True)

expected_data = 'Alive!\nAlive!\n'
term = vt.Terminal('echo "Alive!"; sleep 5; echo "Alive!"', shell=True, stream_stdout=False, stream_stderr=False)
term = salt.utils.vt.Terminal('echo "Alive!"; sleep 5; echo "Alive!"',
shell=True,
stream_stdout=False,
stream_stderr=False)
buffer_o = buffer_e = ''
try:
while term.has_unread_data:
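The last file replaces `from salt.utils import fopen, is_darwin, vt` with fully qualified imports, so each call site names the module it comes from. A short sketch of the adopted form, reusing the attributes shown in the hunks above; `recv()` is an assumption here, taken to behave as in that era's `salt.utils.vt.Terminal`:

    import salt.utils
    import salt.utils.vt

    term = salt.utils.vt.Terminal('echo "Alive!"',
                                  shell=True,
                                  stream_stdout=False,
                                  stream_stderr=False)
    buffer_o = ''
    try:
        while term.has_unread_data:
            stdout, _ = term.recv()      # drain the PTY until the child exits
            if stdout is not None:
                buffer_o += stdout
    finally:
        term.close(terminate=True, kill=True)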