Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)
commit ccf790a83f (parent fe972f59e7)

Update all references for moved functions to use the "files" util:
- fopen
- flopen
- fpopen
- safe_rm
- is_empty
- is_fcntl_available

363 changed files with 1777 additions and 1571 deletions
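Every hunk below applies the same mechanical rewrite: the helpers listed above now live in the salt.utils.files module instead of directly on salt.utils, so each call site gains an import of salt.utils.files and the call prefix changes. A minimal before/after sketch of that pattern (the path and payload here are illustrative only, not taken from the diff):

    # Old spelling, as it appears on the left-hand side of the hunks:
    #   import salt.utils
    #   with salt.utils.fopen('/tmp/example', 'w+') as fp_:
    #       fp_.write('data')
    #   salt.utils.safe_rm('/tmp/example')

    # New spelling, as it appears on the right-hand side of the hunks:
    import salt.utils.files

    with salt.utils.files.fopen('/tmp/example', 'w+') as fp_:
        fp_.write('data')
    salt.utils.files.safe_rm('/tmp/example')  # remove the file, ignoring a missing-file error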
@@ -371,6 +371,7 @@ on a minion event bus.
.. code-block:: python

import tests.integration as integration
import salt.utils.event

class TestEvent(integration.SaltEventAssertsMixin):
'''

@@ -443,7 +444,7 @@ to test states:
from tests.support.mixins import SaltReturnAssertsMixin

# Import salt libs
-import salt.utils
+import salt.utils.files

HFILE = os.path.join(TMP, 'hosts')


@@ -470,7 +471,7 @@ to test states:
ip = '10.10.10.10'
ret = self.run_state('host.present', name=name, ip=ip)
self.assertSaltTrueReturn(ret)
-with salt.utils.fopen(HFILE) as fp_:
+with salt.utils.files.fopen(HFILE) as fp_:
output = fp_.read()
self.assertIn('{0}\t\t{1}'.format(ip, name), output)
@@ -256,7 +256,7 @@ This function will not generally be more complex than:
.. code-block:: python

def hash_file(path, hashobj, conn=None):
-with salt.utils.fopen(path, 'r') as f:
+with salt.utils.files.fopen(path, 'r') as f:
hashobj.update(f.read())
return hashobj.hexdigest()
@@ -229,7 +229,7 @@ class LoadAuth(object):

try:
with salt.utils.files.set_umask(0o177):
-with salt.utils.fopen(t_path, 'w+b') as fp_:
+with salt.utils.files.fopen(t_path, 'w+b') as fp_:
fp_.write(self.serial.dumps(tdata))
except (IOError, OSError):
log.warning('Authentication failure: can not write token file "{0}".'.format(t_path))

@@ -245,7 +245,7 @@ class LoadAuth(object):
if not os.path.isfile(t_path):
return {}
try:
-with salt.utils.fopen(t_path, 'rb') as fp_:
+with salt.utils.files.fopen(t_path, 'rb') as fp_:
tdata = self.serial.loads(fp_.read())
except (IOError, OSError):
log.warning('Authentication failure: can not read token file "{0}".'.format(t_path))

@@ -670,7 +670,7 @@ class Resolver(object):
return tdata
try:
with salt.utils.files.set_umask(0o177):
-with salt.utils.fopen(self.opts['token_file'], 'w+') as fp_:
+with salt.utils.files.fopen(self.opts['token_file'], 'w+') as fp_:
fp_.write(tdata['token'])
except (IOError, OSError):
pass
@@ -102,6 +102,7 @@ import os

# Import salt utils
import salt.utils
+import salt.utils.files

log = logging.getLogger(__name__)

@@ -158,7 +159,7 @@ def _text(username, password, **kwargs):
username_field = kwargs['username_field']-1
password_field = kwargs['password_field']-1

-with salt.utils.fopen(filename, 'r') as pwfile:
+with salt.utils.files.fopen(filename, 'r') as pwfile:
for line in pwfile.readlines():
fields = line.strip().split(field_separator)
@@ -33,7 +33,7 @@ except ImportError:
# pylint: enable=import-error

# Import salt libs
-import salt.utils
+import salt.utils.files

log = logging.getLogger(__name__)

@@ -74,7 +74,7 @@ def auth(username, password, **kwargs):
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem)

cacert_file = __salt__['config.get']('external_auth:pki:ca_file')
-with salt.utils.fopen(cacert_file) as f:
+with salt.utils.files.fopen(cacert_file) as f:
cacert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, f.read())

log.debug('Attempting to authenticate via pki.')
@@ -14,7 +14,7 @@ import os
import struct

# Import Salt Libs
-import salt.utils
+import salt.utils.files

__virtualname__ = 'btmp'
BTMP = '/var/log/btmp'

@@ -71,7 +71,7 @@ def beacon(config):
btmp: {}
'''
ret = []
-with salt.utils.fopen(BTMP, 'rb') as fp_:
+with salt.utils.files.fopen(BTMP, 'rb') as fp_:
loc = __context__.get(LOC_KEY, 0)
if loc == 0:
fp_.seek(0, 2)
@@ -12,6 +12,7 @@ import logging

# Import salt libs
import salt.utils
+import salt.utils.files


try:

@@ -79,7 +80,7 @@ def beacon(config):
ret.append(event)
return ret

-with salt.utils.fopen(config['file'], 'r') as fp_:
+with salt.utils.files.fopen(config['file'], 'r') as fp_:
loc = __context__.get(LOC_KEY, 0)
if loc == 0:
fp_.seek(0, 2)
@@ -14,7 +14,7 @@ import os
import struct

# Import salt libs
-import salt.utils
+import salt.utils.files

__virtualname__ = 'wtmp'
WTMP = '/var/log/wtmp'

@@ -73,7 +73,7 @@ def beacon(config):
wtmp: {}
'''
ret = []
-with salt.utils.fopen(WTMP, 'rb') as fp_:
+with salt.utils.files.fopen(WTMP, 'rb') as fp_:
loc = __context__.get(LOC_KEY, 0)
if loc == 0:
fp_.seek(0, 2)
salt/cache/localfs.py (vendored, 5 changes)

@@ -20,6 +20,7 @@ import tempfile
from salt.exceptions import SaltCacheError
import salt.utils
import salt.utils.atomicfile
+import salt.utils.files

log = logging.getLogger(__name__)

@@ -58,7 +59,7 @@ def store(bank, key, data, cachedir):
tmpfh, tmpfname = tempfile.mkstemp(dir=base)
os.close(tmpfh)
try:
-with salt.utils.fopen(tmpfname, 'w+b') as fh_:
+with salt.utils.files.fopen(tmpfname, 'w+b') as fh_:
fh_.write(__context__['serial'].dumps(data))
# On Windows, os.rename will fail if the destination file exists.
salt.utils.atomicfile.atomic_rename(tmpfname, outfile)

@@ -85,7 +86,7 @@ def fetch(bank, key, cachedir):
log.debug('Cache file "%s" does not exist', key_file)
return {}
try:
-with salt.utils.fopen(key_file, 'rb') as fh_:
+with salt.utils.files.fopen(key_file, 'rb') as fh_:
if inkey:
return __context__['serial'].load(fh_)[key]
else:
@@ -21,6 +21,7 @@ import salt.output
import salt.payload
import salt.transport
import salt.utils.args
+import salt.utils.files
import salt.utils.jid
import salt.utils.minion
import salt.defaults.exitcodes

@@ -189,7 +190,7 @@ class BaseCaller(object):
no_parse=self.opts.get('no_parse', [])),
data=sdata)
try:
-with salt.utils.fopen(proc_fn, 'w+b') as fp_:
+with salt.utils.files.fopen(proc_fn, 'w+b') as fp_:
fp_.write(self.serial.dumps(sdata))
except NameError:
# Don't require msgpack with local
@@ -75,8 +75,9 @@ class SaltCMD(parsers.SaltCMDOptionParser):
'show_jid': self.options.show_jid}

if 'token' in self.config:
+import salt.utils.files
try:
-with salt.utils.fopen(os.path.join(self.config['cachedir'], '.root_key'), 'r') as fp_:
+with salt.utils.files.fopen(os.path.join(self.config['cachedir'], '.root_key'), 'r') as fp_:
kwargs['key'] = fp_.readline()
except IOError:
kwargs['token'] = self.config['token']
@@ -35,6 +35,7 @@ import salt.loader
import salt.utils
import salt.utils.args
import salt.utils.event
+import salt.utils.files
import salt.utils.minions
import salt.utils.verify
import salt.utils.jid

@@ -193,7 +194,7 @@ class LocalClient(object):
self.skip_perm_errors)

try:
-with salt.utils.fopen(keyfile, 'r') as key:
+with salt.utils.files.fopen(keyfile, 'r') as key:
return key.read()
except (OSError, IOError):
# Fall back to eauth
@@ -39,12 +39,13 @@ import salt.serializers.yaml
import salt.state
import salt.utils
import salt.utils.args
-import salt.utils.event
import salt.utils.atomicfile
+import salt.utils.event
+import salt.utils.files
+import salt.utils.network
import salt.utils.thin
import salt.utils.url
import salt.utils.verify
-import salt.utils.network
from salt.utils import is_windows
from salt.utils.process import MultiprocessingProcess

@@ -195,7 +196,7 @@ if not is_windows():
if not os.path.exists(shim_file):
# On esky builds we only have the .pyc file
shim_file += "c"
-with salt.utils.fopen(shim_file) as ssh_py_shim:
+with salt.utils.files.fopen(shim_file) as ssh_py_shim:
SSH_PY_SHIM = ssh_py_shim.read()

log = logging.getLogger(__name__)

@@ -337,7 +338,7 @@ class SSH(object):
)
)
pub = '{0}.pub'.format(priv)
-with salt.utils.fopen(pub, 'r') as fp_:
+with salt.utils.files.fopen(pub, 'r') as fp_:
return '{0} rsa root@master'.format(fp_.read().split()[1])

def key_deploy(self, host, ret):

@@ -941,12 +942,12 @@ class Single(object):
'grains': opts_pkg['grains'],
'pillar': pillar_data}
if data_cache:
-with salt.utils.fopen(datap, 'w+b') as fp_:
+with salt.utils.files.fopen(datap, 'w+b') as fp_:
fp_.write(
self.serial.dumps(data)
)
if not data and data_cache:
-with salt.utils.fopen(datap, 'rb') as fp_:
+with salt.utils.files.fopen(datap, 'rb') as fp_:
data = self.serial.load(fp_)
opts = data.get('opts', {})
opts['grains'] = data.get('grains')

@@ -1412,7 +1413,7 @@ def mod_data(fsclient):
return mods
tfp = tarfile.open(ext_tar_path, 'w:gz')
verfile = os.path.join(fsclient.opts['cachedir'], 'ext_mods.ver')
-with salt.utils.fopen(verfile, 'w+') as fp_:
+with salt.utils.files.fopen(verfile, 'w+') as fp_:
fp_.write(ver)
tfp.add(verfile, 'ext_version')
for ref in ret:
@@ -15,10 +15,10 @@ from contextlib import closing
# Import salt libs
import salt.client.ssh.shell
import salt.client.ssh
-import salt.utils
+import salt.utils.files
import salt.utils.thin
import salt.utils.url
import salt.utils.verify
import salt.roster
import salt.state
import salt.loader

@@ -177,13 +177,13 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
[salt.utils.url.create('_output')],
[salt.utils.url.create('_utils')],
]
-with salt.utils.fopen(lowfn, 'w+') as fp_:
+with salt.utils.files.fopen(lowfn, 'w+') as fp_:
fp_.write(json.dumps(chunks))
if pillar:
-with salt.utils.fopen(pillarfn, 'w+') as fp_:
+with salt.utils.files.fopen(pillarfn, 'w+') as fp_:
fp_.write(json.dumps(pillar))
if roster_grains:
-with salt.utils.fopen(roster_grainsfn, 'w+') as fp_:
+with salt.utils.files.fopen(roster_grainsfn, 'w+') as fp_:
fp_.write(json.dumps(roster_grains))

if id_ is None:
@@ -2,13 +2,15 @@
'''
Wrap the cp module allowing for managed ssh file transfers
'''
# Import Python libs
from __future__ import absolute_import
import logging
import os

# Import salt libs
import salt.client.ssh
+import salt.utils.files
-import logging
-import os
import salt.utils.templates
from salt.exceptions import CommandExecutionError

log = logging.getLogger(__name__)

@@ -138,14 +140,14 @@ def _render_filenames(path, dest, saltenv, template):
'''
# write out path to temp file
tmp_path_fn = salt.utils.files.mkstemp()
-with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
+with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
fp_.write(contents)
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
tmp_path_fn,
to_str=True,
**kwargs
)
-salt.utils.safe_rm(tmp_path_fn)
+salt.utils.files.safe_rm(tmp_path_fn)
if not data['result']:
# Failed to render the template
raise CommandExecutionError(
@@ -1396,7 +1396,7 @@ class Cloud(object):
vm_overrides = {}

try:
-with salt.utils.fopen(self.opts['conf_file'], 'r') as mcc:
+with salt.utils.files.fopen(self.opts['conf_file'], 'r') as mcc:
main_cloud_config = yaml.safe_load(mcc)
if not main_cloud_config:
main_cloud_config = {}

@@ -2106,7 +2106,7 @@ class Map(Cloud):
# Generate the fingerprint of the master pubkey in order to
# mitigate man-in-the-middle attacks
master_temp_pub = salt.utils.files.mkstemp()
-with salt.utils.fopen(master_temp_pub, 'w') as mtp:
+with salt.utils.files.fopen(master_temp_pub, 'w') as mtp:
mtp.write(pub)
master_finger = salt.utils.pem_finger(master_temp_pub, sum_type=self.opts['hash_type'])
os.unlink(master_temp_pub)
@@ -64,6 +64,7 @@ import salt.cache
import salt.config as config
import salt.utils
import salt.utils.cloud
+import salt.utils.files
import salt.ext.six as six
import salt.version
from salt.exceptions import (

@@ -1002,7 +1003,7 @@ def request_instance(call=None, kwargs=None): # pylint: disable=unused-argument
)
else:
if os.path.exists(userdata_file):
-with salt.utils.fopen(userdata_file, 'r') as fh_:
+with salt.utils.files.fopen(userdata_file, 'r') as fh_:
userdata = fh_.read()

userdata = salt.utils.cloud.userdata_template(__opts__, vm_, userdata)
@@ -36,6 +36,7 @@ import decimal

# Import Salt Libs
import salt.utils.cloud
+import salt.utils.files
import salt.config as config
from salt.exceptions import (
SaltCloudConfigError,

@@ -383,7 +384,7 @@ def create(vm_):
)
if userdata_file is not None:
try:
-with salt.utils.fopen(userdata_file, 'r') as fp_:
+with salt.utils.files.fopen(userdata_file, 'r') as fp_:
kwargs['user_data'] = salt.utils.cloud.userdata_template(
__opts__, vm_, fp_.read()
)

@@ -713,7 +714,7 @@ def import_keypair(kwargs=None, call=None):
file(mandatory): public key file-name
keyname(mandatory): public key name in the provider
'''
-with salt.utils.fopen(kwargs['file'], 'r') as public_key_filename:
+with salt.utils.files.fopen(kwargs['file'], 'r') as public_key_filename:
public_key_content = public_key_filename.read()

digital_ocean_kwargs = {
@@ -89,14 +89,12 @@ import re
import decimal

# Import Salt Libs
import salt.utils
import salt.utils.cloud
+import salt.utils.files
import salt.utils.hashutils
from salt._compat import ElementTree as ET
import salt.utils.http as http
import salt.utils.aws as aws

# Import salt.cloud libs
import salt.utils.cloud
import salt.config as config
from salt.exceptions import (
SaltCloudException,

@@ -1787,7 +1785,7 @@ def request_instance(vm_=None, call=None):
else:
log.trace('userdata_file: {0}'.format(userdata_file))
if os.path.exists(userdata_file):
-with salt.utils.fopen(userdata_file, 'r') as fh_:
+with salt.utils.files.fopen(userdata_file, 'r') as fh_:
userdata = fh_.read()

userdata = salt.utils.cloud.userdata_template(__opts__, vm_, userdata)

@@ -2444,7 +2442,7 @@ def wait_for_instance(
continue
keys += '\n{0} {1}'.format(ip_address, line)

-with salt.utils.fopen(known_hosts_file, 'a') as fp_:
+with salt.utils.files.fopen(known_hosts_file, 'a') as fp_:
fp_.write(keys)
fp_.close()

@@ -4389,7 +4387,7 @@ def import_keypair(kwargs=None, call=None):
public_key_file = kwargs['file']

if os.path.exists(public_key_file):
-with salt.utils.fopen(public_key_file, 'r') as fh_:
+with salt.utils.files.fopen(public_key_file, 'r') as fh_:
public_key = fh_.read()

if public_key is not None:

@@ -4778,7 +4776,7 @@ def get_password_data(

if 'key' not in kwargs:
if 'key_file' in kwargs:
-with salt.utils.fopen(kwargs['key_file'], 'r') as kf_:
+with salt.utils.files.fopen(kwargs['key_file'], 'r') as kf_:
kwargs['key'] = kf_.read()

if 'key' in kwargs:

@@ -4878,7 +4876,7 @@ def _parse_pricing(url, name):
outfile = os.path.join(
__opts__['cachedir'], 'ec2-pricing-{0}.p'.format(name)
)
-with salt.utils.fopen(outfile, 'w') as fho:
+with salt.utils.files.fopen(outfile, 'w') as fho:
msgpack.dump(regions, fho)

return True

@@ -4946,7 +4944,7 @@ def show_pricing(kwargs=None, call=None):
if not os.path.isfile(pricefile):
update_pricing({'type': name}, 'function')

-with salt.utils.fopen(pricefile, 'r') as fhi:
+with salt.utils.files.fopen(pricefile, 'r') as fhi:
ec2_price = msgpack.load(fhi)

region = get_location(profile)
@@ -85,6 +85,7 @@ except ImportError:
from salt.utils import namespaced_function
import salt.ext.six as six
import salt.utils.cloud
+import salt.utils.files
import salt.config as config
from salt.utils import http
from salt.cloud.libcloudfuncs import * # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import

@@ -2619,7 +2620,7 @@ def update_pricing(kwargs=None, call=None):
outfile = os.path.join(
__opts__['cachedir'], 'gce-pricing.p'
)
-with salt.utils.fopen(outfile, 'w') as fho:
+with salt.utils.files.fopen(outfile, 'w') as fho:
msgpack.dump(price_json['dict'], fho)

return True

@@ -2658,7 +2659,7 @@ def show_pricing(kwargs=None, call=None):
if not os.path.exists(pricefile):
update_pricing()

-with salt.utils.fopen(pricefile, 'r') as fho:
+with salt.utils.files.fopen(pricefile, 'r') as fho:
sizes = msgpack.load(fho)

per_hour = float(sizes['gcp_price_list'][size][region])
@@ -67,10 +67,10 @@ from Crypto.Signature import PKCS1_v1_5
# Import salt libs
import salt.ext.six as six
from salt.ext.six.moves import http_client # pylint: disable=import-error,no-name-in-module
-import salt.utils.http
import salt.utils.cloud
+import salt.utils.files
+import salt.utils.http
import salt.config as config
-from salt.utils.cloud import is_public_ip
from salt.cloud.libcloudfuncs import node_state
from salt.exceptions import (
SaltCloudSystemExit,

@@ -693,7 +693,7 @@ def reformat_node(item=None, full=False):
item['public_ips'] = []
if 'ips' in item:
for ip in item['ips']:
-if is_public_ip(ip):
+if salt.utils.cloud.is_public_ip(ip):
item['public_ips'].append(ip)
else:
item['private_ips'].append(ip)

@@ -951,7 +951,7 @@ def import_key(kwargs=None, call=None):
))
return False

-with salt.utils.fopen(kwargs['keyfile'], 'r') as fp_:
+with salt.utils.files.fopen(kwargs['keyfile'], 'r') as fp_:
kwargs['key'] = fp_.read()

send_data = {'name': kwargs['keyname'], 'key': kwargs['key']}

@@ -1070,7 +1070,7 @@ def query(action=None,

timenow = datetime.datetime.utcnow()
timestamp = timenow.strftime('%a, %d %b %Y %H:%M:%S %Z').strip()
-with salt.utils.fopen(ssh_keyfile, 'r') as kh_:
+with salt.utils.files.fopen(ssh_keyfile, 'r') as kh_:
rsa_key = RSA.importKey(kh_)
rsa_ = PKCS1_v1_5.new(rsa_key)
hash_ = SHA256.new()
@@ -210,7 +210,9 @@ import yaml

# Import Salt Libs
import salt.ext.six as six
import salt.utils
import salt.utils.cloud
+import salt.utils.files
+import salt.utils.pycrypto
import salt.client
from salt.utils.openstack import nova
try:

@@ -220,8 +222,6 @@ except ImportError as exc:

# Import Salt Cloud Libs
from salt.cloud.libcloudfuncs import * # pylint: disable=W0614,W0401
-import salt.utils.cloud
-import salt.utils.pycrypto as sup
import salt.config as config
from salt.utils import namespaced_function
from salt.exceptions import (

@@ -651,7 +651,7 @@ def request_instance(vm_=None, call=None):
kwargs['files'] = {}
for src_path in files:
if os.path.exists(files[src_path]):
-with salt.utils.fopen(files[src_path], 'r') as fp_:
+with salt.utils.files.fopen(files[src_path], 'r') as fp_:
kwargs['files'][src_path] = fp_.read()
else:
kwargs['files'][src_path] = files[src_path]

@@ -661,7 +661,7 @@ def request_instance(vm_=None, call=None):
)
if userdata_file is not None:
try:
-with salt.utils.fopen(userdata_file, 'r') as fp_:
+with salt.utils.files.fopen(userdata_file, 'r') as fp_:
kwargs['userdata'] = salt.utils.cloud.userdata_template(
__opts__, vm_, fp_.read()
)

@@ -981,7 +981,7 @@ def create(vm_):
)
data = conn.server_show_libcloud(vm_['instance_id'])
if vm_['key_filename'] is None and 'change_password' in __opts__ and __opts__['change_password'] is True:
-vm_['password'] = sup.secure_password()
+vm_['password'] = salt.utils.pycrypto.secure_password()
conn.root_password(vm_['instance_id'], vm_['password'])
else:
# Put together all of the information required to request the instance,
@@ -77,6 +77,7 @@ from salt.exceptions import (
SaltCloudSystemExit
)
import salt.utils
+import salt.utils.files

# Import Third Party Libs
try:

@@ -1310,7 +1311,7 @@ def image_allocate(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -1876,7 +1877,7 @@ def image_update(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -1969,7 +1970,7 @@ def secgroup_allocate(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -2269,7 +2270,7 @@ def secgroup_update(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -2335,7 +2336,7 @@ def template_allocate(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -2650,7 +2651,7 @@ def template_update(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -2783,7 +2784,7 @@ def vm_allocate(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -2849,7 +2850,7 @@ def vm_attach(name, kwargs=None, call=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -2916,7 +2917,7 @@ def vm_attach_nic(name, kwargs=None, call=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -3603,7 +3604,7 @@ def vm_resize(name, kwargs=None, call=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -3831,7 +3832,7 @@ def vm_update(name, kwargs=None, call=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -3919,7 +3920,7 @@ def vn_add_ar(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -3992,7 +3993,7 @@ def vn_allocate(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -4217,7 +4218,7 @@ def vn_hold(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -4362,7 +4363,7 @@ def vn_release(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(

@@ -4449,7 +4450,7 @@ def vn_reserve(call=None, kwargs=None):
'\'data\' will take precedence.'
)
elif path:
-with salt.utils.fopen(path, mode='r') as rfh:
+with salt.utils.files.fopen(path, mode='r') as rfh:
data = rfh.read()
else:
raise SaltCloudSystemExit(
@@ -175,10 +175,9 @@ from salt.cloud.libcloudfuncs import * # pylint: disable=W0614,W0401

# Import salt libs
import salt.utils

# Import salt.cloud libs
import salt.utils.cloud
-import salt.utils.pycrypto as sup
+import salt.utils.files
+import salt.utils.pycrypto
import salt.config as config
from salt.utils import namespaced_function
from salt.exceptions import (

@@ -529,7 +528,7 @@ def request_instance(vm_=None, call=None):
if files:
kwargs['ex_files'] = {}
for src_path in files:
-with salt.utils.fopen(files[src_path], 'r') as fp_:
+with salt.utils.files.fopen(files[src_path], 'r') as fp_:
kwargs['ex_files'][src_path] = fp_.read()

userdata_file = config.get_cloud_config_value(

@@ -537,7 +536,7 @@ def request_instance(vm_=None, call=None):
)
if userdata_file is not None:
try:
-with salt.utils.fopen(userdata_file, 'r') as fp_:
+with salt.utils.files.fopen(userdata_file, 'r') as fp_:
kwargs['ex_userdata'] = salt.utils.cloud.userdata_template(
__opts__, vm_, fp_.read()
)

@@ -761,7 +760,7 @@ def create(vm_):
)
data = conn.ex_get_node_details(vm_['instance_id'])
if vm_['key_filename'] is None and 'change_password' in __opts__ and __opts__['change_password'] is True:
-vm_['password'] = sup.secure_password()
+vm_['password'] = salt.utils.pycrypto.secure_password()
conn.ex_set_password(data, vm_['password'])
networks(vm_)
else:
@@ -98,7 +98,8 @@ import pprint
import time

# Import salt libs
import salt.utils
import salt.utils.cloud
+import salt.utils.files
import salt.config as config
from salt.exceptions import (
SaltCloudConfigError,

@@ -108,9 +109,6 @@ from salt.exceptions import (
SaltCloudSystemExit
)

-# Import salt.cloud libs
-import salt.utils.cloud

# Import 3rd-party libs
import salt.ext.six as six
try:

@@ -642,7 +640,7 @@ def get_public_keys(vm_):
)
)
ssh_keys = []
-with salt.utils.fopen(key_filename) as rfh:
+with salt.utils.files.fopen(key_filename) as rfh:
for key in rfh.readlines():
ssh_keys.append(key)
@@ -33,6 +33,7 @@ from salt.ext.six.moves.urllib.parse import urlparse
# Import salt libs
import salt.utils
import salt.utils.dictupdate
+import salt.utils.files
import salt.utils.network
import salt.syspaths
import salt.utils.validate.path

@@ -1932,7 +1933,7 @@ def _read_conf_file(path):
Read in a config file from a given path and process it into a dictionary
'''
log.debug('Reading configuration from {0}'.format(path))
-with salt.utils.fopen(path, 'r') as conf_file:
+with salt.utils.files.fopen(path, 'r') as conf_file:
try:
conf_opts = yaml.safe_load(conf_file.read()) or {}
except yaml.YAMLError as err:

@@ -2024,8 +2025,8 @@ def load_config(path, env_var, default_path=None, exit_on_config_errors=True):
template = '{0}.template'.format(path)
if os.path.isfile(template):
log.debug('Writing {0} based on {1}'.format(path, template))
-with salt.utils.fopen(path, 'w') as out:
-with salt.utils.fopen(template, 'r') as ifile:
+with salt.utils.files.fopen(path, 'w') as out:
+with salt.utils.files.fopen(template, 'r') as ifile:
ifile.readline() # skip first line
out.write(ifile.read())

@@ -3313,7 +3314,7 @@ def _cache_id(minion_id, cache_file):
Helper function, writes minion id to a cache file.
'''
try:
-with salt.utils.fopen(cache_file, 'w') as idf:
+with salt.utils.files.fopen(cache_file, 'w') as idf:
idf.write(minion_id)
except (IOError, OSError) as exc:
log.error('Could not cache minion ID: {0}'.format(exc))

@@ -3346,7 +3347,7 @@ def get_id(opts, cache_minion_id=False):

if opts.get('minion_id_caching', True):
try:
-with salt.utils.fopen(id_cache) as idf:
+with salt.utils.files.fopen(id_cache) as idf:
name = idf.readline().strip()
bname = salt.utils.to_bytes(name)
if bname.startswith(codecs.BOM): # Remove BOM if exists

@@ -3748,7 +3749,7 @@ def client_config(path, env_var='SALT_CLIENT_CONFIG', defaults=None):
# Make sure token is still valid
expire = opts.get('token_expire', 43200)
if os.stat(opts['token_file']).st_mtime + expire > time.mktime(time.localtime()):
-with salt.utils.fopen(opts['token_file']) as fp_:
+with salt.utils.files.fopen(opts['token_file']) as fp_:
opts['token'] = fp_.read().strip()
# On some platforms, like OpenBSD, 0.0.0.0 won't catch a master running on localhost
if opts['interface'] == '0.0.0.0':
@@ -21,7 +21,6 @@ import weakref
import getpass

# Import third party libs
-import salt.ext.six as six
from salt.ext.six.moves import zip # pylint: disable=import-error,redefined-builtin
try:
from Cryptodome.Cipher import AES, PKCS1_OAEP

@@ -45,9 +44,12 @@ if not CDOME:
pass

# Import salt libs
+import salt.ext.six as six
import salt.defaults.exitcodes
import salt.utils
import salt.utils.decorators
import salt.utils.event
+import salt.utils.files
import salt.payload
import salt.transport.client
import salt.transport.frame

@@ -78,7 +80,7 @@ def dropfile(cachedir, user=None):

if os.path.isfile(dfn) and not os.access(dfn, os.W_OK):
os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR)
-with salt.utils.fopen(dfn, 'wb+') as fp_:
+with salt.utils.files.fopen(dfn, 'wb+') as fp_:
fp_.write(b'')
os.chmod(dfn, stat.S_IRUSR)
if user:

@@ -120,10 +122,10 @@ def gen_keys(keydir, keyname, keysize, user=None):
raise IOError('Write access denied to "{0}" for user "{1}".'.format(os.path.abspath(keydir), getpass.getuser()))

cumask = os.umask(191)
-with salt.utils.fopen(priv, 'wb+') as f:
+with salt.utils.files.fopen(priv, 'wb+') as f:
f.write(gen.exportKey('PEM'))
os.umask(cumask)
-with salt.utils.fopen(pub, 'wb+') as f:
+with salt.utils.files.fopen(pub, 'wb+') as f:
f.write(gen.publickey().exportKey('PEM'))
os.chmod(priv, 256)
if user:

@@ -149,7 +151,7 @@ def _get_key_with_evict(path, timestamp):
then the params are different and the key is loaded from disk.
'''
log.debug('salt.crypt._get_key_with_evict: Loading private key')
-with salt.utils.fopen(path) as f:
+with salt.utils.files.fopen(path) as f:
key = RSA.importKey(f.read())
return key

@@ -185,7 +187,7 @@ def verify_signature(pubkey_path, message, signature):
Returns True for valid signature.
'''
log.debug('salt.crypt.verify_signature: Loading public key')
-with salt.utils.fopen(pubkey_path) as f:
+with salt.utils.files.fopen(pubkey_path) as f:
pubkey = RSA.importKey(f.read())
log.debug('salt.crypt.verify_signature: Verifying signature')
verifier = PKCS1_v1_5.new(pubkey)

@@ -198,7 +200,7 @@ def gen_signature(priv_path, pub_path, sign_path):
the given private key and writes it to sign_path
'''

-with salt.utils.fopen(pub_path) as fp_:
+with salt.utils.files.fopen(pub_path) as fp_:
mpub_64 = fp_.read()

mpub_sig = sign_message(priv_path, mpub_64)

@@ -213,7 +215,7 @@ def gen_signature(priv_path, pub_path, sign_path):
log.trace('Signature file {0} already exists, please '
'remove it first and try again'.format(sign_path))
else:
-with salt.utils.fopen(sign_path, 'wb+') as sig_f:
+with salt.utils.files.fopen(sign_path, 'wb+') as sig_f:
sig_f.write(salt.utils.to_bytes(mpub_sig_64))
log.trace('Wrote signature to {0}'.format(sign_path))
return True

@@ -270,7 +272,7 @@ class MasterKeys(dict):
self.sig_path = os.path.join(self.opts['pki_dir'],
opts['master_pubkey_signature'])
if os.path.isfile(self.sig_path):
-with salt.utils.fopen(self.sig_path) as fp_:
+with salt.utils.files.fopen(self.sig_path) as fp_:
self.pub_signature = fp_.read()
log.info('Read {0}\'s signature from {1}'
''.format(os.path.basename(self.pub_path),

@@ -310,7 +312,7 @@ class MasterKeys(dict):
path = os.path.join(self.opts['pki_dir'],
name + '.pem')
if os.path.exists(path):
-with salt.utils.fopen(path) as f:
+with salt.utils.files.fopen(path) as f:
key = RSA.importKey(f.read())
log.debug('Loaded {0} key: {1}'.format(name, path))
else:

@@ -319,7 +321,7 @@ class MasterKeys(dict):
name,
self.opts['keysize'],
self.opts.get('user'))
-with salt.utils.fopen(self.rsa_path) as f:
+with salt.utils.files.fopen(self.rsa_path) as f:
key = RSA.importKey(f.read())
return key

@@ -332,9 +334,9 @@ class MasterKeys(dict):
name + '.pub')
if not os.path.isfile(path):
key = self.__get_keys()
-with salt.utils.fopen(path, 'wb+') as wfh:
+with salt.utils.files.fopen(path, 'wb+') as wfh:
wfh.write(key.publickey().exportKey('PEM'))
-with salt.utils.fopen(path) as rfh:
+with salt.utils.files.fopen(path) as rfh:
return rfh.read()

def get_mkey_paths(self):

@@ -674,7 +676,7 @@ class AsyncAuth(object):
salt.utils.verify.check_path_traversal(self.opts['pki_dir'], user)

if os.path.exists(self.rsa_path):
-with salt.utils.fopen(self.rsa_path) as f:
+with salt.utils.files.fopen(self.rsa_path) as f:
key = RSA.importKey(f.read())
log.debug('Loaded minion key: {0}'.format(self.rsa_path))
else:

@@ -683,7 +685,7 @@ class AsyncAuth(object):
'minion',
self.opts['keysize'],
self.opts.get('user'))
-with salt.utils.fopen(self.rsa_path) as f:
+with salt.utils.files.fopen(self.rsa_path) as f:
key = RSA.importKey(f.read())
return key

@@ -712,13 +714,13 @@ class AsyncAuth(object):
payload['id'] = self.opts['id']
try:
pubkey_path = os.path.join(self.opts['pki_dir'], self.mpub)
-with salt.utils.fopen(pubkey_path) as f:
+with salt.utils.files.fopen(pubkey_path) as f:
pub = RSA.importKey(f.read())
cipher = PKCS1_OAEP.new(pub)
payload['token'] = cipher.encrypt(self.token)
except Exception:
pass
-with salt.utils.fopen(self.pub_path) as f:
+with salt.utils.files.fopen(self.pub_path) as f:
payload['pub'] = f.read()
return payload

@@ -760,7 +762,7 @@ class AsyncAuth(object):
m_path = os.path.join(self.opts['pki_dir'], self.mpub)
if os.path.exists(m_path):
try:
-with salt.utils.fopen(m_path) as f:
+with salt.utils.files.fopen(m_path) as f:
mkey = RSA.importKey(f.read())
except Exception:
return '', ''

@@ -829,7 +831,7 @@ class AsyncAuth(object):
'from master {0}'.format(self.opts['master']))
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
uid = salt.utils.get_uid(self.opts.get('user', None))
-with salt.utils.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
+with salt.utils.files.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
wfh.write(salt.utils.to_bytes(payload['pub_key']))
return True
else:

@@ -926,7 +928,7 @@ class AsyncAuth(object):
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
m_pub_exists = os.path.isfile(m_pub_fn)
if m_pub_exists and master_pub and not self.opts['open_mode']:
-with salt.utils.fopen(m_pub_fn) as fp_:
+with salt.utils.files.fopen(m_pub_fn) as fp_:
local_master_pub = fp_.read()

if payload['pub_key'].replace('\n', '').replace('\r', '') != \

@@ -977,7 +979,7 @@ class AsyncAuth(object):
if not m_pub_exists:
# the minion has not received any masters pubkey yet, write
# the newly received pubkey to minion_master.pub
-with salt.utils.fopen(m_pub_fn, 'wb+') as fp_:
+with salt.utils.files.fopen(m_pub_fn, 'wb+') as fp_:
fp_.write(salt.utils.to_bytes(payload['pub_key']))
return self.extract_aes(payload, master_pub=False)
@@ -19,8 +19,9 @@ import json
# Import salt libs
import salt.ext.six as six
import salt.daemons.masterapi
-import salt.utils.args
import salt.utils
+import salt.utils.args
+import salt.utils.files
import salt.transport
from raet import raeting, nacling
from raet.lane.stacking import LaneStack

@@ -279,7 +280,7 @@ class SaltRaetNixJobber(ioflo.base.deeding.Deed):

sdata = {'pid': os.getpid()}
sdata.update(data)
-with salt.utils.fopen(fn_, 'w+b') as fp_:
+with salt.utils.files.fopen(fn_, 'w+b') as fp_:
fp_.write(self.serial.dumps(sdata))
ret = {'success': False}
function_name = data['fun']
@@ -31,10 +31,11 @@ import salt.fileserver
import salt.utils.args
import salt.utils.atomicfile
import salt.utils.event
-import salt.utils.verify
-import salt.utils.minions
+import salt.utils.files
import salt.utils.gzip_util
import salt.utils.jid
+import salt.utils.minions
+import salt.utils.verify
from salt.defaults import DEFAULT_TARGET_DELIM
from salt.pillar import git_pillar
from salt.utils.event import tagify

@@ -154,7 +155,7 @@ def clean_expired_tokens(opts):
for (dirpath, dirnames, filenames) in os.walk(opts['token_dir']):
for token in filenames:
token_path = os.path.join(dirpath, token)
-with salt.utils.fopen(token_path, 'rb') as token_file:
+with salt.utils.files.fopen(token_path, 'rb') as token_file:
try:
token_data = serializer.loads(token_file.read())
except msgpack.UnpackValueError:

@@ -223,7 +224,7 @@ def mk_key(opts, user):

key = salt.crypt.Crypticle.generate_key_string()
cumask = os.umask(191)
-with salt.utils.fopen(keyfile, 'w+') as fp_:
+with salt.utils.files.fopen(keyfile, 'w+') as fp_:
fp_.write(key)
os.umask(cumask)
# 600 octal: Read and write access to the owner only.

@@ -360,7 +361,7 @@ class AutoKey(object):
log.warning(message.format(signing_file))
return False

-with salt.utils.fopen(signing_file, 'r') as fp_:
+with salt.utils.files.fopen(signing_file, 'r') as fp_:
for line in fp_:
line = line.strip()
if line.startswith('#'):

@@ -693,7 +694,7 @@ class RemoteFuncs(object):
mode = 'ab'
else:
mode = 'wb'
-with salt.utils.fopen(cpath, mode) as fp_:
+with salt.utils.files.fopen(cpath, mode) as fp_:
if load['loc']:
fp_.seek(load['loc'])
fp_.write(load['data'])

@@ -858,7 +859,7 @@ class RemoteFuncs(object):
if not os.path.isdir(auth_cache):
os.makedirs(auth_cache)
jid_fn = os.path.join(auth_cache, load['jid'])
-with salt.utils.fopen(jid_fn, 'r') as fp_:
+with salt.utils.files.fopen(jid_fn, 'r') as fp_:
if not load['id'] == fp_.read():
return {}

@@ -915,7 +916,7 @@ class RemoteFuncs(object):
if not os.path.isdir(auth_cache):
os.makedirs(auth_cache)
jid_fn = os.path.join(auth_cache, str(ret['jid']))
-with salt.utils.fopen(jid_fn, 'w+') as fp_:
+with salt.utils.files.fopen(jid_fn, 'w+') as fp_:
fp_.write(load['id'])
return ret
@@ -49,7 +49,9 @@ except ImportError:
HAS_HYPCHAT = False

import salt.utils
+import salt.utils.event
+import salt.utils.files
import salt.utils.http
import salt.runner
import salt.client
import salt.loader

@@ -93,7 +95,7 @@ def _publish_file(token, room, filepath, message='', outputter=None, api_url=Non
headers['Authorization'] = "Bearer " + token
msg = json.dumps({'message': message})

-with salt.utils.fopen(filepath, 'rb') as rfh:
+with salt.utils.files.fopen(filepath, 'rb') as rfh:
payload = """\
--boundary123456
Content-Type: application/json; charset=UTF-8

@@ -411,8 +413,8 @@ def start(token,
_publish_code_message(token, room, ret, message=message_string, outputter=outputter, api_url=api_url)
else:
tmp_path_fn = salt.utils.files.mkstemp()
-with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
+with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
fp_.write(json.dumps(ret, sort_keys=True, indent=4))
_publish_file(token, room, tmp_path_fn, message=message_string, api_url=api_url)
-salt.utils.safe_rm(tmp_path_fn)
+salt.utils.files.safe_rm(tmp_path_fn)
time.sleep(wait_time or _DEFAULT_SLEEP)
@@ -24,11 +24,11 @@ import time
import logging

# Import salt libs
-import salt.utils.minions
import salt.config
import salt.key
+import salt.utils.files
+import salt.utils.minions
import salt.wheel
import salt.utils

# Import 3rd-party libs
import salt.ext.six as six

@@ -59,7 +59,7 @@ def start(interval=3600, expire=604800):
minions = {}
if os.path.exists(presence_file):
try:
-with salt.utils.fopen(presence_file, 'r') as f:
+with salt.utils.files.fopen(presence_file, 'r') as f:
minions = msgpack.load(f)
except IOError as e:
log.error('Could not open presence file {0}: {1}'.format(presence_file, e))

@@ -94,7 +94,7 @@ def start(interval=3600, expire=604800):
del minions[k]

try:
-with salt.utils.fopen(presence_file, 'w') as f:
+with salt.utils.files.fopen(presence_file, 'w') as f:
msgpack.dump(minions, f)
except IOError as e:
log.error('Could not write to presence file {0}: {1}'.format(presence_file, e))
@@ -499,7 +499,7 @@ class Client(object):
'Path \'{0}\' is not absolute'.format(url_path)
)
if dest is None:
-with salt.utils.fopen(url_path, 'r') as fp_:
+with salt.utils.files.fopen(url_path, 'r') as fp_:
data = fp_.read()
return data
return url_path

@@ -507,7 +507,7 @@ class Client(object):
if url_scheme == 'salt':
result = self.get_file(url, dest, makedirs, saltenv, cachedir=cachedir)
if result and dest is None:
-with salt.utils.fopen(result, 'r') as fp_:
+with salt.utils.files.fopen(result, 'r') as fp_:
data = fp_.read()
return data
return result

@@ -558,7 +558,7 @@ class Client(object):
ftp = ftplib.FTP()
ftp.connect(url_data.hostname, url_data.port)
ftp.login(url_data.username, url_data.password)
-with salt.utils.fopen(dest, 'wb') as fp_:
+with salt.utils.files.fopen(dest, 'wb') as fp_:
ftp.retrbinary('RETR {0}'.format(url_data.path), fp_.write)
ftp.quit()
return dest

@@ -680,7 +680,7 @@ class Client(object):
dest_tmp = "{0}.part".format(dest)
# We need an open filehandle to use in the on_chunk callback,
# that's why we're not using a with clause here.
-destfp = salt.utils.fopen(dest_tmp, 'wb') # pylint: disable=resource-leakage
+destfp = salt.utils.files.fopen(dest_tmp, 'wb') # pylint: disable=resource-leakage

def on_chunk(chunk):
if write_body[0]:

@@ -772,7 +772,7 @@ class Client(object):
if makedirs:
os.makedirs(destdir)
else:
-salt.utils.safe_rm(data['data'])
+salt.utils.files.safe_rm(data['data'])
return ''
shutil.move(data['data'], dest)
return dest

@@ -1138,7 +1138,7 @@ class RemoteClient(Client):
return False
# We need an open filehandle here, that's why we're not using a
# with clause:
-fn_ = salt.utils.fopen(dest, 'wb+') # pylint: disable=resource-leakage
+fn_ = salt.utils.files.fopen(dest, 'wb+') # pylint: disable=resource-leakage
else:
log.debug('No dest file found')

@@ -1164,7 +1164,7 @@ class RemoteClient(Client):
saltenv,
cachedir=cachedir) as cache_dest:
dest = cache_dest
-with salt.utils.fopen(cache_dest, 'wb+') as ofile:
+with salt.utils.files.fopen(cache_dest, 'wb+') as ofile:
ofile.write(data['data'])
if 'hsum' in data and d_tries < 3:
# Master has prompted a file verification, if the

@@ -1188,7 +1188,7 @@ class RemoteClient(Client):
# remove it to avoid a traceback trying to write the file
if os.path.isdir(dest):
salt.utils.rm_rf(dest)
-fn_ = salt.utils.fopen(dest, 'wb+')
+fn_ = salt.utils.files.fopen(dest, 'wb+')
if data.get('gzip', None):
data = salt.utils.gzip_util.uncompress(data['data'])
else:
@@ -16,7 +16,9 @@ import time
# Import salt libs
import salt.loader
import salt.utils
+import salt.utils.files
+import salt.utils.locales
import salt.utils.url
from salt.utils.args import get_function_argspec as _argspec

# Import 3rd-party libs

@@ -126,7 +128,7 @@ def check_file_list_cache(opts, form, list_cache, w_lock):
age = opts.get('fileserver_list_cache_time', 20) + 1
if age < opts.get('fileserver_list_cache_time', 20):
# Young enough! Load this sucker up!
-with salt.utils.fopen(list_cache, 'rb') as fp_:
+with salt.utils.files.fopen(list_cache, 'rb') as fp_:
log.trace('Returning file_lists cache data from '
'{0}'.format(list_cache))
return serial.load(fp_).get(form, []), False, False

@@ -151,7 +153,7 @@ def write_file_list_cache(opts, data, list_cache, w_lock):
backend to determine if the cache needs to be refreshed/written).
'''
serial = salt.payload.Serial(opts)
-with salt.utils.fopen(list_cache, 'w+b') as fp_:
+with salt.utils.files.fopen(list_cache, 'w+b') as fp_:
fp_.write(serial.dumps(data))
_unlock_cache(w_lock)
log.trace('Lockfile {0} removed'.format(w_lock))

@@ -164,7 +166,7 @@ def check_env_cache(opts, env_cache):
if not os.path.isfile(env_cache):
return None
try:
-with salt.utils.fopen(env_cache, 'rb') as fp_:
+with salt.utils.files.fopen(env_cache, 'rb') as fp_:
log.trace('Returning env cache data from {0}'.format(env_cache))
serial = salt.payload.Serial(opts)
return serial.load(fp_)
@@ -46,7 +46,6 @@ permissions.

# Import python libs
from __future__ import absolute_import
-from salt.utils.versions import LooseVersion
import base64
import json
import logging

@@ -57,6 +56,9 @@ import shutil
# Import salt libs
import salt.fileserver
import salt.utils
+import salt.utils.files
+import salt.utils.gzip_util
+from salt.utils.versions import LooseVersion

try:
import azure.storage

@@ -160,7 +162,7 @@ def serve_file(load, fnd):
ret['dest'] = fnd['rel']
gzip = load.get('gzip', None)
fpath = os.path.normpath(fnd['path'])
-with salt.utils.fopen(fpath, 'rb') as fp_:
+with salt.utils.files.fopen(fpath, 'rb') as fp_:
fp_.seek(load['loc'])
data = fp_.read(__opts__['file_buffer_size'])
if data and six.PY3 and not salt.utils.is_bin_file(fpath):

@@ -235,7 +237,7 @@ def update():
# Lock writes
lk_fn = fname + '.lk'
salt.fileserver.wait_lock(lk_fn, fname)
-with salt.utils.fopen(lk_fn, 'w+') as fp_:
+with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
fp_.write('')

try:

@@ -254,9 +256,9 @@ def update():
container_list = path + '.list'
lk_fn = container_list + '.lk'
salt.fileserver.wait_lock(lk_fn, container_list)
-with salt.utils.fopen(lk_fn, 'w+') as fp_:
+with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
fp_.write('')
-with salt.utils.fopen(container_list, 'w') as fp_:
+with salt.utils.files.fopen(container_list, 'w') as fp_:
fp_.write(json.dumps(blob_names))
try:
os.unlink(lk_fn)

@@ -282,11 +284,11 @@ def file_hash(load, fnd):
if not os.path.exists(os.path.dirname(hashdest)):
os.makedirs(os.path.dirname(hashdest))
ret['hsum'] = salt.utils.get_hash(path, __opts__['hash_type'])
-with salt.utils.fopen(hashdest, 'w+') as fp_:
+with salt.utils.files.fopen(hashdest, 'w+') as fp_:
fp_.write(ret['hsum'])
return ret
else:
-with salt.utils.fopen(hashdest, 'rb') as fp_:
+with salt.utils.files.fopen(hashdest, 'rb') as fp_:
ret['hsum'] = fp_.read()
return ret

@@ -305,7 +307,7 @@ def file_list(load):
salt.fileserver.wait_lock(lk, container_list, 5)
if not os.path.exists(container_list):
continue
-with salt.utils.fopen(container_list, 'r') as fp_:
+with salt.utils.files.fopen(container_list, 'r') as fp_:
ret.update(set(json.load(fp_)))
except Exception as exc:
log.error('azurefs: an error ocurred retrieving file lists. '
@ -60,6 +60,8 @@ except ImportError:
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
import salt.utils.url
|
||||
import salt.fileserver
|
||||
from salt.utils.event import tagify
|
||||
|
@ -295,7 +297,7 @@ def init():
|
|||
if not refs:
|
||||
# Write an hgrc defining the remote URL
|
||||
hgconfpath = os.path.join(rp_, '.hg', 'hgrc')
|
||||
with salt.utils.fopen(hgconfpath, 'w+') as hgconfig:
|
||||
with salt.utils.files.fopen(hgconfpath, 'w+') as hgconfig:
|
||||
hgconfig.write('[paths]\n')
|
||||
hgconfig.write('default = {0}\n'.format(repo_url))
|
||||
|
||||
|
@ -314,7 +316,7 @@ def init():
|
|||
if new_remote:
|
||||
remote_map = os.path.join(__opts__['cachedir'], 'hgfs/remote_map.txt')
|
||||
try:
|
||||
with salt.utils.fopen(remote_map, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(remote_map, 'w+') as fp_:
|
||||
timestamp = datetime.now().strftime('%d %b %Y %H:%M:%S.%f')
|
||||
fp_.write('# hgfs_remote map as of {0}\n'.format(timestamp))
|
||||
for repo in repos:
|
||||
|
@ -453,7 +455,7 @@ def lock(remote=None):
|
|||
failed = []
|
||||
if not os.path.exists(repo['lockfile']):
|
||||
try:
|
||||
with salt.utils.fopen(repo['lockfile'], 'w+') as fp_:
|
||||
with salt.utils.files.fopen(repo['lockfile'], 'w+') as fp_:
|
||||
fp_.write('')
|
||||
except (IOError, OSError) as exc:
|
||||
msg = ('Unable to set update lock for {0} ({1}): {2} '
|
||||
|
@ -538,7 +540,7 @@ def update():
|
|||
os.makedirs(env_cachedir)
|
||||
new_envs = envs(ignore_cache=True)
|
||||
serial = salt.payload.Serial(__opts__)
|
||||
with salt.utils.fopen(env_cache, 'wb+') as fp_:
|
||||
with salt.utils.files.fopen(env_cache, 'wb+') as fp_:
|
||||
fp_.write(serial.dumps(new_envs))
|
||||
log.trace('Wrote env cache data to {0}'.format(env_cache))
|
||||
|
||||
|
@ -678,7 +680,7 @@ def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613
|
|||
continue
|
||||
salt.fileserver.wait_lock(lk_fn, dest)
|
||||
if os.path.isfile(blobshadest) and os.path.isfile(dest):
|
||||
with salt.utils.fopen(blobshadest, 'r') as fp_:
|
||||
with salt.utils.files.fopen(blobshadest, 'r') as fp_:
|
||||
sha = fp_.read()
|
||||
if sha == ref[2]:
|
||||
fnd['rel'] = path
|
||||
|
@ -692,14 +694,14 @@ def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613
|
|||
except hglib.error.CommandError:
|
||||
repo['repo'].close()
|
||||
continue
|
||||
with salt.utils.fopen(lk_fn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
|
||||
fp_.write('')
|
||||
for filename in glob.glob(hashes_glob):
|
||||
try:
|
||||
os.remove(filename)
|
||||
except Exception:
|
||||
pass
|
||||
with salt.utils.fopen(blobshadest, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(blobshadest, 'w+') as fp_:
|
||||
fp_.write(ref[2])
|
||||
try:
|
||||
os.remove(lk_fn)
|
||||
|
@@ -750,7 +752,7 @@ def serve_file(load, fnd):
     ret['dest'] = fnd['rel']
     gzip = load.get('gzip', None)
     fpath = os.path.normpath(fnd['path'])
-    with salt.utils.fopen(fpath, 'rb') as fp_:
+    with salt.utils.files.fopen(fpath, 'rb') as fp_:
         fp_.seek(load['loc'])
         data = fp_.read(__opts__['file_buffer_size'])
         if data and six.PY3 and not salt.utils.is_bin_file(fpath):
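The serve_file hunks in this and the following backends share one protocol: seek to the offset the client passed in load['loc'] and return at most file_buffer_size bytes. A standalone sketch of that loop, assuming only the fopen usage shown above; the buffer-size constant stands in for __opts__['file_buffer_size'] and the function names are illustrative:

import salt.utils.files

FILE_BUFFER_SIZE = 262144  # stand-in for __opts__['file_buffer_size']


def read_chunk(fpath, loc):
    '''
    Return one chunk of ``fpath`` starting at byte offset ``loc``,
    mirroring the seek/read in the serve_file hunk above.
    '''
    with salt.utils.files.fopen(fpath, 'rb') as fp_:
        fp_.seek(loc)
        return fp_.read(FILE_BUFFER_SIZE)


def iter_chunks(fpath):
    '''Yield successive chunks until the file is exhausted.'''
    loc = 0
    while True:
        data = read_chunk(fpath, loc)
        if not data:
            break
        yield data
        loc += len(data)
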
@@ -787,11 +789,11 @@ def file_hash(load, fnd):
                                                  __opts__['hash_type']))
     if not os.path.isfile(hashdest):
         ret['hsum'] = salt.utils.get_hash(path, __opts__['hash_type'])
-        with salt.utils.fopen(hashdest, 'w+') as fp_:
+        with salt.utils.files.fopen(hashdest, 'w+') as fp_:
             fp_.write(ret['hsum'])
         return ret
     else:
-        with salt.utils.fopen(hashdest, 'rb') as fp_:
+        with salt.utils.files.fopen(hashdest, 'rb') as fp_:
             ret['hsum'] = fp_.read()
         return ret
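file_hash in these backends writes the computed checksum next to the cached file so later requests can skip rehashing. A reduced sketch of that cache, assuming salt.utils.get_hash and salt.utils.files.fopen behave as in the hunk above; the simplified path handling and function name are not from the commit:

import os

import salt.utils
import salt.utils.files


def cached_hash(path, hashdest, hash_type='sha256'):
    '''
    Return the checksum of ``path``, writing it to ``hashdest`` on a cache
    miss and reading it back on a hit, in the spirit of the hunk above.
    '''
    if not os.path.isfile(hashdest):
        hsum = salt.utils.get_hash(path, hash_type)
        with salt.utils.files.fopen(hashdest, 'w+') as fp_:
            fp_.write(hsum)
        return hsum
    with salt.utils.files.fopen(hashdest, 'r') as fp_:
        return fp_.read()
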
@ -32,6 +32,8 @@ import logging
|
|||
# Import salt libs
|
||||
import salt.fileserver
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
import salt.utils.url
|
||||
|
||||
# Import third party libs
|
||||
|
@ -129,7 +131,7 @@ def serve_file(load, fnd):
|
|||
# AP
|
||||
# May I sleep here to slow down serving of big files?
|
||||
# How many threads are serving files?
|
||||
with salt.utils.fopen(fpath, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(fpath, 'rb') as fp_:
|
||||
fp_.seek(load['loc'])
|
||||
data = fp_.read(__opts__['file_buffer_size'])
|
||||
if data and six.PY3 and not salt.utils.is_bin_file(fpath):
|
||||
|
@ -191,7 +193,7 @@ def file_hash(load, fnd):
|
|||
# if we have a cache, serve that if the mtime hasn't changed
|
||||
if os.path.exists(cache_path):
|
||||
try:
|
||||
with salt.utils.fopen(cache_path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(cache_path, 'rb') as fp_:
|
||||
try:
|
||||
hsum, mtime = fp_.read().split(':')
|
||||
except ValueError:
|
||||
|
@ -222,7 +224,7 @@ def file_hash(load, fnd):
|
|||
os.makedirs(cache_dir)
|
||||
# save the cache object "hash:mtime"
|
||||
cache_object = '{0}:{1}'.format(ret['hsum'], os.path.getmtime(path))
|
||||
with salt.utils.flopen(cache_path, 'w') as fp_:
|
||||
with salt.utils.files.flopen(cache_path, 'w') as fp_:
|
||||
fp_.write(cache_object)
|
||||
return ret
|
||||
|
||||
|
|
|
@ -25,8 +25,10 @@ import logging
|
|||
# Import salt libs
|
||||
import salt.fileserver
|
||||
import salt.utils
|
||||
import salt.utils.event
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
import salt.utils.path
|
||||
from salt.utils.event import tagify
|
||||
import salt.ext.six as six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -131,7 +133,7 @@ def serve_file(load, fnd):
|
|||
ret['dest'] = fnd['rel']
|
||||
gzip = load.get('gzip', None)
|
||||
fpath = os.path.normpath(fnd['path'])
|
||||
with salt.utils.fopen(fpath, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(fpath, 'rb') as fp_:
|
||||
fp_.seek(load['loc'])
|
||||
data = fp_.read(__opts__['file_buffer_size'])
|
||||
if data and six.PY3 and not salt.utils.is_bin_file(fpath):
|
||||
|
@ -168,7 +170,7 @@ def update():
|
|||
old_mtime_map = {}
|
||||
# if you have an old map, load that
|
||||
if os.path.exists(mtime_map_path):
|
||||
with salt.utils.fopen(mtime_map_path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(mtime_map_path, 'r') as fp_:
|
||||
for line in fp_:
|
||||
try:
|
||||
file_path, mtime = line.replace('\n', '').split(':', 1)
|
||||
|
@ -193,7 +195,7 @@ def update():
|
|||
mtime_map_path_dir = os.path.dirname(mtime_map_path)
|
||||
if not os.path.exists(mtime_map_path_dir):
|
||||
os.makedirs(mtime_map_path_dir)
|
||||
with salt.utils.fopen(mtime_map_path, 'w') as fp_:
|
||||
with salt.utils.files.fopen(mtime_map_path, 'w') as fp_:
|
||||
for file_path, mtime in six.iteritems(new_mtime_map):
|
||||
fp_.write('{file_path}:{mtime}\n'.format(file_path=file_path,
|
||||
mtime=mtime))
|
||||
|
@@ -206,7 +208,8 @@ def update():
                 __opts__['transport'],
                 opts=__opts__,
                 listen=False)
-        event.fire_event(data, tagify(['roots', 'update'], prefix='fileserver'))
+        event.fire_event(data,
+                         salt.utils.event.tagify(['roots', 'update'], prefix='fileserver'))


 def file_hash(load, fnd):
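Besides the fopen changes, this hunk switches the bare tagify call to the fully qualified salt.utils.event.tagify. A sketch of firing the same fileserver/roots/update event from other code, assuming the fire_event and tagify calls shown above; the leading 'master' and sock_dir arguments to get_event are filled in from the usual call pattern and should be treated as an assumption:

import salt.utils.event


def fire_fileserver_update(opts, data):
    '''
    Fire a ``fileserver/roots/update`` event on the master event bus,
    following the update() hunk above.
    '''
    event = salt.utils.event.get_event(
            'master',           # assumed node argument, not shown in the hunk
            opts['sock_dir'],   # assumed, not shown in the hunk
            opts['transport'],
            opts=opts,
            listen=False)
    event.fire_event(data,
                     salt.utils.event.tagify(['roots', 'update'], prefix='fileserver'))
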
@ -244,7 +247,7 @@ def file_hash(load, fnd):
|
|||
# if we have a cache, serve that if the mtime hasn't changed
|
||||
if os.path.exists(cache_path):
|
||||
try:
|
||||
with salt.utils.fopen(cache_path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(cache_path, 'r') as fp_:
|
||||
try:
|
||||
hsum, mtime = fp_.read().split(':')
|
||||
except ValueError:
|
||||
|
@ -284,7 +287,7 @@ def file_hash(load, fnd):
|
|||
raise
|
||||
# save the cache object "hash:mtime"
|
||||
cache_object = '{0}:{1}'.format(ret['hsum'], os.path.getmtime(path))
|
||||
with salt.utils.flopen(cache_path, 'w') as fp_:
|
||||
with salt.utils.files.flopen(cache_path, 'w') as fp_:
|
||||
fp_.write(cache_object)
|
||||
return ret
|
||||
|
||||
|
|
|
@ -69,6 +69,8 @@ import logging
|
|||
import salt.fileserver as fs
|
||||
import salt.modules
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||
|
@ -225,7 +227,7 @@ def serve_file(load, fnd):
|
|||
|
||||
ret['dest'] = _trim_env_off_path([fnd['path']], load['saltenv'])[0]
|
||||
|
||||
with salt.utils.fopen(cached_file_path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(cached_file_path, 'rb') as fp_:
|
||||
fp_.seek(load['loc'])
|
||||
data = fp_.read(__opts__['file_buffer_size'])
|
||||
if data and six.PY3 and not salt.utils.is_bin_file(cached_file_path):
|
||||
|
@ -535,7 +537,7 @@ def _refresh_buckets_cache_file(cache_file):
|
|||
|
||||
log.debug('Writing buckets cache file')
|
||||
|
||||
with salt.utils.fopen(cache_file, 'w') as fp_:
|
||||
with salt.utils.files.fopen(cache_file, 'w') as fp_:
|
||||
pickle.dump(metadata, fp_)
|
||||
|
||||
return metadata
|
||||
|
@ -548,7 +550,7 @@ def _read_buckets_cache_file(cache_file):
|
|||
|
||||
log.debug('Reading buckets cache file')
|
||||
|
||||
with salt.utils.fopen(cache_file, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(cache_file, 'rb') as fp_:
|
||||
try:
|
||||
data = pickle.load(fp_)
|
||||
except (pickle.UnpicklingError, AttributeError, EOFError, ImportError,
|
||||
|
|
|
@ -55,6 +55,8 @@ except ImportError:
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
import salt.utils.url
|
||||
import salt.fileserver
|
||||
from salt.utils.event import tagify
|
||||
|
@ -224,7 +226,7 @@ def init():
|
|||
if new_remote:
|
||||
remote_map = os.path.join(__opts__['cachedir'], 'svnfs/remote_map.txt')
|
||||
try:
|
||||
with salt.utils.fopen(remote_map, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(remote_map, 'w+') as fp_:
|
||||
timestamp = datetime.now().strftime('%d %b %Y %H:%M:%S.%f')
|
||||
fp_.write('# svnfs_remote map as of {0}\n'.format(timestamp))
|
||||
for repo_conf in repos:
|
||||
|
@ -367,7 +369,7 @@ def lock(remote=None):
|
|||
failed = []
|
||||
if not os.path.exists(repo['lockfile']):
|
||||
try:
|
||||
with salt.utils.fopen(repo['lockfile'], 'w+') as fp_:
|
||||
with salt.utils.files.fopen(repo['lockfile'], 'w+') as fp_:
|
||||
fp_.write('')
|
||||
except (IOError, OSError) as exc:
|
||||
msg = ('Unable to set update lock for {0} ({1}): {2} '
|
||||
|
@ -453,7 +455,7 @@ def update():
|
|||
os.makedirs(env_cachedir)
|
||||
new_envs = envs(ignore_cache=True)
|
||||
serial = salt.payload.Serial(__opts__)
|
||||
with salt.utils.fopen(env_cache, 'wb+') as fp_:
|
||||
with salt.utils.files.fopen(env_cache, 'wb+') as fp_:
|
||||
fp_.write(serial.dumps(new_envs))
|
||||
log.trace('Wrote env cache data to {0}'.format(env_cache))
|
||||
|
||||
|
@ -645,7 +647,7 @@ def serve_file(load, fnd):
|
|||
ret['dest'] = fnd['rel']
|
||||
gzip = load.get('gzip', None)
|
||||
fpath = os.path.normpath(fnd['path'])
|
||||
with salt.utils.fopen(fpath, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(fpath, 'rb') as fp_:
|
||||
fp_.seek(load['loc'])
|
||||
data = fp_.read(__opts__['file_buffer_size'])
|
||||
if data and six.PY3 and not salt.utils.is_bin_file(fpath):
|
||||
|
@ -695,7 +697,7 @@ def file_hash(load, fnd):
|
|||
__opts__['hash_type']))
|
||||
# If we have a cache, serve that if the mtime hasn't changed
|
||||
if os.path.exists(cache_path):
|
||||
with salt.utils.fopen(cache_path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(cache_path, 'rb') as fp_:
|
||||
hsum, mtime = fp_.read().split(':')
|
||||
if os.path.getmtime(path) == mtime:
|
||||
# check if mtime changed
|
||||
|
@ -709,7 +711,7 @@ def file_hash(load, fnd):
|
|||
if not os.path.exists(cache_dir):
|
||||
os.makedirs(cache_dir)
|
||||
# save the cache object "hash:mtime"
|
||||
with salt.utils.fopen(cache_path, 'w') as fp_:
|
||||
with salt.utils.files.fopen(cache_path, 'w') as fp_:
|
||||
fp_.write('{0}:{1}'.format(ret['hsum'], os.path.getmtime(path)))
|
||||
|
||||
return ret
|
||||
|
|
|
@ -43,8 +43,9 @@ except ImportError:
|
|||
import salt.exceptions
|
||||
import salt.log
|
||||
import salt.utils
|
||||
import salt.utils.network
|
||||
import salt.utils.dns
|
||||
import salt.utils.files
|
||||
import salt.utils.network
|
||||
import salt.ext.six as six
|
||||
from salt.ext.six.moves import range
|
||||
|
||||
|
@@ -119,7 +120,7 @@ def _linux_cpudata():
     cpuinfo = '/proc/cpuinfo'
     # Parse over the cpuinfo file
     if os.path.isfile(cpuinfo):
-        with salt.utils.fopen(cpuinfo, 'r') as _fp:
+        with salt.utils.files.fopen(cpuinfo, 'r') as _fp:
             for line in _fp:
                 comps = line.split(':')
                 if not len(comps) > 1:
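The grains hunks that follow are all the same shape: open a /proc or /etc file with salt.utils.files.fopen and parse it line by line. A condensed sketch of the /proc/cpuinfo parsing above; the specific keys handled at the end are illustrative rather than a copy of _linux_cpudata():

import os

import salt.utils.files


def linux_cpuinfo(cpuinfo='/proc/cpuinfo'):
    '''
    Parse ``key : value`` pairs out of /proc/cpuinfo, in the style of the
    _linux_cpudata() hunk above.
    '''
    grains = {}
    if not os.path.isfile(cpuinfo):
        return grains
    with salt.utils.files.fopen(cpuinfo, 'r') as _fp:
        for line in _fp:
            comps = line.split(':')
            if not len(comps) > 1:
                continue
            key, val = comps[0].strip(), comps[1].strip()
            if key == 'processor':
                # processor indices are zero-based, so the count is index + 1
                grains['num_cpus'] = int(val) + 1
            elif key == 'model name':
                grains['cpu_model'] = val
    return grains
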
@ -338,7 +339,7 @@ def _bsd_cpudata(osdata):
|
|||
if osdata['kernel'] == 'FreeBSD' and os.path.isfile('/var/run/dmesg.boot'):
|
||||
grains['cpu_flags'] = []
|
||||
# TODO: at least it needs to be tested for BSD other then FreeBSD
|
||||
with salt.utils.fopen('/var/run/dmesg.boot', 'r') as _fp:
|
||||
with salt.utils.files.fopen('/var/run/dmesg.boot', 'r') as _fp:
|
||||
cpu_here = False
|
||||
for line in _fp:
|
||||
if line.startswith('CPU: '):
|
||||
|
@ -402,7 +403,7 @@ def _memdata(osdata):
|
|||
meminfo = '/proc/meminfo'
|
||||
|
||||
if os.path.isfile(meminfo):
|
||||
with salt.utils.fopen(meminfo, 'r') as ifile:
|
||||
with salt.utils.files.fopen(meminfo, 'r') as ifile:
|
||||
for line in ifile:
|
||||
comps = line.rstrip('\n').split(':')
|
||||
if not len(comps) > 1:
|
||||
|
@ -711,10 +712,10 @@ def _virtual(osdata):
|
|||
pass
|
||||
if os.path.isfile('/proc/1/cgroup'):
|
||||
try:
|
||||
with salt.utils.fopen('/proc/1/cgroup', 'r') as fhr:
|
||||
with salt.utils.files.fopen('/proc/1/cgroup', 'r') as fhr:
|
||||
if ':/lxc/' in fhr.read():
|
||||
grains['virtual_subtype'] = 'LXC'
|
||||
with salt.utils.fopen('/proc/1/cgroup', 'r') as fhr:
|
||||
with salt.utils.files.fopen('/proc/1/cgroup', 'r') as fhr:
|
||||
fhr_contents = fhr.read()
|
||||
if ':/docker/' in fhr_contents or ':/system.slice/docker' in fhr_contents:
|
||||
grains['virtual_subtype'] = 'Docker'
|
||||
|
@ -730,7 +731,7 @@ def _virtual(osdata):
|
|||
failed_commands.discard('dmidecode')
|
||||
# Provide additional detection for OpenVZ
|
||||
if os.path.isfile('/proc/self/status'):
|
||||
with salt.utils.fopen('/proc/self/status') as status_file:
|
||||
with salt.utils.files.fopen('/proc/self/status') as status_file:
|
||||
vz_re = re.compile(r'^envID:\s+(\d+)$')
|
||||
for line in status_file:
|
||||
vz_match = vz_re.match(line.rstrip('\n'))
|
||||
|
@ -750,7 +751,7 @@ def _virtual(osdata):
|
|||
grains['virtual_subtype'] = 'Xen HVM DomU'
|
||||
elif os.path.isfile('/proc/xen/capabilities') and \
|
||||
os.access('/proc/xen/capabilities', os.R_OK):
|
||||
with salt.utils.fopen('/proc/xen/capabilities') as fhr:
|
||||
with salt.utils.files.fopen('/proc/xen/capabilities') as fhr:
|
||||
if 'control_d' not in fhr.read():
|
||||
# Tested on CentOS 5.5 / 2.6.18-194.3.1.el5xen
|
||||
grains['virtual_subtype'] = 'Xen PV DomU'
|
||||
|
@ -770,12 +771,12 @@ def _virtual(osdata):
|
|||
if 'dom' in grains.get('virtual_subtype', '').lower():
|
||||
grains['virtual'] = 'xen'
|
||||
if os.path.isfile('/proc/cpuinfo'):
|
||||
with salt.utils.fopen('/proc/cpuinfo', 'r') as fhr:
|
||||
with salt.utils.files.fopen('/proc/cpuinfo', 'r') as fhr:
|
||||
if 'QEMU Virtual CPU' in fhr.read():
|
||||
grains['virtual'] = 'kvm'
|
||||
if os.path.isfile('/sys/devices/virtual/dmi/id/product_name'):
|
||||
try:
|
||||
with salt.utils.fopen('/sys/devices/virtual/dmi/id/product_name', 'r') as fhr:
|
||||
with salt.utils.files.fopen('/sys/devices/virtual/dmi/id/product_name', 'r') as fhr:
|
||||
output = fhr.read()
|
||||
if 'VirtualBox' in output:
|
||||
grains['virtual'] = 'VirtualBox'
|
||||
|
@ -1234,7 +1235,7 @@ def _parse_os_release():
|
|||
filename = '/usr/lib/os-release'
|
||||
|
||||
data = dict()
|
||||
with salt.utils.fopen(filename) as ifile:
|
||||
with salt.utils.files.fopen(filename) as ifile:
|
||||
regex = re.compile('^([\\w]+)=(?:\'|")?(.*?)(?:\'|")?$')
|
||||
for line in ifile:
|
||||
match = regex.match(line.strip())
|
||||
|
@ -1331,7 +1332,7 @@ def os_data():
|
|||
grains['init'] = 'systemd'
|
||||
except (OSError, IOError):
|
||||
if os.path.exists('/proc/1/cmdline'):
|
||||
with salt.utils.fopen('/proc/1/cmdline') as fhr:
|
||||
with salt.utils.files.fopen('/proc/1/cmdline') as fhr:
|
||||
init_cmdline = fhr.read().replace('\x00', ' ').split()
|
||||
try:
|
||||
init_bin = salt.utils.which(init_cmdline[0])
|
||||
|
@ -1350,7 +1351,7 @@ def os_data():
|
|||
# Default to the value of file_buffer_size for the minion
|
||||
buf_size = 262144
|
||||
try:
|
||||
with salt.utils.fopen(init_bin, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(init_bin, 'rb') as fp_:
|
||||
buf = True
|
||||
edge = six.b('')
|
||||
buf = fp_.read(buf_size).lower()
|
||||
|
@ -1407,7 +1408,7 @@ def os_data():
|
|||
'^(DISTRIB_(?:ID|RELEASE|CODENAME|DESCRIPTION))=(?:\'|")?'
|
||||
'([\\w\\s\\.\\-_]+)(?:\'|")?'
|
||||
))
|
||||
with salt.utils.fopen('/etc/lsb-release') as ifile:
|
||||
with salt.utils.files.fopen('/etc/lsb-release') as ifile:
|
||||
for line in ifile:
|
||||
match = regex.match(line.rstrip('\n'))
|
||||
if match:
|
||||
|
@ -1442,7 +1443,7 @@ def os_data():
|
|||
grains['lsb_distrib_id'] = 'SUSE'
|
||||
version = ''
|
||||
patch = ''
|
||||
with salt.utils.fopen('/etc/SuSE-release') as fhr:
|
||||
with salt.utils.files.fopen('/etc/SuSE-release') as fhr:
|
||||
for line in fhr:
|
||||
if 'enterprise' in line.lower():
|
||||
grains['lsb_distrib_id'] = 'SLES'
|
||||
|
@ -1462,7 +1463,7 @@ def os_data():
|
|||
elif os.path.isfile('/etc/altlinux-release'):
|
||||
# ALT Linux
|
||||
grains['lsb_distrib_id'] = 'altlinux'
|
||||
with salt.utils.fopen('/etc/altlinux-release') as ifile:
|
||||
with salt.utils.files.fopen('/etc/altlinux-release') as ifile:
|
||||
# This file is symlinked to from:
|
||||
# /etc/fedora-release
|
||||
# /etc/redhat-release
|
||||
|
@ -1477,7 +1478,7 @@ def os_data():
|
|||
elif os.path.isfile('/etc/centos-release'):
|
||||
# CentOS Linux
|
||||
grains['lsb_distrib_id'] = 'CentOS'
|
||||
with salt.utils.fopen('/etc/centos-release') as ifile:
|
||||
with salt.utils.files.fopen('/etc/centos-release') as ifile:
|
||||
for line in ifile:
|
||||
# Need to pull out the version and codename
|
||||
# in the case of custom content in /etc/centos-release
|
||||
|
@ -1492,7 +1493,7 @@ def os_data():
|
|||
elif os.path.isfile('/etc.defaults/VERSION') \
|
||||
and os.path.isfile('/etc.defaults/synoinfo.conf'):
|
||||
grains['osfullname'] = 'Synology'
|
||||
with salt.utils.fopen('/etc.defaults/VERSION', 'r') as fp_:
|
||||
with salt.utils.files.fopen('/etc.defaults/VERSION', 'r') as fp_:
|
||||
synoinfo = {}
|
||||
for line in fp_:
|
||||
try:
|
||||
|
@ -1565,7 +1566,7 @@ def os_data():
|
|||
if salt.utils.is_smartos_globalzone():
|
||||
grains.update(_smartos_computenode_data())
|
||||
elif os.path.isfile('/etc/release'):
|
||||
with salt.utils.fopen('/etc/release', 'r') as fp_:
|
||||
with salt.utils.files.fopen('/etc/release', 'r') as fp_:
|
||||
rel_data = fp_.read()
|
||||
try:
|
||||
release_re = re.compile(
|
||||
|
@ -1928,7 +1929,7 @@ def get_machine_id():
|
|||
if not existing_locations:
|
||||
return {}
|
||||
else:
|
||||
with salt.utils.fopen(existing_locations[0]) as machineid:
|
||||
with salt.utils.files.fopen(existing_locations[0]) as machineid:
|
||||
return {'machine_id': machineid.read().strip()}
|
||||
|
||||
|
||||
|
@ -2047,7 +2048,7 @@ def _hw_data(osdata):
|
|||
contents_file = os.path.join('/sys/class/dmi/id', fw_file)
|
||||
if os.path.exists(contents_file):
|
||||
try:
|
||||
with salt.utils.fopen(contents_file, 'r') as ifile:
|
||||
with salt.utils.files.fopen(contents_file, 'r') as ifile:
|
||||
grains[key] = ifile.read()
|
||||
if key == 'uuid':
|
||||
grains['uuid'] = grains['uuid'].lower()
|
||||
|
@ -2321,19 +2322,19 @@ def _smartos_zone_data():
|
|||
imageversion = re.compile('Image:\\s(.+)')
|
||||
pkgsrcpath = re.compile('PKG_PATH=(.+)')
|
||||
if os.path.isfile('/etc/pkgsrc_version'):
|
||||
with salt.utils.fopen('/etc/pkgsrc_version', 'r') as fp_:
|
||||
with salt.utils.files.fopen('/etc/pkgsrc_version', 'r') as fp_:
|
||||
for line in fp_:
|
||||
match = pkgsrcversion.match(line)
|
||||
if match:
|
||||
grains['pkgsrcversion'] = match.group(1)
|
||||
if os.path.isfile('/etc/product'):
|
||||
with salt.utils.fopen('/etc/product', 'r') as fp_:
|
||||
with salt.utils.files.fopen('/etc/product', 'r') as fp_:
|
||||
for line in fp_:
|
||||
match = imageversion.match(line)
|
||||
if match:
|
||||
grains['imageversion'] = match.group(1)
|
||||
if os.path.isfile('/opt/local/etc/pkg_install.conf'):
|
||||
with salt.utils.fopen('/opt/local/etc/pkg_install.conf', 'r') as fp_:
|
||||
with salt.utils.files.fopen('/opt/local/etc/pkg_install.conf', 'r') as fp_:
|
||||
for line in fp_:
|
||||
match = pkgsrcpath.match(line)
|
||||
if match:
|
||||
|
|
|
@ -11,6 +11,7 @@ import re
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
# Solve the Chicken and egg problem where grains need to run before any
|
||||
# of the modules are loaded and are generally available for any usage.
|
||||
|
@ -127,7 +128,7 @@ def _linux_disks():
|
|||
ret = {'disks': [], 'SSDs': []}
|
||||
|
||||
for entry in glob.glob('/sys/block/*/queue/rotational'):
|
||||
with salt.utils.fopen(entry) as entry_fp:
|
||||
with salt.utils.files.fopen(entry) as entry_fp:
|
||||
device = entry.split('/')[3]
|
||||
flag = entry_fp.read(1)
|
||||
if flag == '0':
|
||||
|
|
|
@ -10,7 +10,7 @@ import yaml
|
|||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -41,7 +41,7 @@ def config():
|
|||
'grains'
|
||||
)
|
||||
if os.path.isfile(gfn):
|
||||
with salt.utils.fopen(gfn, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(gfn, 'rb') as fp_:
|
||||
try:
|
||||
return yaml.safe_load(fp_.read())
|
||||
except Exception:
|
||||
|
|
|
@ -8,7 +8,7 @@ from __future__ import absolute_import
|
|||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -19,7 +19,7 @@ def mdadm():
|
|||
'''
|
||||
devices = set()
|
||||
try:
|
||||
with salt.utils.fopen('/proc/mdstat', 'r') as mdstat:
|
||||
with salt.utils.files.fopen('/proc/mdstat', 'r') as mdstat:
|
||||
for line in mdstat:
|
||||
if line.startswith('Personalities : '):
|
||||
continue
|
||||
|
|
salt/key.py (28 changes)

@ -23,7 +23,9 @@ import salt.daemons.masterapi
|
|||
import salt.exceptions
|
||||
import salt.minion
|
||||
import salt.utils
|
||||
import salt.utils.args
|
||||
import salt.utils.event
|
||||
import salt.utils.files
|
||||
import salt.utils.kinds
|
||||
|
||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||
|
@ -118,7 +120,7 @@ class KeyCLI(object):
|
|||
if self.opts['eauth']:
|
||||
if 'token' in self.opts:
|
||||
try:
|
||||
with salt.utils.fopen(os.path.join(self.opts['cachedir'], '.root_key'), 'r') as fp_:
|
||||
with salt.utils.files.fopen(os.path.join(self.opts['cachedir'], '.root_key'), 'r') as fp_:
|
||||
low['key'] = fp_.readline()
|
||||
except IOError:
|
||||
low['token'] = self.opts['token']
|
||||
|
@ -649,7 +651,7 @@ class Key(object):
|
|||
ret[status] = {}
|
||||
for key in salt.utils.isorted(keys):
|
||||
path = os.path.join(self.opts['pki_dir'], status, key)
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(path, 'r') as fp_:
|
||||
ret[status][key] = fp_.read()
|
||||
return ret
|
||||
|
||||
|
@ -662,7 +664,7 @@ class Key(object):
|
|||
ret[status] = {}
|
||||
for key in salt.utils.isorted(keys):
|
||||
path = os.path.join(self.opts['pki_dir'], status, key)
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(path, 'r') as fp_:
|
||||
ret[status][key] = fp_.read()
|
||||
return ret
|
||||
|
||||
|
@ -1005,7 +1007,7 @@ class RaetKey(Key):
|
|||
if not name or prefix != 'estate':
|
||||
continue
|
||||
path = os.path.join(road_cache, road)
|
||||
with salt.utils.fopen(path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(path, 'rb') as fp_:
|
||||
if ext == '.json':
|
||||
data = json.load(fp_)
|
||||
elif ext == '.msgpack':
|
||||
|
@ -1060,7 +1062,7 @@ class RaetKey(Key):
|
|||
'pub': pub,
|
||||
'verify': verify}
|
||||
if self.opts['open_mode']: # always accept and overwrite
|
||||
with salt.utils.fopen(acc_path, 'w+b') as fp_:
|
||||
with salt.utils.files.fopen(acc_path, 'w+b') as fp_:
|
||||
fp_.write(self.serial.dumps(keydata))
|
||||
return self.ACC
|
||||
if os.path.isfile(rej_path):
|
||||
|
@ -1068,7 +1070,7 @@ class RaetKey(Key):
|
|||
return self.REJ
|
||||
elif os.path.isfile(acc_path):
|
||||
# The minion id has been accepted, verify the key strings
|
||||
with salt.utils.fopen(acc_path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(acc_path, 'rb') as fp_:
|
||||
keydata = self.serial.loads(fp_.read())
|
||||
if keydata['pub'] == pub and keydata['verify'] == verify:
|
||||
return self.ACC
|
||||
|
@ -1078,7 +1080,7 @@ class RaetKey(Key):
|
|||
elif os.path.isfile(pre_path):
|
||||
auto_reject = self.auto_key.check_autoreject(minion_id)
|
||||
auto_sign = self.auto_key.check_autosign(minion_id)
|
||||
with salt.utils.fopen(pre_path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(pre_path, 'rb') as fp_:
|
||||
keydata = self.serial.loads(fp_.read())
|
||||
if keydata['pub'] == pub and keydata['verify'] == verify:
|
||||
if auto_reject:
|
||||
|
@ -1109,7 +1111,7 @@ class RaetKey(Key):
|
|||
else:
|
||||
w_path = pre_path
|
||||
ret = self.PEND
|
||||
with salt.utils.fopen(w_path, 'w+b') as fp_:
|
||||
with salt.utils.files.fopen(w_path, 'w+b') as fp_:
|
||||
fp_.write(self.serial.dumps(keydata))
|
||||
return ret
|
||||
|
||||
|
@ -1121,7 +1123,7 @@ class RaetKey(Key):
|
|||
verify: <verify>
|
||||
'''
|
||||
path = os.path.join(self.opts['pki_dir'], status, minion_id)
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(path, 'r') as fp_:
|
||||
keydata = self.serial.loads(fp_.read())
|
||||
return 'pub: {0}\nverify: {1}'.format(
|
||||
keydata['pub'],
|
||||
|
@@ -1131,7 +1133,7 @@ class RaetKey(Key):
         '''
         Return a sha256 kingerprint for the key
         '''
-        with salt.utils.fopen(path, 'r') as fp_:
+        with salt.utils.files.fopen(path, 'r') as fp_:
             keydata = self.serial.loads(fp_.read())
             key = 'pub: {0}\nverify: {1}'.format(
                 keydata['pub'],
@ -1387,7 +1389,7 @@ class RaetKey(Key):
|
|||
path = os.path.join(self.opts['pki_dir'], status, minion_id)
|
||||
if not os.path.isfile(path):
|
||||
return {}
|
||||
with salt.utils.fopen(path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(path, 'rb') as fp_:
|
||||
return self.serial.loads(fp_.read())
|
||||
|
||||
def read_local(self):
|
||||
|
@ -1398,7 +1400,7 @@ class RaetKey(Key):
|
|||
path = os.path.join(self.opts['pki_dir'], 'local.key')
|
||||
if not os.path.isfile(path):
|
||||
return {}
|
||||
with salt.utils.fopen(path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(path, 'rb') as fp_:
|
||||
return self.serial.loads(fp_.read())
|
||||
|
||||
def write_local(self, priv, sign):
|
||||
|
@ -1412,7 +1414,7 @@ class RaetKey(Key):
|
|||
if os.path.exists(path):
|
||||
#mode = os.stat(path).st_mode
|
||||
os.chmod(path, stat.S_IWUSR | stat.S_IRUSR)
|
||||
with salt.utils.fopen(path, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(path, 'w+') as fp_:
|
||||
fp_.write(self.serial.dumps(keydata))
|
||||
os.chmod(path, stat.S_IRUSR)
|
||||
os.umask(c_umask)
|
||||
|
|
|
@ -22,6 +22,7 @@ from zipimport import zipimporter
|
|||
import salt.config
|
||||
import salt.syspaths
|
||||
import salt.utils.context
|
||||
import salt.utils.files
|
||||
import salt.utils.lazy
|
||||
import salt.utils.event
|
||||
import salt.utils.odict
|
||||
|
@ -634,7 +635,7 @@ def _load_cached_grains(opts, cfn):
|
|||
log.debug('Retrieving grains from cache')
|
||||
try:
|
||||
serial = salt.payload.Serial(opts)
|
||||
with salt.utils.fopen(cfn, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(cfn, 'rb') as fp_:
|
||||
cached_grains = serial.load(fp_)
|
||||
if not cached_grains:
|
||||
log.debug('Cached grains are empty, cache might be corrupted. Refreshing.')
|
||||
|
@ -784,7 +785,7 @@ def grains(opts, force_refresh=False, proxy=None):
|
|||
import salt.modules.cmdmod
|
||||
# Make sure cache file isn't read-only
|
||||
salt.modules.cmdmod._run_quiet('attrib -R "{0}"'.format(cfn))
|
||||
with salt.utils.fopen(cfn, 'w+b') as fp_:
|
||||
with salt.utils.files.fopen(cfn, 'w+b') as fp_:
|
||||
try:
|
||||
serial = salt.payload.Serial(opts)
|
||||
serial.dump(grains_data, fp_)
|
||||
|
@ -1412,7 +1413,7 @@ class LazyLoader(salt.utils.lazy.LazyDict):
|
|||
# pylint: enable=no-member
|
||||
sys.modules[mod_namespace] = mod
|
||||
else:
|
||||
with salt.utils.fopen(fpath, desc[1]) as fn_:
|
||||
with salt.utils.files.fopen(fpath, desc[1]) as fn_:
|
||||
mod = imp.load_module(mod_namespace, fn_, fpath, desc)
|
||||
except IOError:
|
||||
raise
|
||||
|
|
|
@ -67,6 +67,8 @@ import salt.log.setup
|
|||
import salt.utils.args
|
||||
import salt.utils.atomicfile
|
||||
import salt.utils.event
|
||||
import salt.utils.files
|
||||
import salt.utils.gitfs
|
||||
import salt.utils.job
|
||||
import salt.utils.verify
|
||||
import salt.utils.minions
|
||||
|
@ -988,7 +990,7 @@ class AESFuncs(object):
|
|||
pub_path = os.path.join(self.opts['pki_dir'], 'minions', id_)
|
||||
|
||||
try:
|
||||
with salt.utils.fopen(pub_path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(pub_path, 'r') as fp_:
|
||||
minion_pub = fp_.read()
|
||||
pub = RSA.importKey(minion_pub)
|
||||
except (IOError, OSError):
|
||||
|
@ -1300,7 +1302,7 @@ class AESFuncs(object):
|
|||
mode = 'ab'
|
||||
else:
|
||||
mode = 'wb'
|
||||
with salt.utils.fopen(cpath, mode) as fp_:
|
||||
with salt.utils.files.fopen(cpath, mode) as fp_:
|
||||
if load['loc']:
|
||||
fp_.seek(load['loc'])
|
||||
if six.PY3:
|
||||
|
@ -1444,7 +1446,7 @@ class AESFuncs(object):
|
|||
path_name = os.path.split(syndic_cache_path)[0]
|
||||
if not os.path.exists(path_name):
|
||||
os.makedirs(path_name)
|
||||
with salt.utils.fopen(syndic_cache_path, 'w') as wfh:
|
||||
with salt.utils.files.fopen(syndic_cache_path, 'w') as wfh:
|
||||
wfh.write('')
|
||||
|
||||
# Format individual return loads
|
||||
|
@ -1500,7 +1502,7 @@ class AESFuncs(object):
|
|||
if not os.path.isdir(auth_cache):
|
||||
os.makedirs(auth_cache)
|
||||
jid_fn = os.path.join(auth_cache, str(load['jid']))
|
||||
with salt.utils.fopen(jid_fn, 'r') as fp_:
|
||||
with salt.utils.files.fopen(jid_fn, 'r') as fp_:
|
||||
if not load['id'] == fp_.read():
|
||||
return {}
|
||||
# Grab the latest and return
|
||||
|
|
|
@ -86,6 +86,7 @@ import salt.payload
|
|||
import salt.syspaths
|
||||
import salt.utils
|
||||
import salt.utils.context
|
||||
import salt.utils.files
|
||||
import salt.utils.jid
|
||||
import salt.pillar
|
||||
import salt.utils.args
|
||||
|
@ -667,7 +668,7 @@ class SMinion(MinionBase):
|
|||
else:
|
||||
penv = 'base'
|
||||
cache_top = {penv: {self.opts['id']: ['cache']}}
|
||||
with salt.utils.fopen(ptop, 'wb') as fp_:
|
||||
with salt.utils.files.fopen(ptop, 'wb') as fp_:
|
||||
fp_.write(
|
||||
yaml.dump(
|
||||
cache_top,
|
||||
|
@ -676,7 +677,7 @@ class SMinion(MinionBase):
|
|||
)
|
||||
os.chmod(ptop, 0o600)
|
||||
cache_sls = os.path.join(pdir, 'cache.sls')
|
||||
with salt.utils.fopen(cache_sls, 'wb') as fp_:
|
||||
with salt.utils.files.fopen(cache_sls, 'wb') as fp_:
|
||||
fp_.write(
|
||||
yaml.dump(
|
||||
self.opts['pillar'],
|
||||
|
@ -1418,7 +1419,7 @@ class Minion(MinionBase):
|
|||
sdata = {'pid': os.getpid()}
|
||||
sdata.update(data)
|
||||
log.info('Starting a new job with PID {0}'.format(sdata['pid']))
|
||||
with salt.utils.fopen(fn_, 'w+b') as fp_:
|
||||
with salt.utils.files.fopen(fn_, 'w+b') as fp_:
|
||||
fp_.write(minion_instance.serial.dumps(sdata))
|
||||
ret = {'success': False}
|
||||
function_name = data['fun']
|
||||
|
|
|
@ -11,7 +11,7 @@ import stat
|
|||
import tempfile
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.utils import which as _which
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
|
@ -49,7 +49,7 @@ def __parse_aliases():
|
|||
ret = []
|
||||
if not os.path.isfile(afn):
|
||||
return ret
|
||||
with salt.utils.fopen(afn, 'r') as ifile:
|
||||
with salt.utils.files.fopen(afn, 'r') as ifile:
|
||||
for line in ifile:
|
||||
match = __ALIAS_RE.match(line)
|
||||
if match:
|
||||
|
|
|
@ -11,7 +11,7 @@ import os
|
|||
import logging
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six as six
|
||||
|
@ -89,7 +89,7 @@ def show_link(name):
|
|||
path += 'alternatives/{0}'.format(name)
|
||||
|
||||
try:
|
||||
with salt.utils.fopen(path, 'rb') as r_file:
|
||||
with salt.utils.files.fopen(path, 'rb') as r_file:
|
||||
contents = r_file.read()
|
||||
if six.PY3:
|
||||
contents = contents.decode(__salt_system_encoding__)
|
||||
|
|
|
@ -30,6 +30,7 @@ from salt.ext.six.moves.urllib.request import (
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -450,7 +451,7 @@ def config(name, config, edit=True):
|
|||
key = next(six.iterkeys(entry))
|
||||
configs = _parse_config(entry[key], key)
|
||||
if edit:
|
||||
with salt.utils.fopen(name, 'w') as configfile:
|
||||
with salt.utils.files.fopen(name, 'w') as configfile:
|
||||
configfile.write('# This file is managed by Salt.\n')
|
||||
configfile.write(configs)
|
||||
return configs
|
||||
|
|
|
@ -39,6 +39,7 @@ import salt.config
|
|||
import salt.syspaths
|
||||
from salt.modules.cmdmod import _parse_env
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.itertools
|
||||
import salt.utils.pkg
|
||||
import salt.utils.pkg.deb
|
||||
|
@ -2662,7 +2663,7 @@ def set_selections(path=None, selection=None, clear=False, saltenv='base'):
|
|||
|
||||
if path:
|
||||
path = __salt__['cp.cache_file'](path, saltenv)
|
||||
with salt.utils.fopen(path, 'r') as ifile:
|
||||
with salt.utils.files.fopen(path, 'r') as ifile:
|
||||
content = ifile.readlines()
|
||||
selection = _parse_selections(content)
|
||||
|
||||
|
|
|
@ -1279,14 +1279,14 @@ def _render_filenames(filenames, zip_file, saltenv, template):
|
|||
'''
|
||||
# write out path to temp file
|
||||
tmp_path_fn = salt.utils.files.mkstemp()
|
||||
with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
|
||||
fp_.write(contents)
|
||||
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
|
||||
tmp_path_fn,
|
||||
to_str=True,
|
||||
**kwargs
|
||||
)
|
||||
salt.utils.safe_rm(tmp_path_fn)
|
||||
salt.utils.files.safe_rm(tmp_path_fn)
|
||||
if not data['result']:
|
||||
# Failed to render the template
|
||||
raise CommandExecutionError(
|
||||
|
|
|
@ -10,7 +10,7 @@ import base64
|
|||
import logging
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.ext.six.moves.http_client # pylint: disable=import-error,redefined-builtin,no-name-in-module
|
||||
from salt.ext.six.moves import urllib # pylint: disable=no-name-in-module
|
||||
from salt.ext.six.moves.urllib.error import HTTPError, URLError # pylint: disable=no-name-in-module
|
||||
|
@ -465,7 +465,7 @@ def __save_artifact(artifact_url, target_file, headers):
|
|||
try:
|
||||
request = urllib.request.Request(artifact_url, None, headers)
|
||||
f = urllib.request.urlopen(request)
|
||||
with salt.utils.fopen(target_file, "wb") as local_file:
|
||||
with salt.utils.files.fopen(target_file, "wb") as local_file:
|
||||
local_file.write(f.read())
|
||||
result['status'] = True
|
||||
result['comment'] = __append_comment(('Artifact downloaded from URL: {0}'.format(artifact_url)), result['comment'])
|
||||
|
|
|
@ -26,6 +26,7 @@ from salt.ext.six.moves import map
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
__virtualname__ = 'at'
|
||||
|
@ -100,7 +101,7 @@ def atq(tag=None):
|
|||
job=job
|
||||
)
|
||||
if __salt__['file.file_exists'](atjob_file):
|
||||
with salt.utils.fopen(atjob_file, 'r') as atjob:
|
||||
with salt.utils.files.fopen(atjob_file, 'r') as atjob:
|
||||
for line in atjob:
|
||||
tmp = job_kw_regex.match(line)
|
||||
if tmp:
|
||||
|
@ -224,7 +225,7 @@ def atc(jobid):
|
|||
job=jobid
|
||||
)
|
||||
if __salt__['file.file_exists'](atjob_file):
|
||||
with salt.utils.fopen(atjob_file, 'r') as rfh:
|
||||
with salt.utils.files.fopen(atjob_file, 'r') as rfh:
|
||||
return "".join(rfh.readlines())
|
||||
else:
|
||||
return {'error': 'invalid job id \'{0}\''.format(jobid)}
|
||||
|
|
|
@ -14,8 +14,8 @@ import os
|
|||
import yaml
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.utils.event
|
||||
import salt.utils.files
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Get logging started
|
||||
|
@ -291,7 +291,7 @@ def save():
|
|||
yaml_out = ''
|
||||
|
||||
try:
|
||||
with salt.utils.fopen(sfn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(sfn, 'w+') as fp_:
|
||||
fp_.write(yaml_out)
|
||||
ret['comment'] = 'Beacons saved to {0}.'.format(sfn)
|
||||
except (IOError, OSError):
|
||||
|
|
|
@ -89,7 +89,7 @@ import random
|
|||
# Import Salt libs
|
||||
import salt.ext.six as six
|
||||
import salt.utils.compat
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.utils.versions import LooseVersion as _LooseVersion
|
||||
from salt.exceptions import SaltInvocationError
|
||||
from salt.ext.six.moves import range # pylint: disable=import-error
|
||||
|
@ -201,7 +201,7 @@ def _get_role_arn(name, region=None, key=None, keyid=None, profile=None):
|
|||
|
||||
|
||||
def _filedata(infile):
|
||||
with salt.utils.fopen(infile, 'rb') as f:
|
||||
with salt.utils.files.fopen(infile, 'rb') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ except ImportError:
|
|||
|
||||
# Import salt libs
|
||||
import salt.ext.six as six
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
# Define the module's virtual name
|
||||
|
@ -76,7 +76,7 @@ def info(name):
|
|||
python_shell=False).split(':')[5:7]
|
||||
elif __grains__['kernel'] in ('NetBSD', 'OpenBSD'):
|
||||
try:
|
||||
with salt.utils.fopen('/etc/master.passwd', 'r') as fp_:
|
||||
with salt.utils.files.fopen('/etc/master.passwd', 'r') as fp_:
|
||||
for line in fp_:
|
||||
if line.startswith('{0}:'.format(name)):
|
||||
key = line.split(':')
|
||||
|
|
|
@ -39,7 +39,7 @@ except ImportError:
|
|||
HAS_CAPIRCA = False
|
||||
|
||||
# Import Salt modules
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.ext import six
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -213,7 +213,7 @@ def _get_services_mapping():
|
|||
return _SERVICES
|
||||
services_txt = ''
|
||||
try:
|
||||
with salt.utils.fopen('/etc/services', 'r') as srv_f:
|
||||
with salt.utils.files.fopen('/etc/services', 'r') as srv_f:
|
||||
services_txt = srv_f.read()
|
||||
except IOError as ioe:
|
||||
log.error('Unable to read from /etc/services:')
|
||||
|
|
|
@@ -142,14 +142,14 @@ def _render_cmd(cmd, cwd, template, saltenv='base', pillarenv=None, pillar_overr
     def _render(contents):
         # write out path to temp file
         tmp_path_fn = salt.utils.files.mkstemp()
-        with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
+        with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
             fp_.write(contents)
         data = salt.utils.templates.TEMPLATE_REGISTRY[template](
             tmp_path_fn,
             to_str=True,
             **kwargs
         )
-        salt.utils.safe_rm(tmp_path_fn)
+        salt.utils.files.safe_rm(tmp_path_fn)
         if not data['result']:
             # Failed to render the template
             raise CommandExecutionError(
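This _render helper is the template-rendering pattern repeated later in cp.py and the archive hunks: write the string to a temp file from salt.utils.files.mkstemp(), render it through TEMPLATE_REGISTRY, then clean up with salt.utils.files.safe_rm. A reduced sketch under the assumption that the registry returns its usual {'result': ..., 'data': ...} mapping; the function name is illustrative:

import salt.utils.files
import salt.utils.templates
from salt.exceptions import CommandExecutionError


def render_string(contents, template='jinja', **kwargs):
    '''
    Render ``contents`` with the named template engine via a temporary
    file, mirroring the _render() hunk above.
    '''
    tmp_path_fn = salt.utils.files.mkstemp()
    with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
        fp_.write(contents)
    data = salt.utils.templates.TEMPLATE_REGISTRY[template](
        tmp_path_fn,
        to_str=True,
        **kwargs
    )
    # Always drop the temp file; safe_rm ignores a file that is already gone
    salt.utils.files.safe_rm(tmp_path_fn)
    if not data['result']:
        # Failed to render the template
        raise CommandExecutionError('Failed to render: {0}'.format(data['data']))
    return data['data']
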
@ -2419,7 +2419,7 @@ def exec_code_all(lang, code, cwd=None):
|
|||
else:
|
||||
codefile = salt.utils.files.mkstemp()
|
||||
|
||||
with salt.utils.fopen(codefile, 'w+t', binary=False) as fp_:
|
||||
with salt.utils.files.fopen(codefile, 'w+t', binary=False) as fp_:
|
||||
fp_.write(code)
|
||||
|
||||
if powershell:
|
||||
|
@ -2450,7 +2450,7 @@ def tty(device, echo=''):
|
|||
else:
|
||||
return {'Error': 'The specified device is not a valid TTY'}
|
||||
try:
|
||||
with salt.utils.fopen(teletype, 'wb') as tty_device:
|
||||
with salt.utils.files.fopen(teletype, 'wb') as tty_device:
|
||||
tty_device.write(salt.utils.to_bytes(echo))
|
||||
return {
|
||||
'Success': 'Message was successfully echoed to {0}'.format(teletype)
|
||||
|
@ -2662,7 +2662,7 @@ def _is_valid_shell(shell):
|
|||
available_shells = []
|
||||
if os.path.exists(shells):
|
||||
try:
|
||||
with salt.utils.fopen(shells, 'r') as shell_fp:
|
||||
with salt.utils.files.fopen(shells, 'r') as shell_fp:
|
||||
lines = shell_fp.read().splitlines()
|
||||
for line in lines:
|
||||
if line.startswith('#'):
|
||||
|
@ -2694,7 +2694,7 @@ def shells():
|
|||
ret = []
|
||||
if os.path.exists(shells_fn):
|
||||
try:
|
||||
with salt.utils.fopen(shells_fn, 'r') as shell_fp:
|
||||
with salt.utils.files.fopen(shells_fn, 'r') as shell_fp:
|
||||
lines = shell_fp.read().splitlines()
|
||||
for line in lines:
|
||||
line = line.strip()
|
||||
|
|
|
@ -17,6 +17,7 @@ import salt.fileclient
|
|||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
import salt.utils.templates
|
||||
import salt.utils.url
|
||||
import salt.crypt
|
||||
import salt.transport
|
||||
|
@ -86,7 +87,7 @@ def recv(dest, chunk, append=False, compressed=True, mode=None):
|
|||
|
||||
open_mode = 'ab' if append else 'wb'
|
||||
try:
|
||||
fh_ = salt.utils.fopen(dest, open_mode) # pylint: disable=W8470
|
||||
fh_ = salt.utils.files.fopen(dest, open_mode) # pylint: disable=W8470
|
||||
except (IOError, OSError) as exc:
|
||||
if exc.errno != errno.ENOENT:
|
||||
# Parent dir does not exist, we need to create it
|
||||
|
@ -96,7 +97,7 @@ def recv(dest, chunk, append=False, compressed=True, mode=None):
|
|||
except (IOError, OSError) as makedirs_exc:
|
||||
# Failed to make directory
|
||||
return _error(makedirs_exc.__str__())
|
||||
fh_ = salt.utils.fopen(dest, open_mode) # pylint: disable=W8470
|
||||
fh_ = salt.utils.files.fopen(dest, open_mode) # pylint: disable=W8470
|
||||
|
||||
try:
|
||||
# Write the chunk to disk
|
||||
|
@ -178,14 +179,14 @@ def _render_filenames(path, dest, saltenv, template, **kw):
|
|||
'''
|
||||
# write out path to temp file
|
||||
tmp_path_fn = salt.utils.files.mkstemp()
|
||||
with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
|
||||
fp_.write(contents)
|
||||
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
|
||||
tmp_path_fn,
|
||||
to_str=True,
|
||||
**kwargs
|
||||
)
|
||||
salt.utils.safe_rm(tmp_path_fn)
|
||||
salt.utils.files.safe_rm(tmp_path_fn)
|
||||
if not data['result']:
|
||||
# Failed to render the template
|
||||
raise CommandExecutionError(
|
||||
|
@ -391,7 +392,7 @@ def get_file_str(path, saltenv='base'):
|
|||
fn_ = cache_file(path, saltenv)
|
||||
if isinstance(fn_, six.string_types):
|
||||
try:
|
||||
with salt.utils.fopen(fn_, 'r') as fp_:
|
||||
with salt.utils.files.fopen(fn_, 'r') as fp_:
|
||||
return fp_.read()
|
||||
except IOError:
|
||||
return False
|
||||
|
@ -768,7 +769,7 @@ def push(path, keep_symlinks=False, upload_path=None, remove_source=False):
|
|||
'path': load_path_list,
|
||||
'tok': auth.gen_token('salt')}
|
||||
channel = salt.transport.Channel.factory(__opts__)
|
||||
with salt.utils.fopen(path, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(path, 'rb') as fp_:
|
||||
init_send = False
|
||||
while True:
|
||||
load['loc'] = fp_.tell()
|
||||
|
|
|
@ -13,6 +13,7 @@ import logging
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -107,7 +108,7 @@ def remove(module, details=False):
|
|||
if 'MANIFEST' not in contents:
|
||||
continue
|
||||
mfile = os.path.join(build_dir, 'MANIFEST')
|
||||
with salt.utils.fopen(mfile, 'r') as fh_:
|
||||
with salt.utils.files.fopen(mfile, 'r') as fh_:
|
||||
for line in fh_.readlines():
|
||||
if line.startswith('lib/'):
|
||||
files.append(line.replace('lib/', ins_path).strip())
|
||||
|
|
|
@@ -227,13 +227,13 @@ def _write_cron_lines(user, lines):
     path = salt.utils.files.mkstemp()
     if _check_instance_uid_match(user) or __grains__.get('os_family') in ('Solaris', 'AIX'):
         # In some cases crontab command should be executed as user rather than root
-        with salt.utils.fpopen(path, 'w+', uid=__salt__['file.user_to_uid'](user), mode=0o600) as fp_:
+        with salt.utils.files.fpopen(path, 'w+', uid=__salt__['file.user_to_uid'](user), mode=0o600) as fp_:
            fp_.writelines(lines)
         ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path),
                                       runas=user,
                                       python_shell=False)
     else:
-        with salt.utils.fpopen(path, 'w+', mode=0o600) as fp_:
+        with salt.utils.files.fpopen(path, 'w+', mode=0o600) as fp_:
            fp_.writelines(lines)
         ret = __salt__['cmd.run_all'](_get_cron_cmdstr(path, user),
                                       python_shell=False)
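fpopen is the third helper the commit moves into salt.utils.files: fopen plus ownership and permission handling, which is why the crontab temp file above is written with uid= and mode= keyword arguments. A minimal sketch of the same idea, assuming the fpopen signature used in this hunk; the wrapper function is illustrative:

import salt.utils.files


def write_private_tempfile(lines, uid=None):
    '''
    Write ``lines`` to a fresh temp file readable only by its owner,
    following the _write_cron_lines() hunk above.
    '''
    path = salt.utils.files.mkstemp()
    if uid is not None:
        # Hand the file to the target user before a command runs as them
        with salt.utils.files.fpopen(path, 'w+', uid=uid, mode=0o600) as fp_:
            fp_.writelines(lines)
    else:
        with salt.utils.files.fpopen(path, 'w+', mode=0o600) as fp_:
            fp_.writelines(lines)
    return path
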
@ -13,6 +13,7 @@ import re
|
|||
|
||||
# Import salt libraries
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
# Import 3rd-party libs
|
||||
|
@ -140,7 +141,7 @@ def crypttab(config='/etc/crypttab'):
|
|||
ret = {}
|
||||
if not os.path.isfile(config):
|
||||
return ret
|
||||
with salt.utils.fopen(config) as ifile:
|
||||
with salt.utils.files.fopen(config) as ifile:
|
||||
for line in ifile:
|
||||
try:
|
||||
entry = _crypttab_entry.dict_from_line(line)
|
||||
|
@ -177,7 +178,7 @@ def rm_crypttab(name, config='/etc/crypttab'):
|
|||
# the list. At the end, re-create the config from just those lines.
|
||||
lines = []
|
||||
try:
|
||||
with salt.utils.fopen(config, 'r') as ifile:
|
||||
with salt.utils.files.fopen(config, 'r') as ifile:
|
||||
for line in ifile:
|
||||
try:
|
||||
if criteria.match(line):
|
||||
|
@ -194,7 +195,7 @@ def rm_crypttab(name, config='/etc/crypttab'):
|
|||
|
||||
if modified:
|
||||
try:
|
||||
with salt.utils.fopen(config, 'w+') as ofile:
|
||||
with salt.utils.files.fopen(config, 'w+') as ofile:
|
||||
ofile.writelines(lines)
|
||||
except (IOError, OSError) as exc:
|
||||
msg = "Couldn't write to {0}: {1}"
|
||||
|
@ -271,7 +272,7 @@ def set_crypttab(
|
|||
raise CommandExecutionError('Bad config file "{0}"'.format(config))
|
||||
|
||||
try:
|
||||
with salt.utils.fopen(config, 'r') as ifile:
|
||||
with salt.utils.files.fopen(config, 'r') as ifile:
|
||||
for line in ifile:
|
||||
try:
|
||||
if criteria.match(line):
|
||||
|
@ -301,7 +302,7 @@ def set_crypttab(
|
|||
if ret != 'present': # ret in ['new', 'change']:
|
||||
if not test:
|
||||
try:
|
||||
with salt.utils.fopen(config, 'w+') as ofile:
|
||||
with salt.utils.files.fopen(config, 'w+') as ofile:
|
||||
# The line was changed, commit it!
|
||||
ofile.writelines(lines)
|
||||
except (IOError, OSError):
|
||||
|
|
|
@ -17,6 +17,7 @@ from salt.ext.six.moves.urllib.request import urlopen as _urlopen # pylint: dis
|
|||
|
||||
# Import Salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
|
||||
|
@ -146,7 +147,7 @@ def _run_silent_cygwin(cyg_arch='x86_64',
|
|||
os.remove(cyg_setup_path)
|
||||
|
||||
file_data = _urlopen(cyg_setup_source)
|
||||
with salt.utils.fopen(cyg_setup_path, "wb") as fhw:
|
||||
with salt.utils.files.fopen(cyg_setup_path, "wb") as fhw:
|
||||
fhw.write(file_data.read())
|
||||
|
||||
setup_command = cyg_setup_path
|
||||
|
|
|
@ -11,7 +11,7 @@ import ast
|
|||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.payload
|
||||
|
||||
# Import 3rd-party lib
|
||||
|
@ -52,7 +52,7 @@ def load():
|
|||
|
||||
try:
|
||||
datastore_path = os.path.join(__opts__['cachedir'], 'datastore')
|
||||
with salt.utils.fopen(datastore_path, 'rb') as rfh:
|
||||
with salt.utils.files.fopen(datastore_path, 'rb') as rfh:
|
||||
return serial.loads(rfh.read())
|
||||
except (IOError, OSError, NameError):
|
||||
return {}
|
||||
|
@ -76,7 +76,7 @@ def dump(new_data):
|
|||
|
||||
try:
|
||||
datastore_path = os.path.join(__opts__['cachedir'], 'datastore')
|
||||
with salt.utils.fopen(datastore_path, 'w+b') as fn_:
|
||||
with salt.utils.files.fopen(datastore_path, 'w+b') as fn_:
|
||||
serial = salt.payload.Serial(__opts__)
|
||||
serial.dump(new_data, fn_)
|
||||
|
||||
|
|
|
@ -39,7 +39,7 @@ try:
|
|||
except ImportError as e:
|
||||
dns_support = False
|
||||
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -70,7 +70,7 @@ def _config(name, key=None, **kwargs):
|
|||
def _get_keyring(keyfile):
|
||||
keyring = None
|
||||
if keyfile:
|
||||
with salt.utils.fopen(keyfile) as _f:
|
||||
with salt.utils.files.fopen(keyfile) as _f:
|
||||
keyring = dns.tsigkeyring.from_text(json.load(_f))
|
||||
return keyring
|
||||
|
||||
|
|
|
@ -25,6 +25,8 @@ import traceback
|
|||
from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: disable=no-name-in-module,import-error
|
||||
from salt.exceptions import SaltInvocationError, CommandExecutionError
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.vt
|
||||
|
||||
HAS_LIBS = False
|
||||
|
||||
|
@ -252,7 +254,7 @@ def _create_pbuilders(env):
|
|||
|
||||
env_overrides = _get_build_env(env)
|
||||
if env_overrides and not env_overrides.isspace():
|
||||
with salt.utils.fopen(pbuilderrc, 'a') as fow:
|
||||
with salt.utils.files.fopen(pbuilderrc, 'a') as fow:
|
||||
fow.write('{0}'.format(env_overrides))
|
||||
|
||||
|
||||
|
@ -575,12 +577,12 @@ def make_repo(repodir,
|
|||
|
||||
codename, repocfg_dists = _get_repo_dists_env(env)
|
||||
repoconfdist = os.path.join(repoconf, 'distributions')
|
||||
with salt.utils.fopen(repoconfdist, 'w') as fow:
|
||||
with salt.utils.files.fopen(repoconfdist, 'w') as fow:
|
||||
fow.write('{0}'.format(repocfg_dists))
|
||||
|
||||
repocfg_opts = _get_repo_options_env(env)
|
||||
repoconfopts = os.path.join(repoconf, 'options')
|
||||
with salt.utils.fopen(repoconfopts, 'w') as fow:
|
||||
with salt.utils.files.fopen(repoconfopts, 'w') as fow:
|
||||
fow.write('{0}'.format(repocfg_opts))
|
||||
|
||||
local_keygrip_to_use = None
|
||||
|
@ -597,7 +599,7 @@ def make_repo(repodir,
|
|||
older_gnupg = __salt__['file.file_exists'](gpg_info_file)
|
||||
|
||||
if keyid is not None:
|
||||
with salt.utils.fopen(repoconfdist, 'a') as fow:
|
||||
with salt.utils.files.fopen(repoconfdist, 'a') as fow:
|
||||
fow.write('SignWith: {0}\n'.format(keyid))
|
||||
|
||||
# import_keys
|
||||
|
@ -657,7 +659,7 @@ def make_repo(repodir,
|
|||
_check_repo_sign_utils_support('debsign')
|
||||
|
||||
if older_gnupg:
|
||||
with salt.utils.fopen(gpg_info_file, 'r') as fow:
|
||||
with salt.utils.files.fopen(gpg_info_file, 'r') as fow:
|
||||
gpg_raw_info = fow.readlines()
|
||||
|
||||
for gpg_info_line in gpg_raw_info:
|
||||
|
@ -666,7 +668,7 @@ def make_repo(repodir,
|
|||
__salt__['environ.setenv'](gpg_info_dict)
|
||||
break
|
||||
else:
|
||||
with salt.utils.fopen(gpg_tty_info_file, 'r') as fow:
|
||||
with salt.utils.files.fopen(gpg_tty_info_file, 'r') as fow:
|
||||
gpg_raw_info = fow.readlines()
|
||||
|
||||
for gpg_tty_info_line in gpg_raw_info:
|
||||
|
|
|
@ -24,9 +24,10 @@ from salt.ext.six.moves import StringIO # pylint: disable=import-error,no-name-
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.odict
|
||||
import salt.utils.templates
|
||||
import salt.utils.validate.net
|
||||
import salt.utils.odict
|
||||
|
||||
|
||||
# Set up logging
|
||||
|
@@ -219,7 +220,7 @@ def _read_file(path):
     Reads and returns the contents of a text file
     '''
     try:
-        with salt.utils.flopen(path, 'rb') as contents:
+        with salt.utils.files.flopen(path, 'rb') as contents:
             return [salt.utils.to_str(line) for line in contents.readlines()]
     except (OSError, IOError):
         return ''
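flopen is fopen plus an advisory lock held for the life of the context manager, which is why debian_ip uses it for files the networking scripts may touch concurrently. A small read/write pair, assuming flopen and salt.utils.to_str behave as in the hunks above and below; both helper names are illustrative:

import salt.utils
import salt.utils.files


def read_config_lines(path):
    '''Read a config file under an advisory lock, like _read_file() above.'''
    try:
        with salt.utils.files.flopen(path, 'rb') as contents:
            return [salt.utils.to_str(line) for line in contents.readlines()]
    except (OSError, IOError):
        return []


def write_config(path, data):
    '''Write ``data`` under the same lock so readers never see a partial file.'''
    with salt.utils.files.flopen(path, 'w') as fout:
        fout.write(data)
    return path
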
@ -280,7 +281,7 @@ def _parse_current_network_settings():
|
|||
opts['networking'] = ''
|
||||
|
||||
if os.path.isfile(_DEB_NETWORKING_FILE):
|
||||
with salt.utils.fopen(_DEB_NETWORKING_FILE) as contents:
|
||||
with salt.utils.files.fopen(_DEB_NETWORKING_FILE) as contents:
|
||||
for line in contents:
|
||||
if line.startswith('#'):
|
||||
continue
|
||||
|
@ -574,7 +575,7 @@ def _parse_interfaces(interface_files=None):
|
|||
method = -1
|
||||
|
||||
for interface_file in interface_files:
|
||||
with salt.utils.fopen(interface_file) as interfaces:
|
||||
with salt.utils.files.fopen(interface_file) as interfaces:
|
||||
# This ensures iface_dict exists, but does not ensure we're not reading a new interface.
|
||||
iface_dict = {}
|
||||
for line in interfaces:
|
||||
|
@ -1489,7 +1490,7 @@ def _write_file(iface, data, folder, pattern):
|
|||
msg = msg.format(filename, folder)
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
with salt.utils.flopen(filename, 'w') as fout:
|
||||
with salt.utils.files.flopen(filename, 'w') as fout:
|
||||
fout.write(data)
|
||||
return filename
|
||||
|
||||
|
@ -1514,7 +1515,7 @@ def _write_file_routes(iface, data, folder, pattern):
|
|||
msg = msg.format(filename, folder)
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
with salt.utils.flopen(filename, 'w') as fout:
|
||||
with salt.utils.files.flopen(filename, 'w') as fout:
|
||||
fout.write(data)
|
||||
|
||||
__salt__['file.set_mode'](filename, '0755')
|
||||
|
@ -1533,7 +1534,7 @@ def _write_file_network(data, filename, create=False):
|
|||
msg = msg.format(filename)
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
with salt.utils.flopen(filename, 'w') as fout:
|
||||
with salt.utils.files.flopen(filename, 'w') as fout:
|
||||
fout.write(data)
|
||||
|
||||
|
||||
|
@ -1609,7 +1610,7 @@ def _write_file_ifaces(iface, data, **settings):
|
|||
msg = msg.format(os.path.dirname(filename))
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
with salt.utils.flopen(filename, 'w') as fout:
|
||||
with salt.utils.files.flopen(filename, 'w') as fout:
|
||||
if _SEPARATE_FILE:
|
||||
fout.write(saved_ifcfg)
|
||||
else:
|
||||
|
@ -1642,7 +1643,7 @@ def _write_file_ppp_ifaces(iface, data):
|
|||
msg = msg.format(os.path.dirname(filename))
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
with salt.utils.fopen(filename, 'w') as fout:
|
||||
with salt.utils.files.fopen(filename, 'w') as fout:
|
||||
fout.write(ifcfg)
|
||||
|
||||
# Return as a array so the difflib works
|
||||
|
|
|
@ -7,6 +7,7 @@ import yaml
|
|||
|
||||
import salt.fileclient
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.url
|
||||
|
||||
from salt.utils import dictupdate
|
||||
|
@ -60,7 +61,7 @@ def _load(formula):
|
|||
|
||||
if os.path.exists(file_):
|
||||
log.debug("Reading defaults from %r", file_)
|
||||
with salt.utils.fopen(file_) as fhr:
|
||||
with salt.utils.files.fopen(file_) as fhr:
|
||||
defaults = loader.load(fhr)
|
||||
log.debug("Read defaults %r", defaults)
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@ import os
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -163,7 +164,7 @@ def _parse_dnamasq(filename):
|
|||
'Error: No such file \'{0}\''.format(filename)
|
||||
)
|
||||
|
||||
with salt.utils.fopen(filename, 'r') as fp_:
|
||||
with salt.utils.files.fopen(filename, 'r') as fp_:
|
||||
for line in fp_:
|
||||
if not line.strip():
|
||||
continue
|
||||
|
|
|
@@ -2,15 +2,14 @@
'''
Compendium of generic DNS utilities
'''
# Import python libs
from __future__ import absolute_import
import logging
import time

# Import salt libs
import salt.utils
import socket

# Import python libs
import logging
import time
import salt.utils.files

log = logging.getLogger(__name__)
@ -35,7 +34,7 @@ def parse_hosts(hostsfile='/etc/hosts', hosts=None):
|
|||
'''
|
||||
if not hosts:
|
||||
try:
|
||||
with salt.utils.fopen(hostsfile, 'r') as fp_:
|
||||
with salt.utils.files.fopen(hostsfile, 'r') as fp_:
|
||||
hosts = fp_.read()
|
||||
except Exception:
|
||||
return 'Error: hosts data was not found'
|
||||
|
@ -75,7 +74,7 @@ def hosts_append(hostsfile='/etc/hosts', ip_addr=None, entries=None):
|
|||
return 'No additional hosts were added to {0}'.format(hostsfile)
|
||||
|
||||
append_line = '\n{0} {1}'.format(ip_addr, ' '.join(host_list))
|
||||
with salt.utils.fopen(hostsfile, 'a') as fp_:
|
||||
with salt.utils.files.fopen(hostsfile, 'a') as fp_:
|
||||
fp_.write(append_line)
|
||||
|
||||
return 'The following line was added to {0}:{1}'.format(hostsfile,
|
||||
|
@ -95,11 +94,11 @@ def hosts_remove(hostsfile='/etc/hosts', entries=None):
|
|||
salt '*' dnsutil.hosts_remove /etc/hosts ad1.yuk.co
|
||||
salt '*' dnsutil.hosts_remove /etc/hosts ad2.yuk.co,ad1.yuk.co
|
||||
'''
|
||||
with salt.utils.fopen(hostsfile, 'r') as fp_:
|
||||
with salt.utils.files.fopen(hostsfile, 'r') as fp_:
|
||||
hosts = fp_.read()
|
||||
|
||||
host_list = entries.split(',')
|
||||
with salt.utils.fopen(hostsfile, 'w') as out_file:
|
||||
with salt.utils.files.fopen(hostsfile, 'w') as out_file:
|
||||
for line in hosts.splitlines():
|
||||
if not line or line.strip().startswith('#'):
|
||||
out_file.write('{0}\n'.format(line))
|
||||
|
@ -125,7 +124,7 @@ def parse_zone(zonefile=None, zone=None):
|
|||
'''
|
||||
if zonefile:
|
||||
try:
|
||||
with salt.utils.fopen(zonefile, 'r') as fp_:
|
||||
with salt.utils.files.fopen(zonefile, 'r') as fp_:
|
||||
zone = fp_.read()
|
||||
except Exception:
|
||||
pass
|
||||
|
|
|
@ -106,7 +106,7 @@ import inspect
|
|||
import logging
|
||||
import os
|
||||
import re
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
from operator import attrgetter
|
||||
try:
|
||||
|
@ -179,7 +179,7 @@ def __read_docker_compose(path):
|
|||
return __standardize_result(False,
|
||||
'Path does not exist or docker-compose.yml is not present',
|
||||
None, None)
|
||||
f = salt.utils.fopen(os.path.join(path, dc_filename), 'r') # pylint: disable=resource-leakage
|
||||
f = salt.utils.files.fopen(os.path.join(path, dc_filename), 'r') # pylint: disable=resource-leakage
|
||||
result = {'docker-compose.yml': ''}
|
||||
if f:
|
||||
for line in f:
|
||||
|
@ -207,7 +207,7 @@ def __write_docker_compose(path, docker_compose):
|
|||
|
||||
if os.path.isdir(path) is False:
|
||||
os.mkdir(path)
|
||||
f = salt.utils.fopen(os.path.join(path, dc_filename), 'w') # pylint: disable=resource-leakage
|
||||
f = salt.utils.files.fopen(os.path.join(path, dc_filename), 'w') # pylint: disable=resource-leakage
|
||||
if f:
|
||||
f.write(docker_compose)
|
||||
f.close()
|
||||
|
|
|
@@ -2862,7 +2862,7 @@ def export(name,
# open the filehandle. If not using gzip, we need to open the
# filehandle here. We make sure to close it in the "finally" block
# below.
out = salt.utils.fopen(path, 'wb') # pylint: disable=resource-leakage
out = salt.utils.files.fopen(path, 'wb') # pylint: disable=resource-leakage
response = _client_wrapper('export', name)
buf = None
while buf != '':

@@ -3874,12 +3874,12 @@ def save(name,
compressor = lzma.LZMACompressor()

try:
with salt.utils.fopen(saved_path, 'rb') as uncompressed:
with salt.utils.files.fopen(saved_path, 'rb') as uncompressed:
if compression != 'gzip':
# gzip doesn't use a Compressor object, it uses a .open()
# method to open the filehandle. If not using gzip, we need
# to open the filehandle here.
out = salt.utils.fopen(path, 'wb')
out = salt.utils.files.fopen(path, 'wb')
buf = None
while buf != '':
buf = uncompressed.read(4096)

@@ -12,6 +12,7 @@ import datetime

# Import salt libs
import salt.utils
import salt.utils.files
from salt.exceptions import CommandExecutionError, SaltInvocationError

log = logging.getLogger(__name__)

@@ -323,7 +324,7 @@ def _get_pkg_license(pkg):
licenses = set()
cpr = "/usr/share/doc/{0}/copyright".format(pkg)
if os.path.exists(cpr):
with salt.utils.fopen(cpr) as fp_:
with salt.utils.files.fopen(cpr) as fp_:
for line in fp_.read().split(os.linesep):
if line.startswith("License:"):
licenses.add(line.split(":", 1)[1].strip())

@@ -361,7 +362,7 @@ def _get_pkg_ds_avail():
ret = dict()
pkg_mrk = "Package:"
pkg_name = "package"
with salt.utils.fopen(avail) as fp_:
with salt.utils.files.fopen(avail) as fp_:
for pkg_info in fp_.read().split(pkg_mrk):
nfo = dict()
for line in (pkg_mrk + pkg_info).split(os.linesep):

@ -1233,8 +1233,8 @@ def psed(path,
|
|||
|
||||
shutil.copy2(path, '{0}{1}'.format(path, backup))
|
||||
|
||||
with salt.utils.fopen(path, 'w') as ofile:
|
||||
with salt.utils.fopen('{0}{1}'.format(path, backup), 'r') as ifile:
|
||||
with salt.utils.files.fopen(path, 'w') as ofile:
|
||||
with salt.utils.files.fopen('{0}{1}'.format(path, backup), 'r') as ifile:
|
||||
if multi is True:
|
||||
for line in ifile.readline():
|
||||
ofile.write(_psed(line, before, after, limit, flags))
|
||||
|
@ -1442,7 +1442,7 @@ def comment_line(path,
|
|||
bufsize = os.path.getsize(path)
|
||||
try:
|
||||
# Use a read-only handle to open the file
|
||||
with salt.utils.fopen(path,
|
||||
with salt.utils.files.fopen(path,
|
||||
mode='rb',
|
||||
buffering=bufsize) as r_file:
|
||||
# Loop through each line of the file and look for a match
|
||||
|
@ -1481,12 +1481,12 @@ def comment_line(path,
|
|||
|
||||
try:
|
||||
# Open the file in write mode
|
||||
with salt.utils.fopen(path,
|
||||
with salt.utils.files.fopen(path,
|
||||
mode='wb',
|
||||
buffering=bufsize) as w_file:
|
||||
try:
|
||||
# Open the temp file in read mode
|
||||
with salt.utils.fopen(temp_file,
|
||||
with salt.utils.files.fopen(temp_file,
|
||||
mode='rb',
|
||||
buffering=bufsize) as r_file:
|
||||
# Loop through each line of the file and look for a match
|
||||
|
@ -1857,7 +1857,7 @@ def line(path, content=None, match=None, mode=None, location=None,
|
|||
if before is None and after is None and not match:
|
||||
match = content
|
||||
|
||||
with salt.utils.fopen(path, mode='r') as fp_:
|
||||
with salt.utils.files.fopen(path, mode='r') as fp_:
|
||||
body = fp_.read()
|
||||
body_before = hashlib.sha256(salt.utils.to_bytes(body)).hexdigest()
|
||||
after = _regex_to_static(body, after)
|
||||
|
@ -2000,7 +2000,7 @@ def line(path, content=None, match=None, mode=None, location=None,
|
|||
|
||||
if changed:
|
||||
if show_changes:
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(path, 'r') as fp_:
|
||||
path_content = _splitlines_preserving_trailing_newline(
|
||||
fp_.read())
|
||||
changes_diff = ''.join(difflib.unified_diff(
|
||||
|
@ -2212,7 +2212,7 @@ def replace(path,
|
|||
# Searching first avoids modifying the time stamp if there are no changes
|
||||
r_data = None
|
||||
# Use a read-only handle to open the file
|
||||
with salt.utils.fopen(path,
|
||||
with salt.utils.files.fopen(path,
|
||||
mode='rb',
|
||||
buffering=bufsize) as r_file:
|
||||
try:
|
||||
|
@ -2271,12 +2271,12 @@ def replace(path,
|
|||
r_data = None
|
||||
try:
|
||||
# Open the file in write mode
|
||||
with salt.utils.fopen(path,
|
||||
with salt.utils.files.fopen(path,
|
||||
mode='w',
|
||||
buffering=bufsize) as w_file:
|
||||
try:
|
||||
# Open the temp file in read mode
|
||||
with salt.utils.fopen(temp_file,
|
||||
with salt.utils.files.fopen(temp_file,
|
||||
mode='r',
|
||||
buffering=bufsize) as r_file:
|
||||
r_data = mmap.mmap(r_file.fileno(),
|
||||
|
@ -2792,7 +2792,7 @@ def contains_regex(path, regex, lchar=''):
|
|||
return False
|
||||
|
||||
try:
|
||||
with salt.utils.fopen(path, 'r') as target:
|
||||
with salt.utils.files.fopen(path, 'r') as target:
|
||||
for line in target:
|
||||
if lchar:
|
||||
line = line.lstrip(lchar)
|
||||
|
@ -2876,7 +2876,7 @@ def append(path, *args, **kwargs):
|
|||
|
||||
# Make sure we have a newline at the end of the file. Do this in binary
|
||||
# mode so SEEK_END with nonzero offset will work.
|
||||
with salt.utils.fopen(path, 'rb+') as ofile:
|
||||
with salt.utils.files.fopen(path, 'rb+') as ofile:
|
||||
linesep = salt.utils.to_bytes(os.linesep)
|
||||
try:
|
||||
ofile.seek(-len(linesep), os.SEEK_END)
|
||||
|
@ -2892,7 +2892,7 @@ def append(path, *args, **kwargs):
|
|||
ofile.write(linesep)
|
||||
|
||||
# Append lines in text mode
|
||||
with salt.utils.fopen(path, 'a') as ofile:
|
||||
with salt.utils.files.fopen(path, 'a') as ofile:
|
||||
for new_line in args:
|
||||
ofile.write('{0}{1}'.format(new_line, os.linesep))
|
||||
|
||||
|
@ -2941,7 +2941,7 @@ def prepend(path, *args, **kwargs):
|
|||
args = [kwargs['args']]
|
||||
|
||||
try:
|
||||
with salt.utils.fopen(path) as fhr:
|
||||
with salt.utils.files.fopen(path) as fhr:
|
||||
contents = fhr.readlines()
|
||||
except IOError:
|
||||
contents = []
|
||||
|
@ -2950,7 +2950,7 @@ def prepend(path, *args, **kwargs):
|
|||
for line in args:
|
||||
preface.append('{0}\n'.format(line))
|
||||
|
||||
with salt.utils.fopen(path, "w") as ofile:
|
||||
with salt.utils.files.fopen(path, "w") as ofile:
|
||||
contents = preface + contents
|
||||
ofile.write(''.join(contents))
|
||||
return 'Prepended {0} lines to "{1}"'.format(len(args), path)
|
||||
|
@ -2999,7 +2999,7 @@ def write(path, *args, **kwargs):
|
|||
contents = []
|
||||
for line in args:
|
||||
contents.append('{0}\n'.format(line))
|
||||
with salt.utils.fopen(path, "w") as ofile:
|
||||
with salt.utils.files.fopen(path, "w") as ofile:
|
||||
ofile.write(''.join(contents))
|
||||
return 'Wrote {0} lines to "{1}"'.format(len(contents), path)
|
||||
|
||||
|
@ -3030,7 +3030,7 @@ def touch(name, atime=None, mtime=None):
|
|||
mtime = int(mtime)
|
||||
try:
|
||||
if not os.path.exists(name):
|
||||
with salt.utils.fopen(name, 'a') as fhw:
|
||||
with salt.utils.files.fopen(name, 'a') as fhw:
|
||||
fhw.write('')
|
||||
|
||||
if not atime and not mtime:
|
||||
|
@ -3133,7 +3133,7 @@ def truncate(path, length):
|
|||
salt '*' file.truncate /path/to/file 512
|
||||
'''
|
||||
path = os.path.expanduser(path)
|
||||
with salt.utils.fopen(path, 'rb+') as seek_fh:
|
||||
with salt.utils.files.fopen(path, 'rb+') as seek_fh:
|
||||
seek_fh.truncate(int(length))
|
||||
|
||||
|
||||
|
@ -3380,7 +3380,7 @@ def read(path, binary=False):
|
|||
access_mode = 'r'
|
||||
if binary is True:
|
||||
access_mode += 'b'
|
||||
with salt.utils.fopen(path, access_mode) as file_obj:
|
||||
with salt.utils.files.fopen(path, access_mode) as file_obj:
|
||||
return file_obj.read()
|
||||
|
||||
|
||||
|
@ -4145,7 +4145,7 @@ def extract_hash(hash_fn,
|
|||
partial = None
|
||||
found = {}
|
||||
|
||||
with salt.utils.fopen(hash_fn, 'r') as fp_:
|
||||
with salt.utils.files.fopen(hash_fn, 'r') as fp_:
|
||||
for line in fp_:
|
||||
line = line.strip()
|
||||
hash_re = r'(?i)(?<![a-z0-9])([a-f0-9]{' + hash_len_expr + '})(?![a-z0-9])'
|
||||
|
@ -4671,9 +4671,9 @@ def check_file_meta(
|
|||
if bdiff:
|
||||
changes['diff'] = bdiff
|
||||
else:
|
||||
with salt.utils.fopen(sfn, 'r') as src:
|
||||
with salt.utils.files.fopen(sfn, 'r') as src:
|
||||
slines = src.readlines()
|
||||
with salt.utils.fopen(name, 'r') as name_:
|
||||
with salt.utils.files.fopen(name, 'r') as name_:
|
||||
nlines = name_.readlines()
|
||||
changes['diff'] = \
|
||||
''.join(difflib.unified_diff(nlines, slines))
|
||||
|
@ -4687,12 +4687,12 @@ def check_file_meta(
|
|||
if salt.utils.is_windows():
|
||||
contents = os.linesep.join(
|
||||
_splitlines_preserving_trailing_newline(contents))
|
||||
with salt.utils.fopen(tmp, 'w') as tmp_:
|
||||
with salt.utils.files.fopen(tmp, 'w') as tmp_:
|
||||
tmp_.write(str(contents))
|
||||
# Compare the static contents with the named file
|
||||
with salt.utils.fopen(tmp, 'r') as src:
|
||||
with salt.utils.files.fopen(tmp, 'r') as src:
|
||||
slines = src.readlines()
|
||||
with salt.utils.fopen(name, 'r') as name_:
|
||||
with salt.utils.files.fopen(name, 'r') as name_:
|
||||
nlines = name_.readlines()
|
||||
__clean_tmp(tmp)
|
||||
if ''.join(nlines) != ''.join(slines):
|
||||
|
@ -4758,9 +4758,9 @@ def get_diff(
|
|||
|
||||
sfn = __salt__['cp.cache_file'](masterfile, saltenv)
|
||||
if sfn:
|
||||
with salt.utils.fopen(sfn, 'r') as src:
|
||||
with salt.utils.files.fopen(sfn, 'r') as src:
|
||||
slines = src.readlines()
|
||||
with salt.utils.fopen(minionfile, 'r') as name_:
|
||||
with salt.utils.files.fopen(minionfile, 'r') as name_:
|
||||
nlines = name_.readlines()
|
||||
if ''.join(nlines) != ''.join(slines):
|
||||
bdiff = _binary_replace(minionfile, sfn)
|
||||
|
@ -4983,9 +4983,9 @@ def manage_file(name,
|
|||
if bdiff:
|
||||
ret['changes']['diff'] = bdiff
|
||||
else:
|
||||
with salt.utils.fopen(sfn, 'r') as src:
|
||||
with salt.utils.files.fopen(sfn, 'r') as src:
|
||||
slines = src.readlines()
|
||||
with salt.utils.fopen(real_name, 'r') as name_:
|
||||
with salt.utils.files.fopen(real_name, 'r') as name_:
|
||||
nlines = name_.readlines()
|
||||
|
||||
sndiff = ''.join(difflib.unified_diff(nlines, slines))
|
||||
|
@ -5010,7 +5010,7 @@ def manage_file(name,
|
|||
if salt.utils.is_windows():
|
||||
contents = os.linesep.join(
|
||||
_splitlines_preserving_trailing_newline(contents))
|
||||
with salt.utils.fopen(tmp, 'w') as tmp_:
|
||||
with salt.utils.files.fopen(tmp, 'w') as tmp_:
|
||||
if encoding:
|
||||
log.debug('File will be encoded with {0}'.format(encoding))
|
||||
tmp_.write(contents.encode(encoding=encoding, errors=encoding_errors))
|
||||
|
@ -5018,9 +5018,9 @@ def manage_file(name,
|
|||
tmp_.write(str(contents))
|
||||
|
||||
# Compare contents of files to know if we need to replace
|
||||
with salt.utils.fopen(tmp, 'r') as src:
|
||||
with salt.utils.files.fopen(tmp, 'r') as src:
|
||||
slines = src.readlines()
|
||||
with salt.utils.fopen(real_name, 'r') as name_:
|
||||
with salt.utils.files.fopen(real_name, 'r') as name_:
|
||||
nlines = name_.readlines()
|
||||
different = ''.join(slines) != ''.join(nlines)
|
||||
|
||||
|
@ -5220,7 +5220,7 @@ def manage_file(name,
|
|||
if salt.utils.is_windows():
|
||||
contents = os.linesep.join(
|
||||
_splitlines_preserving_trailing_newline(contents))
|
||||
with salt.utils.fopen(tmp, 'w') as tmp_:
|
||||
with salt.utils.files.fopen(tmp, 'w') as tmp_:
|
||||
if encoding:
|
||||
log.debug('File will be encoded with {0}'.format(encoding))
|
||||
tmp_.write(contents.encode(encoding=encoding, errors=encoding_errors))
|
||||
|
|
|
@@ -8,12 +8,9 @@ from __future__ import absolute_import
import logging

# Import salt libs
import salt.utils
import salt.utils.files
from salt.exceptions import CommandExecutionError

import logging
log = logging.getLogger(__name__)

# Define the module's virtual name
__virtualname__ = 'sysctl'

@@ -69,7 +66,7 @@ def show(config_file=False):

if config_file:
try:
with salt.utils.fopen(config_file, 'r') as f:
with salt.utils.files.fopen(config_file, 'r') as f:
for line in f.readlines():
l = line.strip()
if l != "" and not l.startswith("#"):

@@ -144,7 +141,7 @@ def persist(name, value, config='/etc/sysctl.conf'):
edited = False
value = str(value)

with salt.utils.fopen(config, 'r') as ifile:
with salt.utils.files.fopen(config, 'r') as ifile:
for line in ifile:
if not line.startswith('{0}='.format(name)):
nlines.append(line)

@@ -165,7 +162,7 @@ def persist(name, value, config='/etc/sysctl.conf'):
edited = True
if not edited:
nlines.append("{0}\n".format(_formatfor(name, value, config)))
with salt.utils.fopen(config, 'w+') as ofile:
with salt.utils.files.fopen(config, 'w+') as ofile:
ofile.writelines(nlines)
if config != '/boot/loader.conf':
assign(name, value)

@@ -11,6 +11,7 @@ import subprocess

# Import salt libs
import salt.utils
import salt.utils.files

# Define the module's virtual name
__virtualname__ = 'jail'

@@ -99,7 +100,7 @@ def get_enabled():
ret = []
for rconf in ('/etc/rc.conf', '/etc/rc.conf.local'):
if os.access(rconf, os.R_OK):
with salt.utils.fopen(rconf, 'r') as _fp:
with salt.utils.files.fopen(rconf, 'r') as _fp:
for line in _fp:
if not line.strip():
continue

@@ -135,7 +136,7 @@ def show_config(jail):
else:
for rconf in ('/etc/rc.conf', '/etc/rc.conf.local'):
if os.access(rconf, os.R_OK):
with salt.utils.fopen(rconf, 'r') as _fp:
with salt.utils.files.fopen(rconf, 'r') as _fp:
for line in _fp:
if not line.strip():
continue

@@ -145,7 +146,7 @@ def show_config(jail):
ret[key.split('_', 2)[2]] = value.split('"')[1]
for jconf in ('/etc/jail.conf', '/usr/local/etc/jail.conf'):
if os.access(jconf, os.R_OK):
with salt.utils.fopen(jconf, 'r') as _fp:
with salt.utils.files.fopen(jconf, 'r') as _fp:
for line in _fp:
line = line.partition('#')[0].strip()
if line:

@@ -186,7 +187,7 @@ def fstab(jail):
c_fstab = config['mount.fstab']
if 'fstab' in config or 'mount.fstab' in config:
if os.access(c_fstab, os.R_OK):
with salt.utils.fopen(c_fstab, 'r') as _fp:
with salt.utils.files.fopen(c_fstab, 'r') as _fp:
for line in _fp:
line = line.strip()
if not line:

@@ -9,7 +9,7 @@ import os
import re

# Import salt libs
import salt.utils
import salt.utils.files

# Define the module's virtual name
__virtualname__ = 'kmod'

@@ -70,7 +70,7 @@ def _get_persistent_modules():
Returns a list of modules in loader.conf that load on boot.
'''
mods = set()
with salt.utils.fopen(_LOADER_CONF, 'r') as loader_conf:
with salt.utils.files.fopen(_LOADER_CONF, 'r') as loader_conf:
for line in loader_conf:
line = line.strip()
mod_name = _get_module_name(line)

@@ -24,6 +24,7 @@ import logging

# Import salt libs
import salt.utils
import salt.utils.files
from salt.ext.six import string_types
from salt.exceptions import SaltInvocationError, CommandExecutionError
import salt.ext.six as six

@@ -114,7 +115,7 @@ def _write_options(name, configuration):
'Unable to make {0}: {1}'.format(dirname, exc)
)

with salt.utils.fopen(os.path.join(dirname, 'options'), 'w') as fp_:
with salt.utils.files.fopen(os.path.join(dirname, 'options'), 'w') as fp_:
sorted_options = list(conf_ptr.keys())
sorted_options.sort()
fp_.write(

@@ -19,6 +19,7 @@ import re
# Import salt libs
import salt.utils
import salt.utils.decorators as decorators
import salt.utils.files
from salt.exceptions import CommandNotFoundError

__func_alias__ = {

@@ -222,7 +223,7 @@ def _switch(name,  # pylint: disable=C0103
val = 'NO'

if os.path.exists(config):
with salt.utils.fopen(config, 'r') as ifile:
with salt.utils.files.fopen(config, 'r') as ifile:
for line in ifile:
if not line.startswith('{0}='.format(rcvar)):
nlines.append(line)

@@ -236,7 +237,7 @@ def _switch(name,  # pylint: disable=C0103
nlines[-1] = '{0}\n'.format(nlines[-1])
nlines.append('{0}="{1}"\n'.format(rcvar, val))

with salt.utils.fopen(config, 'w') as ofile:
with salt.utils.files.fopen(config, 'w') as ofile:
ofile.writelines(nlines)

return True

@@ -2039,7 +2039,7 @@ def is_worktree(cwd,
return False
gitdir = os.path.join(toplevel, '.git')
try:
with salt.utils.fopen(gitdir, 'r') as fp_:
with salt.utils.files.fopen(gitdir, 'r') as fp_:
for line in fp_:
try:
label, path = line.split(None, 1)

@@ -2381,7 +2381,7 @@ def list_worktrees(cwd,
Return contents of a single line file with EOF newline stripped
'''
try:
with salt.utils.fopen(path, 'r') as fp_:
with salt.utils.files.fopen(path, 'r') as fp_:
for line in fp_:
ret = line.strip()
# Ignore other lines, if they exist (which they

@ -22,6 +22,7 @@ import time
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import SaltInvocationError
|
||||
from salt.utils.versions import LooseVersion as _LooseVersion
|
||||
|
||||
|
@ -726,7 +727,7 @@ def import_key(text=None,
|
|||
|
||||
if filename:
|
||||
try:
|
||||
with salt.utils.flopen(filename, 'rb') as _fp:
|
||||
with salt.utils.files.flopen(filename, 'rb') as _fp:
|
||||
lines = _fp.readlines()
|
||||
text = ''.join(lines)
|
||||
except IOError:
|
||||
|
@ -1019,13 +1020,13 @@ def sign(user=None,
|
|||
else:
|
||||
signed_data = gpg.sign(text, keyid=keyid, passphrase=gpg_passphrase)
|
||||
elif filename:
|
||||
with salt.utils.flopen(filename, 'rb') as _fp:
|
||||
with salt.utils.files.flopen(filename, 'rb') as _fp:
|
||||
if gnupg_version >= '1.3.1':
|
||||
signed_data = gpg.sign(text, default_key=keyid, passphrase=gpg_passphrase)
|
||||
else:
|
||||
signed_data = gpg.sign_file(_fp, keyid=keyid, passphrase=gpg_passphrase)
|
||||
if output:
|
||||
with salt.utils.flopen(output, 'w') as fout:
|
||||
with salt.utils.files.flopen(output, 'w') as fout:
|
||||
fout.write(signed_data.data)
|
||||
else:
|
||||
raise SaltInvocationError('filename or text must be passed.')
|
||||
|
@ -1077,10 +1078,10 @@ def verify(text=None,
|
|||
if signature:
|
||||
# need to call with fopen instead of flopen due to:
|
||||
# https://bitbucket.org/vinay.sajip/python-gnupg/issues/76/verify_file-closes-passed-file-handle
|
||||
with salt.utils.fopen(signature, 'rb') as _fp:
|
||||
with salt.utils.files.fopen(signature, 'rb') as _fp:
|
||||
verified = gpg.verify_file(_fp, filename)
|
||||
else:
|
||||
with salt.utils.flopen(filename, 'rb') as _fp:
|
||||
with salt.utils.files.flopen(filename, 'rb') as _fp:
|
||||
verified = gpg.verify_file(_fp)
|
||||
else:
|
||||
raise SaltInvocationError('filename or text must be passed.')
|
||||
|
@ -1173,12 +1174,12 @@ def encrypt(user=None,
|
|||
if GPG_1_3_1:
|
||||
# This version does not allow us to encrypt using the
|
||||
# file stream # have to read in the contents and encrypt.
|
||||
with salt.utils.flopen(filename, 'rb') as _fp:
|
||||
with salt.utils.files.flopen(filename, 'rb') as _fp:
|
||||
_contents = _fp.read()
|
||||
result = gpg.encrypt(_contents, recipients, passphrase=gpg_passphrase, output=output)
|
||||
else:
|
||||
# This version allows encrypting the file stream
|
||||
with salt.utils.flopen(filename, 'rb') as _fp:
|
||||
with salt.utils.files.flopen(filename, 'rb') as _fp:
|
||||
if output:
|
||||
result = gpg.encrypt_file(_fp, recipients, passphrase=gpg_passphrase, output=output, sign=sign)
|
||||
else:
|
||||
|
@ -1264,7 +1265,7 @@ def decrypt(user=None,
|
|||
if text:
|
||||
result = gpg.decrypt(text, passphrase=gpg_passphrase)
|
||||
elif filename:
|
||||
with salt.utils.flopen(filename, 'rb') as _fp:
|
||||
with salt.utils.files.flopen(filename, 'rb') as _fp:
|
||||
if output:
|
||||
result = gpg.decrypt_file(_fp, passphrase=gpg_passphrase, output=output)
|
||||
else:
|
||||
|
|
|
@ -12,15 +12,14 @@ import operator
|
|||
import collections
|
||||
import json
|
||||
import math
|
||||
import yaml
|
||||
from functools import reduce # pylint: disable=redefined-builtin
|
||||
|
||||
# Import 3rd-party libs
|
||||
import yaml
|
||||
import salt.utils.compat
|
||||
# Import Salt libs
|
||||
import salt.ext.six as six
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.compat
|
||||
import salt.utils.files
|
||||
import salt.utils.yamldumper
|
||||
from salt.defaults import DEFAULT_TARGET_DELIM
|
||||
from salt.exceptions import SaltException
|
||||
|
@ -235,7 +234,7 @@ def setvals(grains, destructive=False):
|
|||
)
|
||||
|
||||
if os.path.isfile(gfn):
|
||||
with salt.utils.fopen(gfn, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(gfn, 'rb') as fp_:
|
||||
try:
|
||||
grains = yaml.safe_load(fp_.read())
|
||||
except yaml.YAMLError as exc:
|
||||
|
@ -253,14 +252,14 @@ def setvals(grains, destructive=False):
|
|||
__grains__[key] = val
|
||||
cstr = salt.utils.yamldumper.safe_dump(grains, default_flow_style=False)
|
||||
try:
|
||||
with salt.utils.fopen(gfn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(gfn, 'w+') as fp_:
|
||||
fp_.write(cstr)
|
||||
except (IOError, OSError):
|
||||
msg = 'Unable to write to grains file at {0}. Check permissions.'
|
||||
log.error(msg.format(gfn))
|
||||
fn_ = os.path.join(__opts__['cachedir'], 'module_refresh')
|
||||
try:
|
||||
with salt.utils.flopen(fn_, 'w+') as fp_:
|
||||
with salt.utils.files.flopen(fn_, 'w+') as fp_:
|
||||
fp_.write('')
|
||||
except (IOError, OSError):
|
||||
msg = 'Unable to write to cache file {0}. Check permissions.'
|
||||
|
|
|
@ -8,7 +8,7 @@ from __future__ import absolute_import
|
|||
import os
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.decorators as decorators
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
|
@ -68,7 +68,7 @@ def conf():
|
|||
ret = {}
|
||||
pos = 0
|
||||
try:
|
||||
with salt.utils.fopen(_detect_conf(), 'r') as _fp:
|
||||
with salt.utils.files.fopen(_detect_conf(), 'r') as _fp:
|
||||
for line in _fp:
|
||||
if line.startswith('#'):
|
||||
continue
|
||||
|
|
|
@ -13,6 +13,7 @@ import hmac
|
|||
import salt.exceptions
|
||||
import salt.ext.six as six
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.hashutils
|
||||
|
||||
if six.PY2:
|
||||
|
@ -72,7 +73,7 @@ def digest_file(infile, checksum='md5'):
|
|||
raise salt.exceptions.CommandExecutionError(
|
||||
"File path '{0}' not found.".format(infile))
|
||||
|
||||
with salt.utils.fopen(infile, 'rb') as f:
|
||||
with salt.utils.files.fopen(infile, 'rb') as f:
|
||||
file_hash = __salt__['hashutil.digest'](f.read(), checksum)
|
||||
|
||||
return file_hash
|
||||
|
@ -156,7 +157,7 @@ def base64_encodefile(fname):
|
|||
'''
|
||||
encoded_f = StringIO.StringIO()
|
||||
|
||||
with salt.utils.fopen(fname, 'rb') as f:
|
||||
with salt.utils.files.fopen(fname, 'rb') as f:
|
||||
base64.encode(f, encoded_f)
|
||||
|
||||
encoded_f.seek(0)
|
||||
|
@ -193,7 +194,7 @@ def base64_decodefile(instr, outfile):
|
|||
'''
|
||||
encoded_f = StringIO.StringIO(instr)
|
||||
|
||||
with salt.utils.fopen(outfile, 'wb') as f:
|
||||
with salt.utils.files.fopen(outfile, 'wb') as f:
|
||||
base64.decode(encoded_f, f)
|
||||
|
||||
return True
|
||||
|
|
|
@ -45,11 +45,10 @@ from __future__ import absolute_import
|
|||
import time
|
||||
import json
|
||||
import logging
|
||||
# Import third party libs
|
||||
import yaml
|
||||
# Import salt libs
|
||||
|
||||
# Import Salt libs
|
||||
import salt.ext.six as six
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
|
@ -543,9 +542,9 @@ def create_stack(name=None, template_file=None, enviroment=None,
|
|||
contents=None,
|
||||
dir_mode=None)
|
||||
if template_manage_result['result']:
|
||||
with salt.utils.fopen(template_tmp_file, 'r') as tfp_:
|
||||
with salt.utils.files.fopen(template_tmp_file, 'r') as tfp_:
|
||||
tpl = tfp_.read()
|
||||
salt.utils.safe_rm(template_tmp_file)
|
||||
salt.utils.files.safe_rm(template_tmp_file)
|
||||
try:
|
||||
if isinstance(tpl, six.binary_type):
|
||||
tpl = tpl.decode('utf-8')
|
||||
|
@ -605,9 +604,9 @@ def create_stack(name=None, template_file=None, enviroment=None,
|
|||
contents=None,
|
||||
dir_mode=None)
|
||||
if enviroment_manage_result['result']:
|
||||
with salt.utils.fopen(enviroment_tmp_file, 'r') as efp_:
|
||||
with salt.utils.files.fopen(enviroment_tmp_file, 'r') as efp_:
|
||||
env_str = efp_.read()
|
||||
salt.utils.safe_rm(enviroment_tmp_file)
|
||||
salt.utils.files.safe_rm(enviroment_tmp_file)
|
||||
try:
|
||||
env = _parse_enviroment(env_str)
|
||||
except ValueError as ex:
|
||||
|
@ -732,9 +731,9 @@ def update_stack(name=None, template_file=None, enviroment=None,
|
|||
contents=None,
|
||||
dir_mode=None)
|
||||
if template_manage_result['result']:
|
||||
with salt.utils.fopen(template_tmp_file, 'r') as tfp_:
|
||||
with salt.utils.files.fopen(template_tmp_file, 'r') as tfp_:
|
||||
tpl = tfp_.read()
|
||||
salt.utils.safe_rm(template_tmp_file)
|
||||
salt.utils.files.safe_rm(template_tmp_file)
|
||||
try:
|
||||
if isinstance(tpl, six.binary_type):
|
||||
tpl = tpl.decode('utf-8')
|
||||
|
@ -794,9 +793,9 @@ def update_stack(name=None, template_file=None, enviroment=None,
|
|||
contents=None,
|
||||
dir_mode=None)
|
||||
if enviroment_manage_result['result']:
|
||||
with salt.utils.fopen(enviroment_tmp_file, 'r') as efp_:
|
||||
with salt.utils.files.fopen(enviroment_tmp_file, 'r') as efp_:
|
||||
env_str = efp_.read()
|
||||
salt.utils.safe_rm(enviroment_tmp_file)
|
||||
salt.utils.files.safe_rm(enviroment_tmp_file)
|
||||
try:
|
||||
env = _parse_enviroment(env_str)
|
||||
except ValueError as ex:
|
||||
|
|
|
@ -221,7 +221,7 @@ import re
|
|||
import yaml
|
||||
import logging
|
||||
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.templates as tpl
|
||||
from salt.utils.yamldumper import OrderedDumper
|
||||
|
||||
|
@ -412,7 +412,7 @@ def read_file(name):
|
|||
'''
|
||||
out = ''
|
||||
try:
|
||||
with salt.utils.fopen(name, 'r') as f:
|
||||
with salt.utils.files.fopen(name, 'r') as f:
|
||||
out = f.read()
|
||||
except Exception as ex:
|
||||
log.error(ex)
|
||||
|
@ -550,7 +550,7 @@ def _format_markdown_system_file(filename, config):
|
|||
if is_binary:
|
||||
file_data = '[[skipped binary data]]'
|
||||
else:
|
||||
with salt.utils.fopen(filename, 'r') as f:
|
||||
with salt.utils.files.fopen(filename, 'r') as f:
|
||||
file_data = f.read()
|
||||
#file_data = __salt__['cmd.shell']('\\file -i \'{0}\' | \\grep -q \'charset=binary\' && echo [[binary data]] || cat \'{0}\''.format(filename))
|
||||
file_data = _md_fix(file_data)
|
||||
|
|
|
@ -8,7 +8,7 @@ from __future__ import absolute_import
|
|||
import os
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.odict as odict
|
||||
|
||||
# Import 3rd-party libs
|
||||
|
@ -33,7 +33,7 @@ def _get_or_create_hostfile():
|
|||
if hfn is None:
|
||||
hfn = ''
|
||||
if not os.path.exists(hfn):
|
||||
with salt.utils.fopen(hfn, 'w'):
|
||||
with salt.utils.files.fopen(hfn, 'w'):
|
||||
pass
|
||||
return hfn
|
||||
|
||||
|
@ -47,7 +47,7 @@ def _list_hosts():
|
|||
ret = odict.OrderedDict()
|
||||
if not os.path.isfile(hfn):
|
||||
return ret
|
||||
with salt.utils.fopen(hfn) as ifile:
|
||||
with salt.utils.files.fopen(hfn) as ifile:
|
||||
for line in ifile:
|
||||
line = line.strip()
|
||||
if not line:
|
||||
|
@ -161,7 +161,7 @@ def set_host(ip, alias):
|
|||
if not alias.strip():
|
||||
line_to_add = ''
|
||||
|
||||
with salt.utils.fopen(hfn) as fp_:
|
||||
with salt.utils.files.fopen(hfn) as fp_:
|
||||
lines = fp_.readlines()
|
||||
for ind, line in enumerate(lines):
|
||||
tmpline = line.strip()
|
||||
|
@ -182,7 +182,7 @@ def set_host(ip, alias):
|
|||
lines[-1] += os.linesep
|
||||
line = line_to_add
|
||||
lines.append(line)
|
||||
with salt.utils.fopen(hfn, 'w+') as ofile:
|
||||
with salt.utils.files.fopen(hfn, 'w+') as ofile:
|
||||
ofile.writelines(lines)
|
||||
return True
|
||||
|
||||
|
@ -200,7 +200,7 @@ def rm_host(ip, alias):
|
|||
if not has_pair(ip, alias):
|
||||
return True
|
||||
hfn = _get_or_create_hostfile()
|
||||
with salt.utils.fopen(hfn) as fp_:
|
||||
with salt.utils.files.fopen(hfn) as fp_:
|
||||
lines = fp_.readlines()
|
||||
for ind in range(len(lines)):
|
||||
tmpline = lines[ind].strip()
|
||||
|
@ -221,7 +221,7 @@ def rm_host(ip, alias):
|
|||
else:
|
||||
# Only an alias was removed
|
||||
lines[ind] = newline + os.linesep
|
||||
with salt.utils.fopen(hfn, 'w+') as ofile:
|
||||
with salt.utils.files.fopen(hfn, 'w+') as ofile:
|
||||
ofile.writelines(lines)
|
||||
return True
|
||||
|
||||
|
@ -271,7 +271,7 @@ def _write_hosts(hosts):
|
|||
lines.append(line)
|
||||
|
||||
hfn = _get_or_create_hostfile()
|
||||
with salt.utils.fopen(hfn, 'w+') as ofile:
|
||||
with salt.utils.files.fopen(hfn, 'w+') as ofile:
|
||||
for line in lines:
|
||||
if line.strip():
|
||||
# /etc/hosts needs to end with EOL so that some utils that read
|
||||
|
|
|
@ -102,7 +102,7 @@ def _write_incron_lines(user, lines):
|
|||
return ret
|
||||
else:
|
||||
path = salt.utils.files.mkstemp()
|
||||
with salt.utils.fopen(path, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(path, 'w+') as fp_:
|
||||
fp_.writelines(lines)
|
||||
if __grains__['os_family'] == 'Solaris' and user != "root":
|
||||
__salt__['cmd.run']('chown {0} {1}'.format(user, path), python_shell=False)
|
||||
|
@ -121,7 +121,7 @@ def _write_file(folder, filename, data):
|
|||
msg = msg.format(filename, folder)
|
||||
log.error(msg)
|
||||
raise AttributeError(msg)
|
||||
with salt.utils.fopen(path, 'w') as fp_:
|
||||
with salt.utils.files.fopen(path, 'w') as fp_:
|
||||
fp_.write(data)
|
||||
|
||||
return 0
|
||||
|
@ -133,7 +133,7 @@ def _read_file(folder, filename):
|
|||
'''
|
||||
path = os.path.join(folder, filename)
|
||||
try:
|
||||
with salt.utils.fopen(path, 'rb') as contents:
|
||||
with salt.utils.files.fopen(path, 'rb') as contents:
|
||||
return contents.readlines()
|
||||
except (OSError, IOError):
|
||||
return ''
|
||||
|
|
|
@ -21,7 +21,7 @@ import json
|
|||
|
||||
# Import Salt libs
|
||||
import salt.ext.six as six
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.utils.odict import OrderedDict
|
||||
|
||||
|
@ -369,7 +369,7 @@ class _Ini(_Section):
|
|||
def refresh(self, inicontents=None):
|
||||
if inicontents is None:
|
||||
try:
|
||||
with salt.utils.fopen(self.name) as rfh:
|
||||
with salt.utils.files.fopen(self.name) as rfh:
|
||||
inicontents = rfh.read()
|
||||
except (OSError, IOError) as exc:
|
||||
raise CommandExecutionError(
|
||||
|
@ -395,7 +395,7 @@ class _Ini(_Section):
|
|||
|
||||
def flush(self):
|
||||
try:
|
||||
with salt.utils.fopen(self.name, 'w') as outfile:
|
||||
with salt.utils.files.fopen(self.name, 'w') as outfile:
|
||||
ini_gen = self.gen_ini()
|
||||
next(ini_gen)
|
||||
outfile.writelines(ini_gen)
|
||||
|
|
|
@ -29,6 +29,7 @@ from salt.modules.inspectlib.entities import (AllowedDir, IgnoredDir, Package,
|
|||
PayloadFile, PackageCfgFile)
|
||||
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.utils import fsutils
|
||||
from salt.utils import reinit_crypto
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
@ -475,7 +476,7 @@ def is_alive(pidfile):
|
|||
Check if PID is still alive.
|
||||
'''
|
||||
try:
|
||||
with salt.utils.fopen(pidfile) as fp_:
|
||||
with salt.utils.files.fopen(pidfile) as fp_:
|
||||
os.kill(int(fp_.read().strip()), 0)
|
||||
return True
|
||||
except Exception as ex:
|
||||
|
@ -517,7 +518,7 @@ if __name__ == '__main__':
|
|||
pid = os.fork()
|
||||
if pid > 0:
|
||||
reinit_crypto()
|
||||
with salt.utils.fopen(os.path.join(pidfile, EnvLoader.PID_FILE), 'w') as fp_:
|
||||
with salt.utils.files.fopen(os.path.join(pidfile, EnvLoader.PID_FILE), 'w') as fp_:
|
||||
fp_.write('{0}\n'.format(pid))
|
||||
sys.exit(0)
|
||||
except OSError as ex:
|
||||
|
|
|
@ -24,7 +24,7 @@ import platform
|
|||
import socket
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.modules.inspectlib.exceptions import InspectorKiwiProcessorException
|
||||
|
||||
# Import third party libs
|
||||
|
@ -145,14 +145,14 @@ class KiwiExporter(object):
|
|||
'''
|
||||
# Get real local users with the local passwords
|
||||
shadow = {}
|
||||
with salt.utils.fopen('/etc/shadow') as rfh:
|
||||
with salt.utils.files.fopen('/etc/shadow') as rfh:
|
||||
for sh_line in rfh.read().split(os.linesep):
|
||||
if sh_line.strip():
|
||||
login, pwd = sh_line.split(":")[:2]
|
||||
if pwd and pwd[0] not in '!*':
|
||||
shadow[login] = {'p': pwd}
|
||||
|
||||
with salt.utils.fopen('/etc/passwd') as rfh:
|
||||
with salt.utils.files.fopen('/etc/passwd') as rfh:
|
||||
for ps_line in rfh.read().split(os.linesep):
|
||||
if ps_line.strip():
|
||||
ps_line = ps_line.strip().split(':')
|
||||
|
|
|
@ -21,6 +21,8 @@ import time
|
|||
import logging
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.utils.files
|
||||
import salt.utils.fsutils
|
||||
import salt.utils.network
|
||||
from salt.modules.inspectlib.exceptions import (InspectorQueryException, SIException)
|
||||
from salt.modules.inspectlib import EnvLoader
|
||||
|
@ -214,7 +216,7 @@ class Query(EnvLoader):
|
|||
'''
|
||||
users = dict()
|
||||
path = '/etc/passwd'
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(path, 'r') as fp_:
|
||||
for line in fp_:
|
||||
line = line.strip()
|
||||
if ':' not in line:
|
||||
|
@ -239,7 +241,7 @@ class Query(EnvLoader):
|
|||
'''
|
||||
groups = dict()
|
||||
path = '/etc/group'
|
||||
with salt.utils.fopen(path, 'r') as fp_:
|
||||
with salt.utils.files.fopen(path, 'r') as fp_:
|
||||
for line in fp_:
|
||||
line = line.strip()
|
||||
if ':' not in line:
|
||||
|
|
|
@ -37,6 +37,7 @@ import string
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
|
||||
from salt.exceptions import SaltException
|
||||
from salt.ext import six
|
||||
|
@ -970,7 +971,7 @@ def _parse_conf(conf_file=None, in_mem=False, family='ipv4'):
|
|||
|
||||
rules = ''
|
||||
if conf_file:
|
||||
with salt.utils.fopen(conf_file, 'r') as ifile:
|
||||
with salt.utils.files.fopen(conf_file, 'r') as ifile:
|
||||
rules = ifile.read()
|
||||
elif in_mem:
|
||||
cmd = '{0}-save' . format(_iptables_cmd(family))
|
||||
|
|
|
@ -33,7 +33,7 @@ try:
|
|||
except ImportError:
|
||||
HAS_JENKINS = False
|
||||
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||
|
@ -264,7 +264,7 @@ def create_job(name=None,
|
|||
else:
|
||||
config_xml_file = __salt__['cp.cache_file'](config_xml, saltenv)
|
||||
|
||||
with salt.utils.fopen(config_xml_file) as _fp:
|
||||
with salt.utils.files.fopen(config_xml_file) as _fp:
|
||||
config_xml = _fp.read()
|
||||
|
||||
server = _connect()
|
||||
|
@ -303,7 +303,7 @@ def update_job(name=None,
|
|||
else:
|
||||
config_xml_file = __salt__['cp.cache_file'](config_xml, saltenv)
|
||||
|
||||
with salt.utils.fopen(config_xml_file) as _fp:
|
||||
with salt.utils.files.fopen(config_xml_file) as _fp:
|
||||
config_xml = _fp.read()
|
||||
|
||||
server = _connect()
|
||||
|
|
|
@ -25,6 +25,9 @@ try:
|
|||
except ImportError:
|
||||
from salt._compat import ElementTree as etree
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.files
|
||||
|
||||
# Juniper interface libraries
|
||||
# https://github.com/Juniper/py-junos-eznc
|
||||
try:
|
||||
|
@ -41,11 +44,6 @@ try:
|
|||
except ImportError:
|
||||
HAS_JUNOS = False
|
||||
|
||||
# Import salt libraries
|
||||
from salt.utils import fopen
|
||||
from salt.utils import files
|
||||
from salt.utils import safe_rm
|
||||
|
||||
# Set up logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -234,7 +232,7 @@ def rpc(cmd=None, dest=None, format='xml', **kwargs):
|
|||
write_response = json.dumps(reply, indent=1)
|
||||
else:
|
||||
write_response = etree.tostring(reply)
|
||||
with fopen(dest, 'w') as fp:
|
||||
with salt.utils.files.fopen(dest, 'w') as fp:
|
||||
fp.write(write_response)
|
||||
return ret
|
||||
|
||||
|
@ -460,7 +458,7 @@ def rollback(id=0, **kwargs):
|
|||
if 'diffs_file' in op and op['diffs_file'] is not None:
|
||||
diff = conn.cu.diff()
|
||||
if diff is not None:
|
||||
with fopen(op['diffs_file'], 'w') as fp:
|
||||
with salt.utils.files.fopen(op['diffs_file'], 'w') as fp:
|
||||
fp.write(diff)
|
||||
else:
|
||||
log.info(
|
||||
|
@ -655,7 +653,7 @@ def cli(command=None, format='text', **kwargs):
|
|||
ret['message'] = jxmlease.parse(result)
|
||||
|
||||
if 'dest' in op and op['dest'] is not None:
|
||||
with fopen(op['dest'], 'w') as fp:
|
||||
with salt.utils.files.fopen(op['dest'], 'w') as fp:
|
||||
fp.write(result)
|
||||
|
||||
ret['out'] = True
|
||||
|
@ -833,7 +831,7 @@ def install_config(path=None, **kwargs):
|
|||
if "template_vars" in op:
|
||||
template_vars = op["template_vars"]
|
||||
|
||||
template_cached_path = files.mkstemp()
|
||||
template_cached_path = salt.utils.files.mkstemp()
|
||||
__salt__['cp.get_template'](
|
||||
path,
|
||||
template_cached_path,
|
||||
|
@ -888,7 +886,7 @@ def install_config(path=None, **kwargs):
|
|||
return ret
|
||||
|
||||
finally:
|
||||
safe_rm(template_cached_path)
|
||||
salt.utils.files.safe_rm(template_cached_path)
|
||||
|
||||
config_diff = cu.diff()
|
||||
if config_diff is None:
|
||||
|
@ -929,7 +927,7 @@ def install_config(path=None, **kwargs):
|
|||
|
||||
try:
|
||||
if write_diff and config_diff is not None:
|
||||
with fopen(write_diff, 'w') as fp:
|
||||
with salt.utils.files.fopen(write_diff, 'w') as fp:
|
||||
fp.write(config_diff)
|
||||
except Exception as exception:
|
||||
ret['message'] = 'Could not write into diffs_file due to: "{0}"'.format(
|
||||
|
@ -1004,7 +1002,7 @@ def install_os(path=None, **kwargs):
|
|||
ret['out'] = False
|
||||
return ret
|
||||
|
||||
image_cached_path = files.mkstemp()
|
||||
image_cached_path = salt.utils.files.mkstemp()
|
||||
__salt__['cp.get_file'](path, image_cached_path)
|
||||
|
||||
if not os.path.isfile(image_cached_path):
|
||||
|
@ -1034,7 +1032,7 @@ def install_os(path=None, **kwargs):
|
|||
ret['out'] = False
|
||||
return ret
|
||||
finally:
|
||||
safe_rm(image_cached_path)
|
||||
salt.utils.files.safe_rm(image_cached_path)
|
||||
|
||||
if 'reboot' in op and op['reboot'] is True:
|
||||
try:
|
||||
|
@ -1231,7 +1229,7 @@ def load(path=None, **kwargs):
|
|||
if "template_vars" in op:
|
||||
template_vars = op["template_vars"]
|
||||
|
||||
template_cached_path = files.mkstemp()
|
||||
template_cached_path = salt.utils.files.mkstemp()
|
||||
__salt__['cp.get_template'](
|
||||
path,
|
||||
template_cached_path,
|
||||
|
@ -1278,7 +1276,7 @@ def load(path=None, **kwargs):
|
|||
ret['out'] = False
|
||||
return ret
|
||||
finally:
|
||||
safe_rm(template_cached_path)
|
||||
salt.utils.files.safe_rm(template_cached_path)
|
||||
|
||||
return ret
|
||||
|
||||
|
|
|
@ -26,7 +26,7 @@ from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: dis
|
|||
|
||||
# TODO Remove requests dependency
|
||||
|
||||
import salt.utils
|
||||
import salt.utils.files
|
||||
import salt.utils.http as http
|
||||
|
||||
__virtualname__ = 'k8s'
|
||||
|
@ -55,7 +55,7 @@ def _guess_apiserver(apiserver_url=None):
|
|||
config = __salt__['config.get']('k8s:config', default_config)
|
||||
kubeapi_regex = re.compile("""KUBE_MASTER=['"]--master=(.*)['"]""",
|
||||
re.MULTILINE)
|
||||
with salt.utils.fopen(config) as fh_k8s:
|
||||
with salt.utils.files.fopen(config) as fh_k8s:
|
||||
for line in fh_k8s.readlines():
|
||||
match_line = kubeapi_regex.match(line)
|
||||
if match_line:
|
||||
|
@ -541,7 +541,7 @@ def _is_valid_secret_file(filename):
|
|||
|
||||
def _file_encode(filename):
|
||||
log.trace("Encoding secret file: {0}".format(filename))
|
||||
with salt.utils.fopen(filename, "rb") as f:
|
||||
with salt.utils.files.fopen(filename, "rb") as f:
|
||||
data = f.read()
|
||||
return base64.b64encode(data)
|
||||
|
||||
|
|
|
@@ -10,7 +10,7 @@ import re
import logging

# Import salt libs
import salt.utils
import salt.utils.files

log = logging.getLogger(__name__)

@@ -201,7 +201,7 @@ def mod_list(only_persist=False):
conf = _get_modules_conf()
if os.path.exists(conf):
try:
with salt.utils.fopen(conf, 'r') as modules_file:
with salt.utils.files.fopen(conf, 'r') as modules_file:
for line in modules_file:
line = line.strip()
mod_name = _strip_module_name(line)

@@ -26,7 +26,7 @@ import yaml

from salt.exceptions import CommandExecutionError
from salt.ext.six import iteritems
import salt.utils
import salt.utils.files
import salt.utils.templates

try:

@@ -1220,7 +1220,7 @@ def __read_and_render_yaml_file(source,
raise CommandExecutionError(
'Source file \'{0}\' not found'.format(source))

with salt.utils.fopen(sfn, 'r') as src:
with salt.utils.files.fopen(sfn, 'r') as src:
contents = src.read()

if template:

@@ -22,6 +22,7 @@ import re
# Import salt libs
import salt.utils
import salt.utils.decorators as decorators
import salt.utils.files
from salt.utils.versions import LooseVersion as _LooseVersion
import salt.ext.six as six

@@ -88,7 +89,7 @@ def _available_services():
try:
# This assumes most of the plist files
# will be already in XML format
with salt.utils.fopen(file_path):
with salt.utils.files.fopen(file_path):
plist = plistlib.readPlist(true_path)

except Exception:

Some files were not shown because too many files have changed in this diff.
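
For illustration only (not part of this commit's diff): every hunk above follows the same mechanical pattern — the module gains an ``import salt.utils.files`` and its call sites switch from the old ``salt.utils`` helpers to the moved ones (``fopen``, ``flopen``, ``safe_rm``). A minimal sketch of such a call site; the function name and path below are hypothetical, not taken from the commit:

.. code-block:: python

    # Sketch of the migration pattern shown throughout this diff.
    import salt.utils.files


    def read_example_conf(path='/etc/example.conf'):
        # Before this commit: salt.utils.fopen(path, 'r')
        # After this commit: the same helper, now under salt.utils.files
        with salt.utils.files.fopen(path, 'r') as fp_:
            return fp_.read()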