Mirror of https://github.com/saltstack/salt.git, synced 2025-04-10 06:41:40 +00:00
Pyupgrade and drop six
This commit is contained in:
parent 0d71775f51
commit d9b50659b2
1565 changed files with 8268 additions and 16733 deletions
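
The hunks below repeat a handful of mechanical patterns: drop "from __future__" imports and "# -*- coding: utf-8 -*-" cookies, replace salt.ext.six helpers (six.string_types, six.text_type, six.iteritems, six.moves) with the Python 3 built-ins, call super() without arguments, and let pyupgrade rewrite "{0}" format fields as "{}". A minimal sketch of the before/after shape, using made-up names (Cache, MemCache, normalize) rather than code copied from this diff:

# Sketch only: illustrative names, not an excerpt from any file in this commit.

# Before (Python 2/3 compatible style removed by this commit):
#   from __future__ import absolute_import, print_function, unicode_literals
#   from salt.ext import six
#
#   class MemCache(Cache):
#       def __init__(self, opts, **kwargs):
#           super(MemCache, self).__init__(opts, **kwargs)
#
#   if isinstance(cmd, six.string_types):
#       cmd = [cmd]
#   msg = "Error: {0}".format(exc)

# After (Python 3 only):
class Cache:
    def __init__(self, opts, **kwargs):
        self.opts = opts
        self.kwargs = kwargs


class MemCache(Cache):
    def __init__(self, opts, **kwargs):
        # pyupgrade: super() no longer needs explicit class/instance arguments
        super().__init__(opts, **kwargs)


def normalize(cmd):
    # six.string_types becomes the built-in str
    if isinstance(cmd, str):
        cmd = [cmd]
    return cmd


msg = "Error: {}".format("connection refused")  # "{0}" placeholders become "{}"

The diffs below are these same transformations applied file by file.
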
@@ -1,5 +1,4 @@
 #! /bin/env python
-from __future__ import print_function
 
 import argparse
 import os

@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 """
 The setup script for sodium_grabber
 """

@@ -1,5 +1,4 @@
 #!/usr/bin/python
-from __future__ import print_function
 
 import getopt
 import os
@@ -49,9 +48,8 @@ def main(argv):
 if target == "":
     display_help()
 
-if sys.version_info >= (3, 0):
-    search = search.encode("utf-8")
-    replace = replace.encode("utf-8")
+search = search.encode("utf-8")
+replace = replace.encode("utf-8")
 f = open(target, "rb").read()
 f = f.replace(search, replace)
 f = f.replace(search.lower(), replace)

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt._logging
 ~~~~~~~~~~~~~
@@ -11,7 +10,5 @@
 the python's logging system.
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 from salt._logging.impl import * # pylint: disable=wildcard-import

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt._logging.handlers
 ~~~~~~~~~~~~~~~~~~~~~~
@@ -6,18 +5,15 @@
 Salt's logging handlers
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 import logging
 import logging.handlers
+import queue
 import sys
 from collections import deque
 
-# Import salt libs
 from salt._logging.mixins import ExcInfoOnLogLevelFormatMixin, NewStyleClassMixin
-from salt.ext.six.moves import queue # pylint: disable=import-error,no-name-in-module
 
 # from salt.utils.versions import warn_until_date
 
@@ -48,7 +44,7 @@ class TemporaryLoggingHandler(logging.NullHandler):
 # '{{date}}.'.format(name=__name__)
 # )
 self.__max_queue_size = max_queue_size
-super(TemporaryLoggingHandler, self).__init__(level=level)
+super().__init__(level=level)
 self.__messages = deque(maxlen=max_queue_size)
 
 def handle(self, record):
@@ -117,7 +113,7 @@ class SysLogHandler(
 del exc_type, exc, exc_traceback
 
 if not handled:
-    super(SysLogHandler, self).handleError(record)
+    super().handleError(record)
 
 
 class RotatingFileHandler(
@@ -152,7 +148,7 @@ class RotatingFileHandler(
 ):
 if self.level <= logging.WARNING:
     sys.stderr.write(
-        '[WARNING ] Unable to rotate the log file "{0}" '
+        '[WARNING ] Unable to rotate the log file "{}" '
         "because it is in use\n".format(self.baseFilename)
     )
 handled = True
@@ -162,7 +158,7 @@ class RotatingFileHandler(
 del exc_type, exc, exc_traceback
 
 if not handled:
-    super(RotatingFileHandler, self).handleError(record)
+    super().handleError(record)
 
 
 class WatchedFileHandler(
@@ -217,7 +213,7 @@ if sys.version_info < (3, 2):
 except queue.Full:
     sys.stderr.write(
         "[WARNING ] Message queue is full, "
-        'unable to write "{0}" to log'.format(record)
+        'unable to write "{}" to log'.format(record)
     )
 
 def prepare(self, record):
@@ -266,7 +262,7 @@ elif sys.version_info < (3, 7):
 ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler
 ): # pylint: disable=no-member,inconsistent-mro
 def __init__(self, queue): # pylint: disable=useless-super-delegation
-    super(QueueHandler, self).__init__(queue)
+    super().__init__(queue)
 # warn_until_date(
 # '20220101',
 # 'Please stop using \'{name}.QueueHandler\' and instead '
@@ -325,7 +321,7 @@ else:
 ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler
 ): # pylint: disable=no-member,inconsistent-mro
 def __init__(self, queue): # pylint: disable=useless-super-delegation
-    super(QueueHandler, self).__init__(queue)
+    super().__init__(queue)
 # warn_until_date(
 # '20220101',
 # 'Please stop using \'{name}.QueueHandler\' and instead '
@@ -347,5 +343,5 @@ else:
 except queue.Full:
     sys.stderr.write(
         "[WARNING ] Message queue is full, "
-        'unable to write "{0}" to log.\n'.format(record)
+        'unable to write "{}" to log.\n'.format(record)
     )

@@ -4,8 +4,6 @@
 
 Salt's logging implementation classes/functionality
 """
-
-
 import logging
 import re
 import sys
@@ -30,8 +28,6 @@ from salt.exceptions import LoggingRuntimeError # isort:skip
 from salt.utils.ctx import RequestContext # isort:skip
 from salt.utils.textformat import TextFormat # isort:skip
 
-# from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=import-error,no-name-in-module
-
 LOG_LEVELS = {
     "all": logging.NOTSET,
     "debug": logging.DEBUG,

@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt._logging.mixins
 ~~~~~~~~~~~~~~~~~~~~
@@ -6,14 +5,12 @@
 Logging related mix-ins
 """
 
-# Import Python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import sys
 
 
-class NewStyleClassMixin(object):
+class NewStyleClassMixin:
 """
 Simple new style class to make pylint shut up!
 This is required because SaltLoggingClass can't subclass object directly:
@@ -22,7 +19,7 @@ class NewStyleClassMixin(object):
 """
 
 
-class LoggingProfileMixin(object):
+class LoggingProfileMixin:
 """
 Simple mix-in class to add a trace method to python's logging.
 """
@@ -31,7 +28,7 @@ class LoggingProfileMixin(object):
 self.log(getattr(logging, "PROFILE", 15), msg, *args, **kwargs)
 
 
-class LoggingTraceMixin(object):
+class LoggingTraceMixin:
 """
 Simple mix-in class to add a trace method to python's logging.
 """
@@ -40,7 +37,7 @@ class LoggingTraceMixin(object):
 self.log(getattr(logging, "TRACE", 5), msg, *args, **kwargs)
 
 
-class LoggingGarbageMixin(object):
+class LoggingGarbageMixin:
 """
 Simple mix-in class to add a garbage method to python's logging.
 """
@@ -74,10 +71,10 @@ class LoggingMixinMeta(type):
 bases.append(LoggingTraceMixin)
 if include_garbage:
     bases.append(LoggingGarbageMixin)
-return super(LoggingMixinMeta, mcs).__new__(mcs, name, tuple(bases), attrs)
+return super().__new__(mcs, name, tuple(bases), attrs)
 
 
-class ExcInfoOnLogLevelFormatMixin(object):
+class ExcInfoOnLogLevelFormatMixin:
 """
 Logging handler class mixin to properly handle including exc_info on a per logging handler basis
 """
@@ -86,7 +83,7 @@ class ExcInfoOnLogLevelFormatMixin(object):
 """
 Format the log record to include exc_info if the handler is enabled for a specific log level
 """
-formatted_record = super(ExcInfoOnLogLevelFormatMixin, self).format(record)
+formatted_record = super().format(record)
 exc_info_on_loglevel = getattr(record, "exc_info_on_loglevel", None)
 exc_info_on_loglevel_formatted = getattr(
     record, "exc_info_on_loglevel_formatted", None

|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
The acl module handles publisher_acl operations
|
||||
|
||||
|
@ -9,16 +8,11 @@ found by reading the salt documentation:
|
|||
"""
|
||||
|
||||
# Import python libraries
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.stringutils
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
|
||||
class PublisherACL(object):
|
||||
class PublisherACL:
|
||||
"""
|
||||
Represents the publisher ACL and provides methods
|
||||
to query the ACL for given operations
|
||||
|
@ -38,7 +32,7 @@ class PublisherACL(object):
|
|||
|
||||
def cmd_is_blacklisted(self, cmd):
|
||||
# If this is a regular command, it is a single function
|
||||
if isinstance(cmd, six.string_types):
|
||||
if isinstance(cmd, str):
|
||||
cmd = [cmd]
|
||||
for fun in cmd:
|
||||
if not salt.utils.stringutils.check_whitelist_blacklist(
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
An "Always Approved" eauth interface to test against, not intended for
|
||||
production use
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Provide authentication using Django Web Framework
|
||||
|
||||
|
@ -47,16 +46,11 @@ indicated above, though the model DOES NOT have to be named
|
|||
'SaltExternalAuthModel'.
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
# pylint: disable=import-error
|
||||
try:
|
||||
import django
|
||||
|
@ -118,7 +112,7 @@ def __django_auth_setup():
|
|||
django_module_name, globals(), locals(), "SaltExternalAuthModel"
|
||||
)
|
||||
# pylint: enable=possibly-unused-variable
|
||||
DJANGO_AUTH_CLASS_str = "django_auth_module.{0}".format(django_model_name)
|
||||
DJANGO_AUTH_CLASS_str = "django_auth_module.{}".format(django_model_name)
|
||||
DJANGO_AUTH_CLASS = eval(DJANGO_AUTH_CLASS_str) # pylint: disable=W0123
|
||||
|
||||
|
||||
|
@ -211,7 +205,7 @@ def acl(username):
|
|||
found = False
|
||||
for d in auth_dict[a.user_fk.username]:
|
||||
if isinstance(d, dict):
|
||||
if a.minion_or_fn_matcher in six.iterkeys(d):
|
||||
if a.minion_or_fn_matcher in d.keys():
|
||||
auth_dict[a.user_fk.username][a.minion_or_fn_matcher].append(
|
||||
a.minion_fn
|
||||
)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Provide authentication using local files
|
||||
|
||||
|
@ -95,8 +94,6 @@ When using ``htdigest`` the ``^realm`` must be set:
|
|||
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Provide authentication using OpenStack Keystone
|
||||
|
||||
:depends: - keystoneclient Python module
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
try:
|
||||
from keystoneclient.v2_0 import client
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Provide authentication using MySQL.
|
||||
|
||||
|
@ -49,7 +47,6 @@ Enable MySQL authentication.
|
|||
:depends: - MySQL-python Python module
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# The pam components have been modified to be salty and have been taken from
|
||||
# the pam module under this licence:
|
||||
# (c) 2007 Chris AtLee <chris@atlee.ca>
|
||||
|
@ -35,8 +34,6 @@ authenticated against. This defaults to `login`
|
|||
|
||||
"""
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
from ctypes import (
|
||||
|
@ -55,13 +52,8 @@ from ctypes import (
|
|||
)
|
||||
from ctypes.util import find_library
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.user
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
|
@ -110,7 +102,7 @@ class PamMessage(Structure):
|
|||
]
|
||||
|
||||
def __repr__(self):
|
||||
return "<PamMessage {0} '{1}'>".format(self.msg_style, self.msg)
|
||||
return "<PamMessage {} '{}'>".format(self.msg_style, self.msg)
|
||||
|
||||
|
||||
class PamResponse(Structure):
|
||||
|
@ -124,7 +116,7 @@ class PamResponse(Structure):
|
|||
]
|
||||
|
||||
def __repr__(self):
|
||||
return "<PamResponse {0} '{1}'>".format(self.resp_retcode, self.resp)
|
||||
return "<PamResponse {} '{}'>".format(self.resp_retcode, self.resp)
|
||||
|
||||
|
||||
CONV_FUNC = CFUNCTYPE(
|
||||
|
@ -182,11 +174,11 @@ def authenticate(username, password):
|
|||
"""
|
||||
service = __opts__.get("auth.pam.service", "login")
|
||||
|
||||
if isinstance(username, six.text_type):
|
||||
if isinstance(username, str):
|
||||
username = username.encode(__salt_system_encoding__)
|
||||
if isinstance(password, six.text_type):
|
||||
if isinstance(password, str):
|
||||
password = password.encode(__salt_system_encoding__)
|
||||
if isinstance(service, six.text_type):
|
||||
if isinstance(service, str):
|
||||
service = service.encode(__salt_system_encoding__)
|
||||
|
||||
@CONV_FUNC
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Provide authentication using a REST call
|
||||
|
||||
|
@ -23,12 +22,9 @@ as above.
|
|||
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.http
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Provide authentication using YubiKey.
|
||||
|
||||
|
@ -38,8 +36,6 @@ the API key will be updated on all the YubiCloud servers.
|
|||
|
||||
"""
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
|
|
|
@ -326,7 +326,7 @@ def beacon(config):
|
|||
r_mask = 0
|
||||
for sub in mask:
|
||||
r_mask |= _get_mask(sub)
|
||||
elif isinstance(mask, salt.ext.six.binary_type):
|
||||
elif isinstance(mask, bytes):
|
||||
r_mask = _get_mask(mask)
|
||||
else:
|
||||
r_mask = mask
|
||||
|
|
33  salt/cache/__init__.py (vendored)
|
@ -1,21 +1,16 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Loader mechanism for caching data, with data expiration, etc.
|
||||
|
||||
.. versionadded:: 2016.11.0
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
# Import Salt libs
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.syspaths
|
||||
from salt.ext import six
|
||||
from salt.payload import Serial
|
||||
from salt.utils.odict import OrderedDict
|
||||
|
||||
|
@ -35,7 +30,7 @@ def factory(opts, **kwargs):
|
|||
return cls(opts, **kwargs)
|
||||
|
||||
|
||||
class Cache(object):
|
||||
class Cache:
|
||||
"""
|
||||
Base caching object providing access to the modular cache subsystem.
|
||||
|
||||
|
@ -83,7 +78,7 @@ class Cache(object):
|
|||
|
||||
def __lazy_init(self):
|
||||
self._modules = salt.loader.cache(self.opts, self.serial)
|
||||
fun = "{0}.init_kwargs".format(self.driver)
|
||||
fun = "{}.init_kwargs".format(self.driver)
|
||||
if fun in self.modules:
|
||||
self._kwargs = self.modules[fun](self._kwargs)
|
||||
else:
|
||||
|
@ -154,7 +149,7 @@ class Cache(object):
|
|||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
"""
|
||||
fun = "{0}.store".format(self.driver)
|
||||
fun = "{}.store".format(self.driver)
|
||||
return self.modules[fun](bank, key, data, **self._kwargs)
|
||||
|
||||
def fetch(self, bank, key):
|
||||
|
@ -178,7 +173,7 @@ class Cache(object):
|
|||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
"""
|
||||
fun = "{0}.fetch".format(self.driver)
|
||||
fun = "{}.fetch".format(self.driver)
|
||||
return self.modules[fun](bank, key, **self._kwargs)
|
||||
|
||||
def updated(self, bank, key):
|
||||
|
@ -202,7 +197,7 @@ class Cache(object):
|
|||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
"""
|
||||
fun = "{0}.updated".format(self.driver)
|
||||
fun = "{}.updated".format(self.driver)
|
||||
return self.modules[fun](bank, key, **self._kwargs)
|
||||
|
||||
def flush(self, bank, key=None):
|
||||
|
@ -223,7 +218,7 @@ class Cache(object):
|
|||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
"""
|
||||
fun = "{0}.flush".format(self.driver)
|
||||
fun = "{}.flush".format(self.driver)
|
||||
return self.modules[fun](bank, key=key, **self._kwargs)
|
||||
|
||||
def list(self, bank):
|
||||
|
@ -242,7 +237,7 @@ class Cache(object):
|
|||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
"""
|
||||
fun = "{0}.list".format(self.driver)
|
||||
fun = "{}.list".format(self.driver)
|
||||
return self.modules[fun](bank, **self._kwargs)
|
||||
|
||||
def contains(self, bank, key=None):
|
||||
|
@ -267,7 +262,7 @@ class Cache(object):
|
|||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
"""
|
||||
fun = "{0}.contains".format(self.driver)
|
||||
fun = "{}.contains".format(self.driver)
|
||||
return self.modules[fun](bank, key, **self._kwargs)
|
||||
|
||||
|
||||
|
@ -281,7 +276,7 @@ class MemCache(Cache):
|
|||
data = {}
|
||||
|
||||
def __init__(self, opts, **kwargs):
|
||||
super(MemCache, self).__init__(opts, **kwargs)
|
||||
super().__init__(opts, **kwargs)
|
||||
self.expire = opts.get("memcache_expire_seconds", 10)
|
||||
self.max = opts.get("memcache_max_items", 1024)
|
||||
self.cleanup = opts.get("memcache_full_cleanup", False)
|
||||
|
@ -294,7 +289,7 @@ class MemCache(Cache):
|
|||
@classmethod
|
||||
def __cleanup(cls, expire):
|
||||
now = time.time()
|
||||
for storage in six.itervalues(cls.data):
|
||||
for storage in cls.data.values():
|
||||
for key, data in list(storage.items()):
|
||||
if data[0] + expire < now:
|
||||
del storage[key]
|
||||
|
@ -302,7 +297,7 @@ class MemCache(Cache):
|
|||
break
|
||||
|
||||
def _get_storage_id(self):
|
||||
fun = "{0}.storage_id".format(self.driver)
|
||||
fun = "{}.storage_id".format(self.driver)
|
||||
if fun in self.modules:
|
||||
return self.modules[fun](self.kwargs)
|
||||
else:
|
||||
|
@ -338,7 +333,7 @@ class MemCache(Cache):
|
|||
return record[1]
|
||||
|
||||
# Have no value for the key or value is expired
|
||||
data = super(MemCache, self).fetch(bank, key)
|
||||
data = super().fetch(bank, key)
|
||||
if len(self.storage) >= self.max:
|
||||
if self.cleanup:
|
||||
MemCache.__cleanup(self.expire)
|
||||
|
@ -349,7 +344,7 @@ class MemCache(Cache):
|
|||
|
||||
def store(self, bank, key, data):
|
||||
self.storage.pop((bank, key), None)
|
||||
super(MemCache, self).store(bank, key, data)
|
||||
super().store(bank, key, data)
|
||||
if len(self.storage) >= self.max:
|
||||
if self.cleanup:
|
||||
MemCache.__cleanup(self.expire)
|
||||
|
@ -359,4 +354,4 @@ class MemCache(Cache):
|
|||
|
||||
def flush(self, bank, key=None):
|
||||
self.storage.pop((bank, key), None)
|
||||
super(MemCache, self).flush(bank, key)
|
||||
super().flush(bank, key)
|
||||
|
|
20  salt/cache/consul.py (vendored)
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Minion data cache plugin for Consul key/value data store.
|
||||
|
||||
|
@ -46,7 +45,6 @@ value to ``consul``:
|
|||
.. _`python-consul documentation`: https://python-consul.readthedocs.io/en/latest/#consul
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -106,13 +104,13 @@ def store(bank, key, data):
|
|||
"""
|
||||
Store a key value.
|
||||
"""
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
c_key = "{}/{}".format(bank, key)
|
||||
try:
|
||||
c_data = __context__["serial"].dumps(data)
|
||||
api.kv.put(c_key, c_data)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
"There was an error writing the key, {0}: {1}".format(c_key, exc)
|
||||
"There was an error writing the key, {}: {}".format(c_key, exc)
|
||||
)
|
||||
|
||||
|
||||
|
@ -120,7 +118,7 @@ def fetch(bank, key):
|
|||
"""
|
||||
Fetch a key value.
|
||||
"""
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
c_key = "{}/{}".format(bank, key)
|
||||
try:
|
||||
_, value = api.kv.get(c_key)
|
||||
if value is None:
|
||||
|
@ -128,7 +126,7 @@ def fetch(bank, key):
|
|||
return __context__["serial"].loads(value["Value"])
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
"There was an error reading the key, {0}: {1}".format(c_key, exc)
|
||||
"There was an error reading the key, {}: {}".format(c_key, exc)
|
||||
)
|
||||
|
||||
|
||||
|
@ -139,12 +137,12 @@ def flush(bank, key=None):
|
|||
if key is None:
|
||||
c_key = bank
|
||||
else:
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
c_key = "{}/{}".format(bank, key)
|
||||
try:
|
||||
return api.kv.delete(c_key, recurse=key is None)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
"There was an error removing the key, {0}: {1}".format(c_key, exc)
|
||||
"There was an error removing the key, {}: {}".format(c_key, exc)
|
||||
)
|
||||
|
||||
|
||||
|
@ -156,7 +154,7 @@ def list_(bank):
|
|||
_, keys = api.kv.get(bank + "/", keys=True, separator="/")
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error getting the key "{0}": {1}'.format(bank, exc)
|
||||
'There was an error getting the key "{}": {}'.format(bank, exc)
|
||||
)
|
||||
if keys is None:
|
||||
keys = []
|
||||
|
@ -178,10 +176,10 @@ def contains(bank, key):
|
|||
return True # any key could be a branch and a leaf at the same time in Consul
|
||||
else:
|
||||
try:
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
c_key = "{}/{}".format(bank, key)
|
||||
_, value = api.kv.get(c_key)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
"There was an error getting the key, {0}: {1}".format(c_key, exc)
|
||||
"There was an error getting the key, {}: {}".format(c_key, exc)
|
||||
)
|
||||
return value is not None
|
||||
|
|
1  salt/cache/redis_cache.py (vendored)
|
@ -141,7 +141,6 @@ import logging
|
|||
from salt.exceptions import SaltCacheError
|
||||
|
||||
# Import salt
|
||||
from salt.ext.six.moves import range
|
||||
|
||||
try:
|
||||
import redis
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
The management of salt command line utilities are stored in here
|
||||
"""
|
||||
|
|
|
@ -1,6 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
import salt.cli.caller
|
||||
|
|
|
@ -1,11 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import salt.defaults.exitcodes # pylint: disable=W0611
|
||||
import salt.utils.parsers
|
||||
import salt.utils.profile
|
||||
from salt.exceptions import SaltClientError
|
||||
from salt.ext import six
|
||||
from salt.utils.verify import check_user, verify_log
|
||||
|
||||
|
||||
|
@ -55,4 +51,4 @@ class SaltRun(salt.utils.parsers.SaltRunOptionParser):
|
|||
)
|
||||
|
||||
except SaltClientError as exc:
|
||||
raise SystemExit(six.text_type(exc))
|
||||
raise SystemExit(str(exc))
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
salt.cli.spm
|
||||
~~~~~~~~~~~~~
|
||||
|
@ -8,10 +7,7 @@
|
|||
.. versionadded:: 2015.8.0
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
import salt.spm
|
||||
import salt.utils.parsers as parsers
|
||||
from salt.utils.verify import verify_env, verify_log
|
||||
|
|
|
@ -1,7 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import sys
|
||||
|
||||
import salt.client.ssh
|
||||
|
|
|
@ -30,7 +30,7 @@ EX_SCP_NOT_FOUND = 14
|
|||
EX_CANTCREAT = 73
|
||||
|
||||
|
||||
class OptionsContainer(object):
|
||||
class OptionsContainer:
|
||||
"""
|
||||
An empty class for holding instance attribute values.
|
||||
"""
|
||||
|
|
|
@ -1,10 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Create ssh executor system
|
||||
"""
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
# Import python libs
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
@ -13,8 +10,6 @@ import tempfile
|
|||
from contextlib import closing
|
||||
|
||||
import salt.client.ssh
|
||||
|
||||
# Import salt libs
|
||||
import salt.client.ssh.shell
|
||||
import salt.loader
|
||||
import salt.minion
|
||||
|
@ -28,9 +23,6 @@ import salt.utils.thin
|
|||
import salt.utils.url
|
||||
import salt.utils.verify
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -41,7 +33,7 @@ class SSHState(salt.state.State):
|
|||
|
||||
def __init__(self, opts, pillar=None, wrapper=None):
|
||||
self.wrapper = wrapper
|
||||
super(SSHState, self).__init__(opts, pillar)
|
||||
super().__init__(opts, pillar)
|
||||
|
||||
def load_modules(self, data=None, proxy=None):
|
||||
"""
|
||||
|
@ -163,7 +155,7 @@ def salt_refs(data, ret=None):
|
|||
proto = "salt://"
|
||||
if ret is None:
|
||||
ret = []
|
||||
if isinstance(data, six.string_types):
|
||||
if isinstance(data, str):
|
||||
if data.startswith(proto) and data not in ret:
|
||||
ret.append(data)
|
||||
if isinstance(data, list):
|
||||
|
@ -211,7 +203,7 @@ def prep_trans_tar(
|
|||
cachedir = os.path.join("salt-ssh", id_).rstrip(os.sep)
|
||||
except AttributeError:
|
||||
# Minion ID should always be a str, but don't let an int break this
|
||||
cachedir = os.path.join("salt-ssh", six.text_type(id_)).rstrip(os.sep)
|
||||
cachedir = os.path.join("salt-ssh", str(id_)).rstrip(os.sep)
|
||||
|
||||
for saltenv in file_refs:
|
||||
# Location where files in this saltenv will be cached
|
||||
|
@ -226,7 +218,7 @@ def prep_trans_tar(
|
|||
cache_dest = os.path.join(cache_dest_root, short)
|
||||
try:
|
||||
path = file_client.cache_file(name, saltenv, cachedir=cachedir)
|
||||
except IOError:
|
||||
except OSError:
|
||||
path = ""
|
||||
if path:
|
||||
tgt = os.path.join(env_root, short)
|
||||
|
@ -237,7 +229,7 @@ def prep_trans_tar(
|
|||
continue
|
||||
try:
|
||||
files = file_client.cache_dir(name, saltenv, cachedir=cachedir)
|
||||
except IOError:
|
||||
except OSError:
|
||||
files = ""
|
||||
if files:
|
||||
for filename in files:
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
The ssh client wrapper system contains the routines that are used to alter
|
||||
how executions are run in the salt-ssh system, this allows for state routines
|
||||
|
@ -6,23 +5,16 @@ to be easily rewritten to execute in a way that makes them do the same tasks
|
|||
as ZeroMQ salt, but via ssh.
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import copy
|
||||
|
||||
import salt.client.ssh
|
||||
|
||||
# Import salt libs
|
||||
import salt.loader
|
||||
import salt.utils.data
|
||||
import salt.utils.json
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
|
||||
class FunctionWrapper(object):
|
||||
class FunctionWrapper:
|
||||
"""
|
||||
Create an object that acts like the salt function dict and makes function
|
||||
calls remotely via the SSH shell system
|
||||
|
@ -41,7 +33,7 @@ class FunctionWrapper(object):
|
|||
minion_opts=None,
|
||||
**kwargs
|
||||
):
|
||||
super(FunctionWrapper, self).__init__()
|
||||
super().__init__()
|
||||
self.cmd_prefix = cmd_prefix
|
||||
self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
|
||||
self.opts = opts
|
||||
|
@ -95,7 +87,7 @@ class FunctionWrapper(object):
|
|||
# We're in an inner FunctionWrapper as created by the code block
|
||||
# above. Reconstruct the original cmd in the form 'cmd.run' and
|
||||
# then evaluate as normal
|
||||
cmd = "{0}.{1}".format(self.cmd_prefix, cmd)
|
||||
cmd = "{}.{}".format(self.cmd_prefix, cmd)
|
||||
|
||||
if cmd in self.wfuncs:
|
||||
return self.wfuncs[cmd]
|
||||
|
@ -111,10 +103,10 @@ class FunctionWrapper(object):
|
|||
argv.extend([salt.utils.json.dumps(arg) for arg in args])
|
||||
argv.extend(
|
||||
[
|
||||
"{0}={1}".format(
|
||||
"{}={}".format(
|
||||
salt.utils.stringutils.to_str(key), salt.utils.json.dumps(val)
|
||||
)
|
||||
for key, val in six.iteritems(kwargs)
|
||||
for key, val in kwargs.items()
|
||||
]
|
||||
)
|
||||
single = salt.client.ssh.Single(
|
||||
|
@ -159,14 +151,14 @@ class FunctionWrapper(object):
|
|||
# containing only 'cmd' module calls, in that case. We don't
|
||||
# support assigning directly to prefixes in this way
|
||||
raise KeyError(
|
||||
"Cannot assign to module key {0} in the " "FunctionWrapper".format(cmd)
|
||||
"Cannot assign to module key {} in the " "FunctionWrapper".format(cmd)
|
||||
)
|
||||
|
||||
if self.cmd_prefix:
|
||||
# We're in an inner FunctionWrapper as created by the first code
|
||||
# block in __getitem__. Reconstruct the original cmd in the form
|
||||
# 'cmd.run' and then evaluate as normal
|
||||
cmd = "{0}.{1}".format(self.cmd_prefix, cmd)
|
||||
cmd = "{}.{}".format(self.cmd_prefix, cmd)
|
||||
|
||||
if cmd in self.wfuncs:
|
||||
self.wfuncs[cmd] = value
|
||||
|
|
|
@ -1,17 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Wrapper function for mine operations for salt-ssh
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import copy
|
||||
|
||||
# Import salt libs
|
||||
import salt.client.ssh
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
.. versionadded:: 2015.5.0
|
||||
|
||||
|
@ -9,13 +8,10 @@ salt-ssh calls and return the data from them.
|
|||
|
||||
No access control is needed because calls cannot originate from the minions.
|
||||
"""
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import copy
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.client.ssh
|
||||
import salt.runner
|
||||
import salt.utils.args
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Wrap the saltcheck module to copy files to ssh minion before running tests
|
||||
"""
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
@ -14,8 +11,6 @@ from contextlib import closing
|
|||
|
||||
import salt.utils.files
|
||||
import salt.utils.json
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.url
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -33,9 +28,9 @@ def update_master_cache(states, saltenv="base"):
|
|||
# Setup for copying states to gendir
|
||||
gendir = tempfile.mkdtemp()
|
||||
trans_tar = salt.utils.files.mkstemp()
|
||||
if "cp.fileclient_{0}".format(id(__opts__)) not in __context__:
|
||||
if "cp.fileclient_{}".format(id(__opts__)) not in __context__:
|
||||
__context__[
|
||||
"cp.fileclient_{0}".format(id(__opts__))
|
||||
"cp.fileclient_{}".format(id(__opts__))
|
||||
] = salt.fileclient.get_file_client(__opts__)
|
||||
|
||||
# generate cp.list_states output and save to gendir
|
||||
|
@ -64,7 +59,7 @@ def update_master_cache(states, saltenv="base"):
|
|||
log.debug("copying %s to %s", state_name, gendir)
|
||||
qualified_name = salt.utils.url.create(state_name, saltenv)
|
||||
# Duplicate cp.get_dir to gendir
|
||||
copy_result = __context__["cp.fileclient_{0}".format(id(__opts__))].get_dir(
|
||||
copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir(
|
||||
qualified_name, gendir, saltenv
|
||||
)
|
||||
if copy_result:
|
||||
|
@ -82,7 +77,7 @@ def update_master_cache(states, saltenv="base"):
|
|||
else:
|
||||
qualified_name = salt.utils.url.create(state_name, saltenv)
|
||||
copy_result = __context__[
|
||||
"cp.fileclient_{0}".format(id(__opts__))
|
||||
"cp.fileclient_{}".format(id(__opts__))
|
||||
].get_dir(qualified_name, gendir, saltenv)
|
||||
if copy_result:
|
||||
copy_result = [
|
||||
|
@ -117,7 +112,7 @@ def update_master_cache(states, saltenv="base"):
|
|||
# Clean up local tar
|
||||
try:
|
||||
os.remove(trans_tar)
|
||||
except (OSError, IOError):
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
tar_path = os.path.join(thin_dir, os.path.basename(trans_tar))
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Primary interfaces for the salt-cloud system
|
||||
"""
|
||||
|
@ -11,14 +10,11 @@ Primary interfaces for the salt-cloud system
|
|||
# The cli, master and cloud configs will merge for opts
|
||||
# the VM data will be in opts['profiles']
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Import salt libs
|
||||
import salt.cloud
|
||||
import salt.config
|
||||
import salt.defaults.exitcodes
|
||||
|
@ -28,10 +24,6 @@ import salt.utils.cloud
|
|||
import salt.utils.parsers
|
||||
import salt.utils.user
|
||||
from salt.exceptions import SaltCloudException, SaltCloudSystemExit
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import input
|
||||
from salt.utils.verify import check_user, verify_env, verify_log, verify_log_files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -72,7 +64,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
if logfile is not None:
|
||||
# Logfile is not using Syslog, verify
|
||||
verify_log_files([logfile], salt_master_user)
|
||||
except (IOError, OSError) as err:
|
||||
except OSError as err:
|
||||
log.error("Error while verifying the environment: %s", err)
|
||||
sys.exit(err.errno)
|
||||
|
||||
|
@ -185,7 +177,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
if map_file is not None:
|
||||
if names != ():
|
||||
msg = (
|
||||
"Supplying a mapfile, '{0}', in addition to instance names {1} "
|
||||
"Supplying a mapfile, '{}', in addition to instance names {} "
|
||||
"with the '--destroy' or '-d' function is not supported. "
|
||||
"Please choose to delete either the entire map file or individual "
|
||||
"instances.".format(map_file, names)
|
||||
|
@ -205,12 +197,12 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
|
||||
msg = "The following virtual machines are set to be destroyed:\n"
|
||||
names = set()
|
||||
for alias, drivers in six.iteritems(matching):
|
||||
msg += " {0}:\n".format(alias)
|
||||
for driver, vms in six.iteritems(drivers):
|
||||
msg += " {0}:\n".format(driver)
|
||||
for alias, drivers in matching.items():
|
||||
msg += " {}:\n".format(alias)
|
||||
for driver, vms in drivers.items():
|
||||
msg += " {}:\n".format(driver)
|
||||
for name in vms:
|
||||
msg += " {0}\n".format(name)
|
||||
msg += " {}\n".format(name)
|
||||
names.add(name)
|
||||
# pylint: disable=broad-except
|
||||
try:
|
||||
|
@ -238,7 +230,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
machines = []
|
||||
msg = (
|
||||
"The following virtual machines are set to be actioned with "
|
||||
'"{0}":\n'.format(self.options.action)
|
||||
'"{}":\n'.format(self.options.action)
|
||||
)
|
||||
for name in names:
|
||||
if "=" in name:
|
||||
|
@ -246,7 +238,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
key, value = name.split("=", 1)
|
||||
kwargs[key] = value
|
||||
else:
|
||||
msg += " {0}\n".format(name)
|
||||
msg += " {}\n".format(name)
|
||||
machines.append(name)
|
||||
names = machines
|
||||
|
||||
|
@ -272,7 +264,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
self.error(
|
||||
"Any arguments passed to --function need to be passed "
|
||||
"as kwargs. Ex: image=ami-54cf5c3d. Remaining "
|
||||
"arguments: {0}".format(args)
|
||||
"arguments: {}".format(args)
|
||||
)
|
||||
# pylint: disable=broad-except
|
||||
try:
|
||||
|
@ -295,7 +287,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
|
||||
elif self.options.set_password:
|
||||
username = self.credential_username
|
||||
provider_name = "salt.cloud.provider.{0}".format(self.credential_provider)
|
||||
provider_name = "salt.cloud.provider.{}".format(self.credential_provider)
|
||||
# TODO: check if provider is configured
|
||||
# set the password
|
||||
salt.utils.cloud.store_password_in_keyring(provider_name, username)
|
||||
|
@ -315,8 +307,8 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
if "errors" in dmap:
|
||||
# display profile errors
|
||||
msg += "Found the following errors:\n"
|
||||
for profile_name, error in six.iteritems(dmap["errors"]):
|
||||
msg += " {0}: {1}\n".format(profile_name, error)
|
||||
for profile_name, error in dmap["errors"].items():
|
||||
msg += " {}: {}\n".format(profile_name, error)
|
||||
sys.stderr.write(msg)
|
||||
sys.stderr.flush()
|
||||
|
||||
|
@ -324,19 +316,19 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
if "existing" in dmap:
|
||||
msg += "The following virtual machines already exist:\n"
|
||||
for name in dmap["existing"]:
|
||||
msg += " {0}\n".format(name)
|
||||
msg += " {}\n".format(name)
|
||||
|
||||
if dmap["create"]:
|
||||
msg += "The following virtual machines are set to be " "created:\n"
|
||||
for name in dmap["create"]:
|
||||
msg += " {0}\n".format(name)
|
||||
msg += " {}\n".format(name)
|
||||
|
||||
if "destroy" in dmap:
|
||||
msg += (
|
||||
"The following virtual machines are set to be " "destroyed:\n"
|
||||
)
|
||||
for name in dmap["destroy"]:
|
||||
msg += " {0}\n".format(name)
|
||||
msg += " {}\n".format(name)
|
||||
|
||||
if not dmap["create"] and not dmap.get("destroy", None):
|
||||
if not dmap.get("existing", None):
|
||||
|
@ -389,7 +381,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
if args:
|
||||
self.error(
|
||||
"Any arguments passed to --bootstrap need to be passed as "
|
||||
"kwargs. Ex: ssh_username=larry. Remaining arguments: {0}".format(
|
||||
"kwargs. Ex: ssh_username=larry. Remaining arguments: {}".format(
|
||||
args
|
||||
)
|
||||
)
|
||||
|
@ -425,7 +417,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
|
|||
# This is a salt cloud system exit
|
||||
if exc.exit_code > 0:
|
||||
# the exit code is bigger than 0, it's an error
|
||||
msg = "Error: {0}".format(msg)
|
||||
msg = "Error: {}".format(msg)
|
||||
self.exit(exc.exit_code, msg.format(exc).rstrip() + "\n")
|
||||
# It's not a system exit but it's an error we can
|
||||
# handle
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
salt.cloud.exceptions
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
@ -7,9 +6,7 @@
|
|||
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import salt libs
|
||||
import salt.defaults.exitcodes
|
||||
from salt.exceptions import SaltException
|
||||
|
||||
|
@ -27,7 +24,7 @@ class SaltCloudSystemExit(SaltCloudException):
|
|||
|
||||
def __init__(self, message, exit_code=salt.defaults.exitcodes.EX_GENERIC):
|
||||
SaltCloudException.__init__(self, message)
|
||||
self.message = "{0} [WARNING: salt.cloud.exceptions is deprecated. Please migrate to salt.exceptions!]".format(
|
||||
self.message = "{} [WARNING: salt.cloud.exceptions is deprecated. Please migrate to salt.exceptions!]".format(
|
||||
message
|
||||
)
|
||||
self.exit_code = exit_code
|
||||
|
|
|
@ -9,6 +9,7 @@ import re
|
|||
import sys
|
||||
import time
|
||||
import types
|
||||
import urllib.parse
|
||||
from copy import deepcopy
|
||||
|
||||
import salt.defaults.exitcodes
|
||||
|
@ -28,12 +29,6 @@ import salt.utils.xdg
|
|||
import salt.utils.yaml
|
||||
import salt.utils.zeromq
|
||||
|
||||
# pylint: disable=import-error,no-name-in-module
|
||||
from salt.ext.six.moves.urllib.parse import urlparse
|
||||
|
||||
# pylint: enable=import-error,no-name-in-module
|
||||
|
||||
|
||||
try:
|
||||
import psutil
|
||||
|
||||
|
@ -2387,7 +2382,7 @@ def syndic_config(
|
|||
]
|
||||
for config_key in ("log_file", "key_logfile", "syndic_log_file"):
|
||||
# If this is not a URI and instead a local path
|
||||
if urlparse(opts.get(config_key, "")).scheme == "":
|
||||
if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "":
|
||||
prepend_root_dirs.append(config_key)
|
||||
prepend_root_dir(opts, prepend_root_dirs)
|
||||
return opts
|
||||
|
@ -2638,7 +2633,7 @@ def cloud_config(
|
|||
|
||||
# prepend root_dir
|
||||
prepend_root_dirs = ["cachedir"]
|
||||
if "log_file" in opts and urlparse(opts["log_file"]).scheme == "":
|
||||
if "log_file" in opts and urllib.parse.urlparse(opts["log_file"]).scheme == "":
|
||||
prepend_root_dirs.append(opts["log_file"])
|
||||
prepend_root_dir(opts, prepend_root_dirs)
|
||||
|
||||
|
@ -3707,7 +3702,7 @@ def apply_minion_config(
|
|||
|
||||
# These can be set to syslog, so, not actual paths on the system
|
||||
for config_key in ("log_file", "key_logfile"):
|
||||
if urlparse(opts.get(config_key, "")).scheme == "":
|
||||
if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "":
|
||||
prepend_root_dirs.append(config_key)
|
||||
|
||||
prepend_root_dir(opts, prepend_root_dirs)
|
||||
|
@ -3915,7 +3910,7 @@ def apply_master_config(overrides=None, defaults=None):
|
|||
if log_setting is None:
|
||||
continue
|
||||
|
||||
if urlparse(log_setting).scheme == "":
|
||||
if urllib.parse.urlparse(log_setting).scheme == "":
|
||||
prepend_root_dirs.append(config_key)
|
||||
|
||||
prepend_root_dir(opts, prepend_root_dirs)
|
||||
|
@ -4116,7 +4111,7 @@ def apply_spm_config(overrides, defaults):
|
|||
if log_setting is None:
|
||||
continue
|
||||
|
||||
if urlparse(log_setting).scheme == "":
|
||||
if urllib.parse.urlparse(log_setting).scheme == "":
|
||||
prepend_root_dirs.append(config_key)
|
||||
|
||||
prepend_root_dir(opts, prepend_root_dirs)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
@ -9,10 +8,7 @@
|
|||
Common salt configuration schemas
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import salt libs
|
||||
from salt.utils.schema import ArrayItem, OneOfItem, Schema, StringItem
|
||||
|
||||
|
||||
|
@ -30,17 +26,15 @@ class DefaultIncludeConfig(StringItem):
|
|||
description = __doc__
|
||||
|
||||
def __init__(self, default=None, pattern=None, **kwargs):
|
||||
default = "{0}/*.conf".format(self.__confd_directory__)
|
||||
default = "{}/*.conf".format(self.__confd_directory__)
|
||||
pattern = r"(?:.*)/\*\.conf"
|
||||
super(DefaultIncludeConfig, self).__init__(
|
||||
default=default, pattern=pattern, **kwargs
|
||||
)
|
||||
super().__init__(default=default, pattern=pattern, **kwargs)
|
||||
|
||||
def __validate_attributes__(self):
|
||||
self.__doc__ = DefaultIncludeConfig.__doc__.format(
|
||||
self.__target__, self.__confd_directory__
|
||||
)
|
||||
super(DefaultIncludeConfig, self).__validate_attributes__()
|
||||
super().__validate_attributes__()
|
||||
|
||||
def __get_description__(self):
|
||||
return self.__doc__.format(self.__target__, self.__confd_directory__)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
||||
|
||||
|
@ -9,10 +8,7 @@
|
|||
ESX Cluster configuration schemas
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
from salt.utils.schema import (
|
||||
AnyOfItem,
|
||||
ArrayItem,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
||||
|
||||
|
@ -9,10 +8,7 @@
|
|||
ESX Datacenter configuration schemas
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
from salt.utils.schema import ArrayItem, IntegerItem, Schema, StringItem
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
||||
|
||||
|
@ -9,10 +8,7 @@
|
|||
ESXi host configuration schemas
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
from salt.utils.schema import (
|
||||
ArrayItem,
|
||||
BooleanItem,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: :email:`Agnes Tevesz (agnes.tevesz@morganstanley.com)`
|
||||
|
||||
|
@ -8,8 +7,6 @@
|
|||
ESX Virtual Machine configuration schemas
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
from salt.utils.schema import (
|
||||
AnyOfItem,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
@ -8,12 +7,8 @@
|
|||
Minion configuration schema
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
from salt.config.schemas.common import IncludeConfig, MinionDefaultInclude
|
||||
|
||||
# Import salt libs
|
||||
from salt.utils.schema import IPv4Item, Schema
|
||||
|
||||
# XXX: THIS IS WAY TOO MINIMAL, BUT EXISTS TO IMPLEMENT salt-ssh
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
@ -9,12 +8,8 @@
|
|||
Salt SSH related configuration schemas
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
from salt.config.schemas.minion import MinionConfiguration
|
||||
|
||||
# Import Salt libs
|
||||
from salt.utils.schema import (
|
||||
AnyOfItem,
|
||||
BooleanItem,
|
||||
|
|
|
@ -7,8 +7,6 @@ import logging
|
|||
import sys
|
||||
from collections.abc import Iterable, Mapping, Sequence
|
||||
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Default values, to be imported elsewhere in Salt code
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Classification of Salt exit codes. These are intended to augment
|
||||
universal exit codes (found in Python's `os` module with the `EX_`
|
||||
|
|
|
@ -7,7 +7,6 @@ import logging
|
|||
import time
|
||||
|
||||
import salt.defaults.exitcodes
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -42,12 +41,9 @@ class SaltException(Exception):
|
|||
|
||||
if not isinstance(message, str):
|
||||
message = str(message)
|
||||
# pylint: disable=incompatible-py3-code,undefined-variable
|
||||
if six.PY3 or isinstance(message, unicode):
|
||||
super().__init__(salt.utils.stringutils.to_str(message))
|
||||
self.message = self.strerror = message
|
||||
# pylint: enable=incompatible-py3-code,undefined-variable
|
||||
elif isinstance(message, str):
|
||||
super().__init__(salt.utils.stringutils.to_str(message))
|
||||
self.message = self.strerror = message
|
||||
if isinstance(message, str):
|
||||
super().__init__(message)
|
||||
self.message = self.strerror = salt.utils.stringutils.to_unicode(message)
|
||||
else:
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Executors Directory
|
||||
"""
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Direct call executor module
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
|
||||
def execute(opts, data, func, args, kwargs):
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Splay function calls across targeted minions
|
||||
"""
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
|
|
@ -1,17 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Sudo executor module
|
||||
"""
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import shlex
|
||||
|
||||
import salt.syspaths
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.json
|
||||
import salt.utils.path
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import shlex_quote as _cmd_quote
|
||||
|
||||
__virtualname__ = "sudo"
|
||||
|
||||
|
@ -67,9 +62,9 @@ def execute(opts, data, func, args, kwargs):
|
|||
if data["fun"] in ("state.sls", "state.highstate", "state.apply"):
|
||||
kwargs["concurrent"] = True
|
||||
for arg in args:
|
||||
cmd.append(_cmd_quote(six.text_type(arg)))
|
||||
cmd.append(shlex.quote(str(arg)))
|
||||
for key in kwargs:
|
||||
cmd.append(_cmd_quote("{0}={1}".format(key, kwargs[key])))
|
||||
cmd.append(shlex.quote("{}={}".format(key, kwargs[key])))
|
||||
|
||||
cmd_ret = __salt__["cmd.run_all"](cmd, use_vt=True, python_shell=False)
|
||||
|
||||
|
|
|
@ -17,7 +17,6 @@ import salt.utils.files
|
|||
import salt.utils.path
|
||||
import salt.utils.url
|
||||
import salt.utils.versions
|
||||
from salt.ext import six
|
||||
from salt.utils.args import get_function_argspec as _argspec
|
||||
from salt.utils.decorators import ensure_unicode_args
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
The backend for serving files from the Azure blob storage service.
|
||||
|
||||
|
@ -46,15 +45,12 @@ permissions.
|
|||
Do not include the leading ? for sas_token if generated from the web
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import base64
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Import salt libs
|
||||
import salt.fileserver
|
||||
import salt.utils.files
|
||||
import salt.utils.gzip_util
|
||||
|
@ -62,9 +58,6 @@ import salt.utils.hashutils
|
|||
import salt.utils.json
|
||||
import salt.utils.path
|
||||
import salt.utils.stringutils
|
||||
|
||||
# Import third party libs
|
||||
from salt.ext import six
|
||||
from salt.utils.versions import LooseVersion
|
||||
|
||||
try:
|
||||
|
@ -165,7 +158,7 @@ def serve_file(load, fnd):
|
|||
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
||||
fp_.seek(load["loc"])
|
||||
data = fp_.read(__opts__["file_buffer_size"])
|
||||
if data and six.PY3 and not salt.utils.files.is_binary(fpath):
|
||||
if data and not salt.utils.files.is_binary(fpath):
|
||||
data = data.decode(__salt_system_encoding__)
|
||||
if gzip and data:
|
||||
data = salt.utils.gzip_util.compress(data, gzip)
|
||||
|
@ -286,7 +279,7 @@ def file_hash(load, fnd):
|
|||
hashdest = salt.utils.path.join(
|
||||
hash_cachedir,
|
||||
load["saltenv"],
|
||||
"{0}.hash.{1}".format(relpath, __opts__["hash_type"]),
|
||||
"{}.hash.{}".format(relpath, __opts__["hash_type"]),
|
||||
)
|
||||
if not os.path.isfile(hashdest):
|
||||
if not os.path.exists(os.path.dirname(hashdest)):
|
||||
|
@ -350,7 +343,7 @@ def _get_container_path(container):
|
|||
and saltenv, separated by underscores
|
||||
"""
|
||||
root = os.path.join(__opts__["cachedir"], "azurefs")
|
||||
container_dir = "{0}_{1}_{2}".format(
|
||||
container_dir = "{}_{}_{}".format(
|
||||
container.get("account_name", ""),
|
||||
container.get("container_name", ""),
|
||||
container.get("saltenv", "base"),
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Git Fileserver Backend
|
||||
|
||||
|
@ -48,12 +47,9 @@ Walkthrough <tutorial-gitfs>`.
|
|||
.. _GitPython: https://github.com/gitpython-developers/GitPython
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.gitfs
|
||||
from salt.exceptions import FileserverConfigError
|
||||
|
||||
|
|
|
@ -55,7 +55,6 @@ import salt.utils.stringutils
|
|||
import salt.utils.url
|
||||
import salt.utils.versions
|
||||
from salt.exceptions import FileserverConfigError
|
||||
from salt.ext import six
|
||||
from salt.utils.event import tagify
|
||||
|
||||
VALID_BRANCH_METHODS = ("branches", "bookmarks", "mixed")
|
||||
|
@ -748,7 +747,7 @@ def serve_file(load, fnd):
|
|||
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
||||
fp_.seek(load["loc"])
|
||||
data = fp_.read(__opts__["file_buffer_size"])
|
||||
if data and six.PY3 and not salt.utils.files.is_binary(fpath):
|
||||
if data and not salt.utils.files.is_binary(fpath):
|
||||
data = data.decode(__salt_system_encoding__)
|
||||
if gzip and data:
|
||||
data = salt.utils.gzip_util.compress(data, gzip)
|
||||
|
|
|
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Fileserver backend which serves files pushed to the Master

@@ -27,14 +26,10 @@ Other minionfs settings include: :conf_master:`minionfs_whitelist`,
.. seealso:: :ref:`tutorial-minionfs`

"""
from __future__ import absolute_import, print_function, unicode_literals

import logging

# Import python libs
import os

# Import salt libs
import salt.fileserver
import salt.utils.files
import salt.utils.gzip_util

@@ -44,9 +39,6 @@ import salt.utils.stringutils
import salt.utils.url
import salt.utils.versions

# Import third party libs
from salt.ext import six

log = logging.getLogger(__name__)


@@ -142,7 +134,7 @@ def serve_file(load, fnd):
with salt.utils.files.fopen(fpath, "rb") as fp_:
fp_.seek(load["loc"])
data = fp_.read(__opts__["file_buffer_size"])
if data and six.PY3 and not salt.utils.files.is_binary(fpath):
if data and not salt.utils.files.is_binary(fpath):
data = data.decode(__salt_system_encoding__)
if gzip and data:
data = salt.utils.gzip_util.compress(data, gzip)

@@ -192,7 +184,7 @@ def file_hash(load, fnd):
"minionfs",
"hash",
load["saltenv"],
"{0}.hash.{1}".format(fnd["rel"], __opts__["hash_type"]),
"{}.hash.{}".format(fnd["rel"], __opts__["hash_type"]),
)
# if we have a cache, serve that if the mtime hasn't changed
if os.path.exists(cache_path):

@@ -228,7 +220,7 @@ def file_hash(load, fnd):
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
# save the cache object "hash:mtime"
cache_object = "{0}:{1}".format(ret["hsum"], os.path.getmtime(path))
cache_object = "{}:{}".format(ret["hsum"], os.path.getmtime(path))
with salt.utils.files.flopen(cache_path, "w") as fp_:
fp_.write(cache_object)
return ret

@@ -83,6 +83,7 @@ import logging
import os
import pickle
import time
import urllib.parse

import salt.fileserver as fs
import salt.modules

@@ -91,13 +92,6 @@ import salt.utils.gzip_util
import salt.utils.hashutils
import salt.utils.versions

# pylint: disable=import-error,no-name-in-module,redefined-builtin
from salt.ext import six
from salt.ext.six.moves import filter
from salt.ext.six.moves.urllib.parse import quote as _quote

# pylint: enable=import-error,no-name-in-module,redefined-builtin

log = logging.getLogger(__name__)

S3_CACHE_EXPIRE = 30 # cache for 30 seconds

@@ -240,7 +234,7 @@ def serve_file(load, fnd):
with salt.utils.files.fopen(cached_file_path, "rb") as fp_:
fp_.seek(load["loc"])
data = fp_.read(__opts__["file_buffer_size"])
if data and six.PY3 and not salt.utils.files.is_binary(cached_file_path):
if data and not salt.utils.files.is_binary(cached_file_path):
data = data.decode(__salt_system_encoding__)
if gzip and data:
data = salt.utils.gzip_util.compress(data, gzip)

@@ -747,7 +741,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
service_url=service_url,
verify_ssl=verify_ssl,
location=location,
path=_quote(path),
path=urllib.parse.quote(path),
local_file=cached_file_path,
full_headers=True,
path_style=path_style,

@@ -785,7 +779,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
service_url=service_url,
verify_ssl=verify_ssl,
location=location,
path=_quote(path),
path=urllib.parse.quote(path),
local_file=cached_file_path,
path_style=path_style,
https_enable=https_enable,

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Grains plugin directory
"""

@@ -1,14 +1,10 @@
# -*- coding: utf-8 -*-
"""
Generate chronos proxy minion grains.

.. versionadded:: 2015.8.2

"""
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals

# Import Salt libs
import salt.utils.http
import salt.utils.platform

@@ -1,17 +1,12 @@
# -*- coding: utf-8 -*-
"""
Generate baseline proxy minion grains for cimc hosts.

"""

# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals

import logging

import salt.proxy.cimc

# Import Salt Libs
import salt.utils.platform

__proxyenabled__ = ["cimc"]

@@ -40,7 +40,6 @@ import salt.utils.path
import salt.utils.pkg.rpm
import salt.utils.platform
import salt.utils.stringutils
from salt.ext.six.moves import range
from salt.utils.network import _get_interfaces


@@ -1,10 +1,7 @@
# -*- coding: utf-8 -*-
"""
Detect disks
"""
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import glob
import logging
import re

@@ -12,8 +9,6 @@ import re
# Solve the Chicken and egg problem where grains need to run before any
# of the modules are loaded and are generally available for any usage.
import salt.modules.cmdmod

# Import salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.platform

@@ -40,7 +35,7 @@ def disks():
log.trace("Disk grain does not support OS")


class _geomconsts(object):
class _geomconsts:
GEOMNAME = "Geom name"
MEDIASIZE = "Mediasize"
SECTORSIZE = "Sectorsize"

@@ -96,14 +91,14 @@ def _freebsd_geom():
geom = salt.utils.path.which("geom")
ret = {"disks": {}, "ssds": []}

devices = __salt__["cmd.run"]("{0} disk list".format(geom))
devices = __salt__["cmd.run"]("{} disk list".format(geom))
devices = devices.split("\n\n")

def parse_geom_attribs(device):
tmp = {}
for line in device.split("\n"):
for attrib in _geom_attribs:
search = re.search(r"{0}:\s(.*)".format(attrib), line)
search = re.search(r"{}:\s(.*)".format(attrib), line)
if search:
value = _datavalue(
_geomconsts._datatypes.get(attrib), search.group(1)

@@ -152,7 +147,7 @@ def _linux_disks():
"not report 0 or 1",
device,
)
except IOError:
except OSError:
pass
return ret

@@ -167,7 +162,7 @@ def _windows_disks():
ret = {"disks": [], "ssds": []}

cmdret = __salt__["cmd.run_all"](
"{0} /namespace:{1} path {2} get {3} /format:table".format(
"{} /namespace:{} path {} get {} /format:table".format(
wmic, namespace, path, get
)
)

@@ -179,7 +174,7 @@ def _windows_disks():
info = line.split()
if len(info) != 2 or not info[0].isdigit() or not info[1].isdigit():
continue
device = r"\\.\PhysicalDrive{0}".format(info[0])
device = r"\\.\PhysicalDrive{}".format(info[0])
mediatype = info[1]
if mediatype == "3":
log.trace("Device %s reports itself as an HDD", device)

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Generate baseline proxy minion grains for ESXi hosts.

@@ -6,8 +5,6 @@ Generate baseline proxy minion grains for ESXi hosts.

"""

# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals

import logging

@@ -1,14 +1,6 @@
# -*- coding: utf-8 -*-

from __future__ import absolute_import, print_function, unicode_literals

# Import third party libs
import logging

# Import python libs
import os

# Import salt libs
import salt.utils.data
import salt.utils.files
import salt.utils.platform

@@ -10,12 +10,10 @@ To enable these grains set ``fibre_channel_grains: True`` in the minion config.

fibre_channel_grains: True
"""
# Import Python libs

import glob
import logging

# Import Salt libs
import salt.modules.cmdmod
import salt.utils.files
import salt.utils.platform

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Generate baseline proxy minion grains for Dell FX2 chassis.
The challenge is that most of Salt isn't bootstrapped yet,

@@ -6,7 +5,6 @@ so we need to repeat a bunch of things that would normally happen
in proxy/fx2.py--just enough to get data from the chassis to include
in grains.
"""
from __future__ import absolute_import, print_function, unicode_literals

import logging

@@ -9,12 +9,10 @@ To enable these grains set `iscsi_grains: True` in the minion config.

iscsi_grains: True
"""
# Import Python libs

import errno
import logging

# Import Salt libs
import salt.modules.cmdmod
import salt.utils.files
import salt.utils.path

@@ -2,10 +2,8 @@
Detect LVM Volumes
"""

# Import python libs
import logging

# Import salt libs
import salt.modules.cmdmod
import salt.utils.files
import salt.utils.path

@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
"""
Generate marathon proxy minion grains.

.. versionadded:: 2015.8.2

"""
from __future__ import absolute_import, print_function, unicode_literals

import salt.utils.http
import salt.utils.platform

@@ -42,9 +40,7 @@ def os_data():

def marathon():
response = salt.utils.http.query(
"{0}/v2/info".format(
__opts__["proxy"].get("base_url", "http://locahost:8080",)
),
"{}/v2/info".format(__opts__["proxy"].get("base_url", "http://locahost:8080",)),
decode_type="json",
decode=True,
)

@@ -1,13 +1,9 @@
# -*- coding: utf-8 -*-
"""
Detect MDADM RAIDs
"""
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import logging

# Import salt libs
import salt.utils.files

log = logging.getLogger(__name__)

@@ -28,7 +24,7 @@ def mdadm():
continue
if " : " in line:
devices.add(line.split(" : ")[0])
except IOError:
except OSError:
return {}

devices = sorted(devices)

@@ -14,11 +14,9 @@ metadata server set `metadata_server_grains: True` in the minion config.

"""

# Import python libs
import os
import socket

# Import salt libs
import salt.utils.data
import salt.utils.http as http
import salt.utils.json

@@ -1,16 +1,11 @@
# -*- coding: utf-8 -*-
"""
Set grains describing the minion process.
"""

# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals

import os

import salt.utils.platform

# Import salt libs
import salt.utils.user


@@ -9,12 +9,10 @@ To enable these grains set `nvme_grains: True` in the minion config.

nvme_grains: True
"""
# Import Python libs

import errno
import logging

# Import Salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.platform

@@ -1,17 +1,12 @@
# -*- coding: utf-8 -*-
"""
Generate baseline proxy minion grains for panos hosts.

"""

# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals

import logging

import salt.proxy.panos

# Import Salt Libs
import salt.utils.platform

__proxyenabled__ = ["panos"]

@@ -3,10 +3,8 @@ Grain that indicates the system is pending a reboot
See functions in salt.utils.win_system to see what conditions would indicate
a reboot is pending
"""
# Import python libs
import logging

# Import salt libs
import salt.utils.platform
import salt.utils.win_system

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#

@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
"""
Generate baseline proxy minion grains
"""
from __future__ import absolute_import, print_function, unicode_literals

import salt.utils.platform

@@ -1,8 +1,6 @@
# -*- coding: utf-8 -*-
"""
Generate baseline proxy minion grains
"""
from __future__ import absolute_import, print_function, unicode_literals

import salt.utils.platform

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
ZFS grain provider

@@ -10,16 +9,12 @@ ZFS grain provider
.. versionadded:: 2018.3.0

"""
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import logging

# Solve the Chicken and egg problem where grains need to run before any
# of the modules are loaded and are generally available for any usage.
import salt.modules.cmdmod

# Import salt libs
import salt.utils.dictupdate
import salt.utils.path
import salt.utils.platform

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
:codeauthor: Pedro Algarvio (pedro@algarvio.me)

@@ -9,7 +8,6 @@
This is where Salt's logging gets set up. Currently, the required imports
are made to assure backwards compatibility.
"""
from __future__ import absolute_import, print_function, unicode_literals

# Import severals classes/functions from salt.log.setup for backwards
# compatibility

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
salt.log.handlers
~~~~~~~~~~~~~~~~~

@@ -8,12 +7,9 @@
Custom logging handlers to be used in salt.
"""

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals

import logging

# Import salt libs
from salt._logging.handlers import (
FileHandler,
QueueHandler,

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Fluent Logging Handler
======================

@@ -73,8 +72,6 @@

"""

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals

import datetime
import logging

@@ -86,12 +83,7 @@ import types

import salt.utils.msgpack
import salt.utils.network

# Import Third party libs
from salt.ext import six
from salt.log.mixins import NewStyleClassMixIn

# Import salt libs
from salt.log.setup import LOG_LEVELS

log = logging.getLogger(__name__)

@@ -192,9 +184,9 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
self.tags = tags
self.msg_path = msg_path if msg_path else payload_type
self.msg_type = msg_type if msg_type else payload_type
format_func = "format_{0}_v{1}".format(payload_type, version).replace(".", "_")
format_func = "format_{}_v{}".format(payload_type, version).replace(".", "_")
self.format = getattr(self, format_func)
super(MessageFormatter, self).__init__(fmt=None, datefmt=None)
super().__init__(fmt=None, datefmt=None)

def formatTime(self, record, datefmt=None):
if self.payload_type == "gelf": # GELF uses epoch times

@@ -220,7 +212,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
message_dict.update({"full_message": exc_info})

# Add any extra attributes to the message field
for key, value in six.iteritems(record.__dict__):
for key, value in record.__dict__.items():
if key in (
"args",
"asctime",

@@ -245,13 +237,13 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):

# pylint: disable=incompatible-py3-code
if isinstance(
value, (six.string_types, bool, dict, float, int, list, types.NoneType)
value, ((str,), bool, dict, float, int, list, types.NoneType)
):
val = value
# pylint: enable=incompatible-py3-code
else:
val = repr(value)
message_dict.update({"{0}".format(key): val})
message_dict.update({"{}".format(key): val})
return message_dict

def format_gelf_v1_1(self, record):

@@ -273,7 +265,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
message_dict.update({"full_message": exc_info})

# Add any extra attributes to the message field
for key, value in six.iteritems(record.__dict__):
for key, value in record.__dict__.items():
if key in (
"args",
"asctime",

@@ -298,14 +290,14 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):

# pylint: disable=incompatible-py3-code
if isinstance(
value, (six.string_types, bool, dict, float, int, list, types.NoneType)
value, ((str,), bool, dict, float, int, list, types.NoneType)
):
val = value
# pylint: enable=incompatible-py3-code
else:
val = repr(value)
# GELF spec require "non-standard" fields to be prefixed with '_' (underscore).
message_dict.update({"_{0}".format(key): val})
message_dict.update({"_{}".format(key): val})

return message_dict

@@ -327,7 +319,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
"processName": record.processName,
},
"@message": record.getMessage(),
"@source": "{0}://{1}/{2}".format(self.msg_type, host, self.msg_path),
"@source": "{}://{}/{}".format(self.msg_type, host, self.msg_path),
"@source_host": host,
"@source_path": self.msg_path,
"@tags": self.tags,

@@ -338,7 +330,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
message_dict["@fields"]["exc_info"] = self.formatException(record.exc_info)

# Add any extra attributes to the message field
for key, value in six.iteritems(record.__dict__):
for key, value in record.__dict__.items():
if key in (
"args",
"asctime",

@@ -371,7 +363,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
message_dict["@fields"][key] = value
continue

if isinstance(value, (six.string_types, bool, dict, float, int, list)):
if isinstance(value, ((str,), bool, dict, float, int, list)):
message_dict["@fields"][key] = value
continue

@@ -403,7 +395,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
message_dict["exc_info"] = self.formatException(record.exc_info)

# Add any extra attributes to the message field
for key, value in six.iteritems(record.__dict__):
for key, value in record.__dict__.items():
if key in (
"args",
"asctime",

@@ -436,7 +428,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
message_dict[key] = value
continue

if isinstance(value, (six.string_types, bool, dict, float, int, list)):
if isinstance(value, ((str,), bool, dict, float, int, list)):
message_dict[key] = value
continue

@@ -470,7 +462,7 @@ class FluentHandler(logging.Handler):
self.release()


class FluentSender(object):
class FluentSender:
def __init__(
self,
tag,

@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Log4Mongo Logging Handler
|
||||
=========================
|
||||
|
@ -34,18 +33,13 @@
|
|||
This work was inspired by the Salt logging handlers for LogStash and
|
||||
Sentry and by the log4mongo Python implementation.
|
||||
"""
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import socket
|
||||
|
||||
# Import salt libs
|
||||
from salt.ext import six
|
||||
from salt.log.mixins import NewStyleClassMixIn
|
||||
from salt.log.setup import LOG_LEVELS
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
from log4mongo.handlers import MongoHandler, MongoFormatter
|
||||
|
||||
|
@ -84,7 +78,7 @@ def setup_handlers():
|
|||
}
|
||||
|
||||
config_opts = {}
|
||||
for config_opt, arg_name in six.iteritems(config_fields):
|
||||
for config_opt, arg_name in config_fields.items():
|
||||
config_opts[arg_name] = __opts__[handler_id].get(config_opt)
|
||||
|
||||
config_opts["level"] = LOG_LEVELS[
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Logstash Logging Handler
|
||||
========================
|
||||
|
@ -155,8 +154,6 @@
|
|||
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
|
@ -166,12 +163,7 @@ import os
|
|||
import salt.utils.json
|
||||
import salt.utils.network
|
||||
import salt.utils.stringutils
|
||||
|
||||
# Import Third party libs
|
||||
from salt.ext import six
|
||||
from salt.log.mixins import NewStyleClassMixIn
|
||||
|
||||
# Import salt libs
|
||||
from salt.log.setup import LOG_LEVELS
|
||||
|
||||
try:
|
||||
|
@ -274,8 +266,8 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
|||
self.msg_path = msg_path
|
||||
self.msg_type = msg_type
|
||||
self.version = version
|
||||
self.format = getattr(self, "format_v{0}".format(version))
|
||||
super(LogstashFormatter, self).__init__(fmt=None, datefmt=None)
|
||||
self.format = getattr(self, "format_v{}".format(version))
|
||||
super().__init__(fmt=None, datefmt=None)
|
||||
|
||||
def formatTime(self, record, datefmt=None):
|
||||
return datetime.datetime.utcfromtimestamp(record.created).isoformat()[:-3] + "Z"
|
||||
|
@ -295,7 +287,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
|||
"processName": record.processName,
|
||||
},
|
||||
"@message": record.getMessage(),
|
||||
"@source": "{0}://{1}/{2}".format(self.msg_type, host, self.msg_path),
|
||||
"@source": "{}://{}/{}".format(self.msg_type, host, self.msg_path),
|
||||
"@source_host": host,
|
||||
"@source_path": self.msg_path,
|
||||
"@tags": ["salt"],
|
||||
|
@ -306,7 +298,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
|||
message_dict["@fields"]["exc_info"] = self.formatException(record.exc_info)
|
||||
|
||||
# Add any extra attributes to the message field
|
||||
for key, value in six.iteritems(record.__dict__):
|
||||
for key, value in record.__dict__.items():
|
||||
if key in (
|
||||
"args",
|
||||
"asctime",
|
||||
|
@ -339,7 +331,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
|||
message_dict["@fields"][key] = value
|
||||
continue
|
||||
|
||||
if isinstance(value, (six.string_types, bool, dict, float, int, list)):
|
||||
if isinstance(value, ((str,), bool, dict, float, int, list)):
|
||||
message_dict["@fields"][key] = value
|
||||
continue
|
||||
|
||||
|
@ -368,7 +360,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
|||
message_dict["exc_info"] = self.formatException(record.exc_info)
|
||||
|
||||
# Add any extra attributes to the message field
|
||||
for key, value in six.iteritems(record.__dict__):
|
||||
for key, value in record.__dict__.items():
|
||||
if key in (
|
||||
"args",
|
||||
"asctime",
|
||||
|
@ -401,7 +393,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
|||
message_dict[key] = value
|
||||
continue
|
||||
|
||||
if isinstance(value, (six.string_types, bool, dict, float, int, list)):
|
||||
if isinstance(value, ((str,), bool, dict, float, int, list)):
|
||||
message_dict[key] = value
|
||||
continue
|
||||
|
||||
|
@ -424,7 +416,7 @@ class ZMQLogstashHander(logging.Handler, NewStyleClassMixIn):
|
|||
"""
|
||||
|
||||
def __init__(self, address, level=logging.NOTSET, zmq_hwm=1000):
|
||||
super(ZMQLogstashHander, self).__init__(level=level)
|
||||
super().__init__(level=level)
|
||||
self._context = self._publisher = None
|
||||
self._address = address
|
||||
self._zmq_hwm = zmq_hwm
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Sentry Logging Handler
|
||||
======================
|
||||
|
@ -85,17 +84,13 @@
|
|||
.. _`Raven`: https://raven.readthedocs.io
|
||||
.. _`Raven client documentation`: https://raven.readthedocs.io/en/latest/config/index.html#client-arguments
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import python libs
|
||||
import logging
|
||||
import re
|
||||
|
||||
# Import salt libs
|
||||
import salt.loader
|
||||
from salt.log import LOG_LEVELS
|
||||
|
||||
# Import 3rd party libs
|
||||
try:
|
||||
import raven
|
||||
from raven.handlers.logging import SentryHandler
|
||||
|
@ -138,9 +133,7 @@ def setup_handlers():
|
|||
transport_registry = TransportRegistry(default_transports)
|
||||
url = urlparse(dsn)
|
||||
if not transport_registry.supported_scheme(url.scheme):
|
||||
raise ValueError(
|
||||
"Unsupported Sentry DSN scheme: {0}".format(url.scheme)
|
||||
)
|
||||
raise ValueError("Unsupported Sentry DSN scheme: {}".format(url.scheme))
|
||||
except ValueError as exc:
|
||||
log.info("Raven failed to parse the configuration provided DSN: %s", exc)
|
||||
|
||||
|
@ -217,7 +210,7 @@ def setup_handlers():
|
|||
if exclude_patterns:
|
||||
filter_regexes = [re.compile(pattern) for pattern in exclude_patterns]
|
||||
|
||||
class FilterExcludedMessages(object):
|
||||
class FilterExcludedMessages:
|
||||
@staticmethod
|
||||
def filter(record):
|
||||
m = record.getMessage()
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
@ -11,10 +10,7 @@
|
|||
Some mix-in classes to be used in salt's logging
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
# pylint: disable=unused-import
|
||||
from salt._logging.mixins import (
|
||||
ExcInfoOnLogLevelFormatMixin as ExcInfoOnLogLevelFormatMixIn,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Metaproxy Directory
|
||||
"""
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Execution Module Directory
|
||||
"""
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Manage account locks on AIX systems
|
||||
|
||||
|
@ -7,7 +6,6 @@ Manage account locks on AIX systems
|
|||
:depends: none
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import python librarie
|
||||
import logging
|
||||
|
@ -43,7 +41,7 @@ def login_failures(user):
|
|||
salt <minion_id> shadow.login_failures ALL
|
||||
"""
|
||||
|
||||
cmd = "lsuser -a unsuccessful_login_count {0}".format(user)
|
||||
cmd = "lsuser -a unsuccessful_login_count {}".format(user)
|
||||
cmd += " | grep -E 'unsuccessful_login_count=([3-9]|[0-9][0-9]+)'"
|
||||
out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=True)
|
||||
|
||||
|
@ -67,7 +65,7 @@ def locked(user):
|
|||
salt <minion_id> shadow.locked ALL
|
||||
"""
|
||||
|
||||
cmd = "lsuser -a account_locked {0}".format(user)
|
||||
cmd = "lsuser -a account_locked {}".format(user)
|
||||
cmd += ' | grep "account_locked=true"'
|
||||
out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=True)
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Support for Apache
|
||||
|
||||
|
@ -9,43 +8,18 @@ Support for Apache
|
|||
Debian-based system is detected.
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import (
|
||||
absolute_import,
|
||||
generators,
|
||||
print_function,
|
||||
unicode_literals,
|
||||
with_statement,
|
||||
)
|
||||
|
||||
import io
|
||||
import logging
|
||||
import re
|
||||
import urllib.request
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.data
|
||||
import salt.utils.files
|
||||
import salt.utils.path
|
||||
import salt.utils.stringutils
|
||||
from salt.exceptions import SaltException
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error,no-name-in-module
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import cStringIO
|
||||
from salt.ext.six.moves.urllib.error import URLError
|
||||
from salt.ext.six.moves.urllib.request import (
|
||||
HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
|
||||
)
|
||||
from salt.ext.six.moves.urllib.request import (
|
||||
HTTPDigestAuthHandler as _HTTPDigestAuthHandler,
|
||||
)
|
||||
from salt.ext.six.moves.urllib.request import build_opener as _build_opener
|
||||
from salt.ext.six.moves.urllib.request import install_opener as _install_opener
|
||||
from salt.ext.six.moves.urllib.request import urlopen as _urlopen
|
||||
|
||||
# pylint: enable=import-error,no-name-in-module
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -86,7 +60,7 @@ def version():
|
|||
|
||||
salt '*' apache.version
|
||||
"""
|
||||
cmd = "{0} -v".format(_detect_os())
|
||||
cmd = "{} -v".format(_detect_os())
|
||||
out = __salt__["cmd.run"](cmd).splitlines()
|
||||
ret = out[0].split(": ")
|
||||
return ret[1]
|
||||
|
@ -102,7 +76,7 @@ def fullversion():
|
|||
|
||||
salt '*' apache.fullversion
|
||||
"""
|
||||
cmd = "{0} -V".format(_detect_os())
|
||||
cmd = "{} -V".format(_detect_os())
|
||||
ret = {}
|
||||
ret["compiled_with"] = []
|
||||
out = __salt__["cmd.run"](cmd).splitlines()
|
||||
|
@ -131,7 +105,7 @@ def modules():
|
|||
|
||||
salt '*' apache.modules
|
||||
"""
|
||||
cmd = "{0} -M".format(_detect_os())
|
||||
cmd = "{} -M".format(_detect_os())
|
||||
ret = {}
|
||||
ret["static"] = []
|
||||
ret["shared"] = []
|
||||
|
@ -157,7 +131,7 @@ def servermods():
|
|||
|
||||
salt '*' apache.servermods
|
||||
"""
|
||||
cmd = "{0} -l".format(_detect_os())
|
||||
cmd = "{} -l".format(_detect_os())
|
||||
ret = []
|
||||
out = __salt__["cmd.run"](cmd).splitlines()
|
||||
for line in out:
|
||||
|
@ -179,7 +153,7 @@ def directives():
|
|||
|
||||
salt '*' apache.directives
|
||||
"""
|
||||
cmd = "{0} -L".format(_detect_os())
|
||||
cmd = "{} -L".format(_detect_os())
|
||||
ret = {}
|
||||
out = __salt__["cmd.run"](cmd)
|
||||
out = out.replace("\n\t", "\t")
|
||||
|
@ -206,7 +180,7 @@ def vhosts():
|
|||
|
||||
salt -t 10 '*' apache.vhosts
|
||||
"""
|
||||
cmd = "{0} -S".format(_detect_os())
|
||||
cmd = "{} -S".format(_detect_os())
|
||||
ret = {}
|
||||
namevhost = ""
|
||||
out = __salt__["cmd.run"](cmd)
|
||||
|
@ -247,9 +221,9 @@ def signal(signal=None):
|
|||
return
|
||||
# Make sure you use the right arguments
|
||||
if signal in valid_signals:
|
||||
arguments = " -k {0}".format(signal)
|
||||
arguments = " -k {}".format(signal)
|
||||
else:
|
||||
arguments = " {0}".format(signal)
|
||||
arguments = " {}".format(signal)
|
||||
cmd = _detect_os() + arguments
|
||||
out = __salt__["cmd.run_all"](cmd)
|
||||
|
||||
|
@ -263,7 +237,7 @@ def signal(signal=None):
|
|||
ret = out["stdout"].strip()
|
||||
# No output for something like: apachectl graceful
|
||||
else:
|
||||
ret = 'Command: "{0}" completed successfully!'.format(cmd)
|
||||
ret = 'Command: "{}" completed successfully!'.format(cmd)
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -352,28 +326,28 @@ def server_status(profile="default"):
|
|||
|
||||
# Get configuration from pillar
|
||||
url = __salt__["config.get"](
|
||||
"apache.server-status:{0}:url".format(profile), "http://localhost/server-status"
|
||||
"apache.server-status:{}:url".format(profile), "http://localhost/server-status"
|
||||
)
|
||||
user = __salt__["config.get"]("apache.server-status:{0}:user".format(profile), "")
|
||||
passwd = __salt__["config.get"]("apache.server-status:{0}:pass".format(profile), "")
|
||||
realm = __salt__["config.get"]("apache.server-status:{0}:realm".format(profile), "")
|
||||
user = __salt__["config.get"]("apache.server-status:{}:user".format(profile), "")
|
||||
passwd = __salt__["config.get"]("apache.server-status:{}:pass".format(profile), "")
|
||||
realm = __salt__["config.get"]("apache.server-status:{}:realm".format(profile), "")
|
||||
timeout = __salt__["config.get"](
|
||||
"apache.server-status:{0}:timeout".format(profile), 5
|
||||
"apache.server-status:{}:timeout".format(profile), 5
|
||||
)
|
||||
|
||||
# create authentication handler if configuration exists
|
||||
if user and passwd:
|
||||
basic = _HTTPBasicAuthHandler()
|
||||
basic = urllib.request.HTTPBasicAuthHandler()
|
||||
basic.add_password(realm=realm, uri=url, user=user, passwd=passwd)
|
||||
digest = _HTTPDigestAuthHandler()
|
||||
digest = urllib.request.HTTPDigestAuthHandler()
|
||||
digest.add_password(realm=realm, uri=url, user=user, passwd=passwd)
|
||||
_install_opener(_build_opener(basic, digest))
|
||||
urllib.request.install_opener(urllib.request.build_opener(basic, digest))
|
||||
|
||||
# get http data
|
||||
url += "?auto"
|
||||
try:
|
||||
response = _urlopen(url, timeout=timeout).read().splitlines()
|
||||
except URLError:
|
||||
response = urllib.request.urlopen(url, timeout=timeout).read().splitlines()
|
||||
except urllib.error.URLError:
|
||||
return "error"
|
||||
|
||||
# parse the data
|
||||
|
@ -402,45 +376,45 @@ def _parse_config(conf, slot=None):
|
|||
:param conf: defined config structure
|
||||
:param slot: name of section container if needed
|
||||
"""
|
||||
ret = cStringIO()
|
||||
if isinstance(conf, six.string_types):
|
||||
ret = io.StringIO()
|
||||
if isinstance(conf, str):
|
||||
if slot:
|
||||
print("{0} {1}".format(slot, conf), file=ret, end="")
|
||||
print("{} {}".format(slot, conf), file=ret, end="")
|
||||
else:
|
||||
print("{0}".format(conf), file=ret, end="")
|
||||
print("{}".format(conf), file=ret, end="")
|
||||
elif isinstance(conf, list):
|
||||
is_section = False
|
||||
for item in conf:
|
||||
if "this" in item:
|
||||
is_section = True
|
||||
slot_this = six.text_type(item["this"])
|
||||
slot_this = str(item["this"])
|
||||
if is_section:
|
||||
print("<{0} {1}>".format(slot, slot_this), file=ret)
|
||||
print("<{} {}>".format(slot, slot_this), file=ret)
|
||||
for item in conf:
|
||||
for key, val in item.items():
|
||||
if key != "this":
|
||||
print(_parse_config(val, six.text_type(key)), file=ret)
|
||||
print("</{0}>".format(slot), file=ret)
|
||||
print(_parse_config(val, str(key)), file=ret)
|
||||
print("</{}>".format(slot), file=ret)
|
||||
else:
|
||||
for value in conf:
|
||||
print(_parse_config(value, six.text_type(slot)), file=ret)
|
||||
print(_parse_config(value, str(slot)), file=ret)
|
||||
elif isinstance(conf, dict):
|
||||
try:
|
||||
print("<{0} {1}>".format(slot, conf["this"]), file=ret)
|
||||
print("<{} {}>".format(slot, conf["this"]), file=ret)
|
||||
except KeyError:
|
||||
raise SaltException(
|
||||
'Apache section container "<{0}>" expects attribute. '
|
||||
'Apache section container "<{}>" expects attribute. '
|
||||
'Specify it using key "this".'.format(slot)
|
||||
)
|
||||
for key, value in six.iteritems(conf):
|
||||
for key, value in conf.items():
|
||||
if key != "this":
|
||||
if isinstance(value, six.string_types):
|
||||
print("{0} {1}".format(key, value), file=ret)
|
||||
if isinstance(value, str):
|
||||
print("{} {}".format(key, value), file=ret)
|
||||
elif isinstance(value, list):
|
||||
print(_parse_config(value, key), file=ret)
|
||||
elif isinstance(value, dict):
|
||||
print(_parse_config(value, key), file=ret)
|
||||
print("</{0}>".format(slot), file=ret)
|
||||
print("</{}>".format(slot), file=ret)
|
||||
|
||||
ret.seek(0)
|
||||
return ret.read()
|
||||
|
@ -469,7 +443,7 @@ def config(name, config, edit=True):
|
|||
|
||||
configs = []
|
||||
for entry in config:
|
||||
key = next(six.iterkeys(entry))
|
||||
key = next(iter(entry.keys()))
|
||||
configs.append(_parse_config(entry[key], key))
|
||||
|
||||
# Python auto-correct line endings
|
||||
|
|
|
@ -1,15 +1,10 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Module for apcupsd
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Python libs
|
||||
import logging
|
||||
|
||||
import salt.utils.decorators as decorators
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.path
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -34,7 +29,7 @@ def __virtual__():
|
|||
return __virtualname__
|
||||
return (
|
||||
False,
|
||||
"{0} module can only be loaded on when apcupsd is installed".format(
|
||||
"{} module can only be loaded on when apcupsd is installed".format(
|
||||
__virtualname__
|
||||
),
|
||||
)
|
||||
|
|
|
@ -1,11 +1,8 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Aptly Debian repository manager.
|
||||
|
||||
.. versionadded:: 2018.3.0
|
||||
"""
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
@ -16,9 +13,6 @@ import salt.utils.path
|
|||
import salt.utils.stringutils
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
# Import salt libs
|
||||
from salt.ext import six
|
||||
|
||||
_DEFAULT_CONFIG_PATH = "/etc/aptly.conf"
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -183,7 +177,7 @@ def get_repo(name, config_path=_DEFAULT_CONFIG_PATH, with_packages=False):
|
|||
salt '*' aptly.get_repo name="test-repo"
|
||||
"""
|
||||
_validate_config(config_path)
|
||||
with_packages = six.text_type(bool(with_packages)).lower()
|
||||
with_packages = str(bool(with_packages)).lower()
|
||||
|
||||
ret = dict()
|
||||
cmd = [
|
||||
|
@ -390,7 +384,7 @@ def delete_repo(name, config_path=_DEFAULT_CONFIG_PATH, force=False):
|
|||
salt '*' aptly.delete_repo name="test-repo"
|
||||
"""
|
||||
_validate_config(config_path)
|
||||
force = six.text_type(bool(force)).lower()
|
||||
force = str(bool(force)).lower()
|
||||
|
||||
current_repo = __salt__["aptly.get_repo"](name=name, config_path=config_path)
|
||||
|
||||
|
@ -517,7 +511,7 @@ def cleanup_db(config_path=_DEFAULT_CONFIG_PATH, dry_run=False):
|
|||
salt '*' aptly.cleanup_db
|
||||
"""
|
||||
_validate_config(config_path)
|
||||
dry_run = six.text_type(bool(dry_run)).lower()
|
||||
dry_run = str(bool(dry_run)).lower()
|
||||
|
||||
ret = {"deleted_keys": list(), "deleted_files": list()}
|
||||
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Arista pyeapi
|
||||
=============
|
||||
|
@ -89,19 +88,13 @@ outside a ``pyeapi`` Proxy, e.g.:
|
|||
Minion. If you want to use the :mod:`pyeapi Proxy <salt.proxy.arista_pyeapi>`,
|
||||
please follow the documentation notes for a proper setup.
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import python stdlib
|
||||
import difflib
|
||||
import logging
|
||||
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
# Import Salt libs
|
||||
from salt.ext import six
|
||||
from salt.utils.args import clean_kwargs
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import pyeapi
|
||||
|
||||
|
@ -519,7 +512,7 @@ def config(
|
|||
log.debug("Fetched from %s", config_file)
|
||||
log.debug(file_str)
|
||||
elif commands:
|
||||
if isinstance(commands, (six.string_types, six.text_type)):
|
||||
if isinstance(commands, ((str,), str)):
|
||||
commands = [commands]
|
||||
file_str = "\n".join(commands)
|
||||
# unify all the commands in a single file, to render them in a go
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Wrapper module for at(1)
|
||||
|
||||
|
@ -9,26 +8,19 @@ easily tag jobs.
|
|||
|
||||
.. versionchanged:: 2017.7.0
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import datetime
|
||||
|
||||
# Import python libs
|
||||
import re
|
||||
import time
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.data
|
||||
import salt.utils.path
|
||||
import salt.utils.platform
|
||||
|
||||
# pylint: enable=import-error,redefined-builtin
|
||||
from salt.exceptions import CommandNotFoundError
|
||||
from salt.ext import six
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error,redefined-builtin
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# OS Families that should work (Ubuntu and Debian are the default)
|
||||
# TODO: Refactor some of this module to remove the checks for binaries
|
||||
|
@ -56,7 +48,7 @@ def _cmd(binary, *args):
|
|||
"""
|
||||
binary = salt.utils.path.which(binary)
|
||||
if not binary:
|
||||
raise CommandNotFoundError("{0}: command not found".format(binary))
|
||||
raise CommandNotFoundError("{}: command not found".format(binary))
|
||||
cmd = [binary] + list(args)
|
||||
return __salt__["cmd.run_stdout"]([binary] + list(args), python_shell=False)
|
||||
|
||||
|
@ -156,7 +148,7 @@ def atq(tag=None):
|
|||
job_tag = tmp.groups()[0]
|
||||
|
||||
if __grains__["os"] in BSD:
|
||||
job = six.text_type(job)
|
||||
job = str(job)
|
||||
else:
|
||||
job = int(job)
|
||||
|
||||
|
@ -223,16 +215,7 @@ def atrm(*args):
|
|||
ret = {"jobs": {"removed": opts, "tag": None}}
|
||||
else:
|
||||
opts = list(
|
||||
list(
|
||||
map(
|
||||
str,
|
||||
[
|
||||
i["job"]
|
||||
for i in atq()["jobs"]
|
||||
if six.text_type(i["job"]) in args
|
||||
],
|
||||
)
|
||||
)
|
||||
list(map(str, [i["job"] for i in atq()["jobs"] if str(i["job"]) in args],))
|
||||
)
|
||||
ret = {"jobs": {"removed": opts, "tag": None}}
|
||||
|
||||
|
@ -271,7 +254,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
|||
return "'at.at' is not available."
|
||||
|
||||
if "tag" in kwargs:
|
||||
stdin = "### SALT: {0}\n{1}".format(kwargs["tag"], " ".join(args[1:]))
|
||||
stdin = "### SALT: {}\n{}".format(kwargs["tag"], " ".join(args[1:]))
|
||||
else:
|
||||
stdin = " ".join(args[1:])
|
||||
cmd = [binary, args[0]]
|
||||
|
@ -296,7 +279,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
|||
output = output.split()[1]
|
||||
|
||||
if __grains__["os"] in BSD:
|
||||
return atq(six.text_type(output))
|
||||
return atq(str(output))
|
||||
else:
|
||||
return atq(int(output))
|
||||
|
||||
|
@ -315,12 +298,12 @@ def atc(jobid):
|
|||
"""
|
||||
# Shim to produce output similar to what __virtual__() should do
|
||||
# but __salt__ isn't available in __virtual__()
|
||||
output = _cmd("at", "-c", six.text_type(jobid))
|
||||
output = _cmd("at", "-c", str(jobid))
|
||||
|
||||
if output is None:
|
||||
return "'at.atc' is not available."
|
||||
elif output == "":
|
||||
return {"error": "invalid job id '{0}'".format(jobid)}
|
||||
return {"error": "invalid job id '{}'".format(jobid)}
|
||||
|
||||
return output
|
||||
|
||||
|
@ -339,8 +322,8 @@ def _atq(**kwargs):
|
|||
day = kwargs.get("day", None)
|
||||
month = kwargs.get("month", None)
|
||||
year = kwargs.get("year", None)
|
||||
if year and len(six.text_type(year)) == 2:
|
||||
year = "20{0}".format(year)
|
||||
if year and len(str(year)) == 2:
|
||||
year = "20{}".format(year)
|
||||
|
||||
jobinfo = atq()["jobs"]
|
||||
if not jobinfo:
|
||||
|
@ -364,28 +347,28 @@ def _atq(**kwargs):
|
|||
|
||||
if not hour:
|
||||
pass
|
||||
elif "{0:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
||||
elif "{:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
||||
if not minute:
|
||||
pass
|
||||
elif "{0:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
||||
elif "{:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
||||
if not day:
|
||||
pass
|
||||
elif "{0:02d}".format(int(day)) == job["date"].split("-")[2]:
|
||||
elif "{:02d}".format(int(day)) == job["date"].split("-")[2]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
||||
if not month:
|
||||
pass
|
||||
elif "{0:02d}".format(int(month)) == job["date"].split("-")[1]:
|
||||
elif "{:02d}".format(int(month)) == job["date"].split("-")[1]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Wrapper for at(1) on Solaris-like systems
|
||||
|
||||
|
@ -12,25 +11,16 @@ Wrapper for at(1) on Solaris-like systems
|
|||
|
||||
.. versionadded:: 2017.7.0
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
# Import python libs
|
||||
import re
|
||||
import time
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.files
|
||||
import salt.utils.path
|
||||
import salt.utils.platform
|
||||
import salt.utils.stringutils
|
||||
from salt.ext import six
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error,redefined-builtin
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
__virtualname__ = "at"
|
||||
|
@ -102,7 +92,7 @@ def atq(tag=None):
|
|||
specs.append(tmp[5])
|
||||
|
||||
# make sure job is str
|
||||
job = six.text_type(job)
|
||||
job = str(job)
|
||||
|
||||
# search for any tags
|
||||
atjob_file = "/var/spool/cron/atjobs/{job}".format(job=job)
|
||||
|
@ -208,7 +198,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
|||
|
||||
# build job
|
||||
if "tag" in kwargs:
|
||||
stdin = "### SALT: {0}\n{1}".format(kwargs["tag"], " ".join(args[1:]))
|
||||
stdin = "### SALT: {}\n{}".format(kwargs["tag"], " ".join(args[1:]))
|
||||
else:
|
||||
stdin = " ".join(args[1:])
|
||||
|
||||
|
@ -226,7 +216,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
|||
return {"jobs": [], "error": res["stderr"]}
|
||||
else:
|
||||
jobid = res["stderr"].splitlines()[1]
|
||||
jobid = six.text_type(jobid.split()[1])
|
||||
jobid = str(jobid.split()[1])
|
||||
return atq(jobid)
|
||||
|
||||
|
||||
|
@ -250,7 +240,7 @@ def atc(jobid):
|
|||
[salt.utils.stringutils.to_unicode(x) for x in rfh.readlines()]
|
||||
)
|
||||
else:
|
||||
return {"error": "invalid job id '{0}'".format(jobid)}
|
||||
return {"error": "invalid job id '{}'".format(jobid)}
|
||||
|
||||
|
||||
def _atq(**kwargs):
|
||||
|
@ -267,8 +257,8 @@ def _atq(**kwargs):
|
|||
day = kwargs.get("day", None)
|
||||
month = kwargs.get("month", None)
|
||||
year = kwargs.get("year", None)
|
||||
if year and len(six.text_type(year)) == 2:
|
||||
year = "20{0}".format(year)
|
||||
if year and len(str(year)) == 2:
|
||||
year = "20{}".format(year)
|
||||
|
||||
jobinfo = atq()["jobs"]
|
||||
if not jobinfo:
|
||||
|
@ -292,28 +282,28 @@ def _atq(**kwargs):
|
|||
|
||||
if not hour:
|
||||
pass
|
||||
elif "{0:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
||||
elif "{:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
||||
if not minute:
|
||||
pass
|
||||
elif "{0:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
||||
elif "{:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
||||
if not day:
|
||||
pass
|
||||
elif "{0:02d}".format(int(day)) == job["date"].split("-")[2]:
|
||||
elif "{:02d}".format(int(day)) == job["date"].split("-")[2]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
||||
if not month:
|
||||
pass
|
||||
elif "{0:02d}".format(int(month)) == job["date"].split("-")[1]:
|
||||
elif "{:02d}".format(int(month)) == job["date"].split("-")[1]:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Manages configuration files via augeas
|
||||
|
||||
|
@ -23,21 +22,15 @@ This module requires the ``augeas`` Python module.
|
|||
For affected Debian/Ubuntu hosts, installing ``libpython2.7`` has been
|
||||
known to resolve the issue.
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
import re
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.args
|
||||
import salt.utils.data
|
||||
import salt.utils.stringutils
|
||||
from salt.exceptions import SaltInvocationError
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import zip
|
||||
|
||||
# Make sure augeas python interface is installed
|
||||
HAS_AUGEAS = False
|
||||
|
@ -90,8 +83,7 @@ def _recurmatch(path, aug):
|
|||
|
||||
for i in aug.match(clean_path + "/*"):
|
||||
i = i.replace("!", "\\!") # escape some dirs
|
||||
for _match in _recurmatch(i, aug):
|
||||
yield _match
|
||||
yield from _recurmatch(i, aug)
|
||||
|
||||
|
||||
def _lstrip_word(word, prefix):
|
||||
|
@ -100,8 +92,8 @@ def _lstrip_word(word, prefix):
|
|||
from the beginning of the string
|
||||
"""
|
||||
|
||||
if six.text_type(word).startswith(prefix):
|
||||
return six.text_type(word)[len(prefix) :]
|
||||
if str(word).startswith(prefix):
|
||||
return str(word)[len(prefix) :]
|
||||
return word
|
||||
|
||||
|
||||
|
@ -110,7 +102,7 @@ def _check_load_paths(load_path):
|
|||
Checks the validity of the load_path, returns a sanitized version
|
||||
with invalid paths removed.
|
||||
"""
|
||||
if load_path is None or not isinstance(load_path, six.string_types):
|
||||
if load_path is None or not isinstance(load_path, str):
|
||||
return None
|
||||
|
||||
_paths = []
|
||||
|
@ -198,7 +190,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
|||
cmd, arg = command.split(" ", 1)
|
||||
|
||||
if cmd not in METHOD_MAP:
|
||||
ret["error"] = "Command {0} is not supported (yet)".format(cmd)
|
||||
ret["error"] = "Command {} is not supported (yet)".format(cmd)
|
||||
return ret
|
||||
|
||||
method = METHOD_MAP[cmd]
|
||||
|
@ -207,7 +199,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
|||
parts = salt.utils.args.shlex_split(arg)
|
||||
|
||||
if len(parts) not in nargs:
|
||||
err = "{0} takes {1} args: {2}".format(method, nargs, parts)
|
||||
err = "{} takes {} args: {}".format(method, nargs, parts)
|
||||
raise ValueError(err)
|
||||
if method == "set":
|
||||
path = make_path(parts[0])
|
||||
|
@ -226,7 +218,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
|||
label, where, path = parts
|
||||
if where not in ("before", "after"):
|
||||
raise ValueError(
|
||||
'Expected "before" or "after", not {0}'.format(where)
|
||||
'Expected "before" or "after", not {}'.format(where)
|
||||
)
|
||||
path = make_path(path)
|
||||
args = {"path": path, "label": label, "before": where == "before"}
|
||||
|
@ -240,7 +232,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
|||
arg = command
|
||||
ret["error"] = (
|
||||
"Invalid formatted command, "
|
||||
"see debug log for details: {0}".format(arg)
|
||||
"see debug log for details: {}".format(arg)
|
||||
)
|
||||
return ret
|
||||
|
||||
|
@ -253,13 +245,13 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
|||
try:
|
||||
aug.save()
|
||||
ret["retval"] = True
|
||||
except IOError as err:
|
||||
ret["error"] = six.text_type(err)
|
||||
except OSError as err:
|
||||
ret["error"] = str(err)
|
||||
|
||||
if lens and not lens.endswith(".lns"):
|
||||
ret["error"] += (
|
||||
'\nLenses are normally configured as "name.lns". '
|
||||
'Did you mean "{0}.lns"?'.format(lens)
|
||||
'Did you mean "{}.lns"?'.format(lens)
|
||||
)
|
||||
|
||||
aug.close()
|
||||
|
@ -296,12 +288,12 @@ def get(path, value="", load_path=None):
|
|||
|
||||
path = path.rstrip("/")
|
||||
if value:
|
||||
path += "/{0}".format(value.strip("/"))
|
||||
path += "/{}".format(value.strip("/"))
|
||||
|
||||
try:
|
||||
_match = aug.match(path)
|
||||
except RuntimeError as err:
|
||||
return {"error": six.text_type(err)}
|
||||
return {"error": str(err)}
|
||||
|
||||
if _match:
|
||||
ret[path] = aug.get(path)
|
||||
|
@ -349,7 +341,7 @@ def setvalue(*args):
|
|||
%wheel ALL = PASSWD : ALL , NOPASSWD : /usr/bin/apt-get , /usr/bin/aptitude
|
||||
"""
|
||||
load_path = None
|
||||
load_paths = [x for x in args if six.text_type(x).startswith("load_path=")]
|
||||
load_paths = [x for x in args if str(x).startswith("load_path=")]
|
||||
if load_paths:
|
||||
if len(load_paths) > 1:
|
||||
raise SaltInvocationError("Only one 'load_path=' value is permitted")
|
||||
|
@ -363,10 +355,9 @@ def setvalue(*args):
|
|||
tuples = [
|
||||
x
|
||||
for x in args
|
||||
if not six.text_type(x).startswith("prefix=")
|
||||
and not six.text_type(x).startswith("load_path=")
|
||||
if not str(x).startswith("prefix=") and not str(x).startswith("load_path=")
|
||||
]
|
||||
prefix = [x for x in args if six.text_type(x).startswith("prefix=")]
|
||||
prefix = [x for x in args if str(x).startswith("prefix=")]
|
||||
if prefix:
|
||||
if len(prefix) > 1:
|
||||
raise SaltInvocationError("Only one 'prefix=' value is permitted")
|
||||
|
@ -382,15 +373,15 @@ def setvalue(*args):
|
|||
if prefix:
|
||||
target_path = os.path.join(prefix.rstrip("/"), path.lstrip("/"))
|
||||
try:
|
||||
aug.set(target_path, six.text_type(value))
|
||||
aug.set(target_path, str(value))
|
||||
except ValueError as err:
|
||||
ret["error"] = "Multiple values: {0}".format(err)
|
||||
ret["error"] = "Multiple values: {}".format(err)
|
||||
|
||||
try:
|
||||
aug.save()
|
||||
ret["retval"] = True
|
||||
except IOError as err:
|
||||
ret["error"] = six.text_type(err)
|
||||
except OSError as err:
|
||||
ret["error"] = str(err)
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -467,8 +458,8 @@ def remove(path, load_path=None):
|
|||
ret["error"] = "Invalid node"
|
||||
else:
|
||||
ret["retval"] = True
|
||||
except (RuntimeError, IOError) as err:
|
||||
ret["error"] = six.text_type(err)
|
||||
except (RuntimeError, OSError) as err:
|
||||
ret["error"] = str(err)
|
||||
|
||||
ret["count"] = count
|
||||
|
||||
|
@ -518,7 +509,7 @@ def ls(path, load_path=None): # pylint: disable=C0103
|
|||
matches = _match(match_path)
|
||||
ret = {}
|
||||
|
||||
for key, value in six.iteritems(matches):
|
||||
for key, value in matches.items():
|
||||
name = _lstrip_word(key, path)
|
||||
if _match(key + "/*"):
|
||||
ret[name + "/"] = value # has sub nodes, e.g. directory
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Azure (ARM) Compute Execution Module
|
||||
|
||||
|
@ -47,7 +46,6 @@ Azure (ARM) Compute Execution Module
|
|||
"""
|
||||
|
||||
# Python libs
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -127,9 +125,7 @@ def availability_set_create_or_update(
|
|||
"compute", "AvailabilitySet", **kwargs
|
||||
)
|
||||
except TypeError as exc:
|
||||
result = {
|
||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
||||
}
|
||||
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||
return result
|
||||
|
||||
try:
|
||||
|
@ -145,7 +141,7 @@ def availability_set_create_or_update(
|
|||
result = {"error": str(exc)}
|
||||
except SerializationError as exc:
|
||||
result = {
|
||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
||||
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||
}
|
||||
|
||||
return result
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Azure (ARM) DNS Execution Module
|
||||
|
||||
|
@ -53,7 +52,6 @@ Optional provider parameters:
|
|||
"""
|
||||
|
||||
# Python libs
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
|
||||
|
@ -117,9 +115,7 @@ def record_set_create_or_update(name, zone_name, resource_group, record_type, **
|
|||
"dns", "RecordSet", **kwargs
|
||||
)
|
||||
except TypeError as exc:
|
||||
result = {
|
||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
||||
}
|
||||
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||
return result
|
||||
|
||||
try:
|
||||
|
@ -138,7 +134,7 @@ def record_set_create_or_update(name, zone_name, resource_group, record_type, **
|
|||
result = {"error": str(exc)}
|
||||
except SerializationError as exc:
|
||||
result = {
|
||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
||||
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||
}
|
||||
|
||||
return result
|
||||
|
@ -362,9 +358,7 @@ def zone_create_or_update(name, resource_group, **kwargs):
|
|||
try:
|
||||
zone_model = __utils__["azurearm.create_object_model"]("dns", "Zone", **kwargs)
|
||||
except TypeError as exc:
|
||||
result = {
|
||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
||||
}
|
||||
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||
return result
|
||||
|
||||
try:
|
||||
|
@ -381,7 +375,7 @@ def zone_create_or_update(name, resource_group, **kwargs):
|
|||
result = {"error": str(exc)}
|
||||
except SerializationError as exc:
|
||||
result = {
|
||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
||||
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||
}
|
||||
|
||||
return result
|
||||
|
|
|
@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Azure (ARM) Network Execution Module

@ -47,13 +46,11 @@ Azure (ARM) Network Execution Module
"""

# Python libs
from __future__ import absolute_import

import logging

# Salt libs
from salt.exceptions import SaltInvocationError # pylint: disable=unused-import
from salt.ext.six.moves import range

# Azure libs
HAS_LIBS = False

@ -187,9 +184,7 @@ def default_security_rule_get(name, security_group, resource_group, **kwargs):
if default_rule["name"] == name:
result = default_rule
if not result:
result = {
"error": "Unable to find {0} in {1}!".format(name, security_group)
}
result = {"error": "Unable to find {} in {}!".format(name, security_group)}
except KeyError as exc:
log.error("Unable to find %s in %s!", name, security_group)
result = {"error": str(exc)}

@ -368,7 +363,7 @@ def security_rule_create_or_update(
# pylint: disable=eval-used
if eval(params[0]):
# pylint: disable=exec-used
exec("{0} = None".format(params[1]))
exec("{} = None".format(params[1]))

netconn = __utils__["azurearm.get_client"]("network", **kwargs)

@ -392,9 +387,7 @@ def security_rule_create_or_update(
**kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -412,7 +405,7 @@ def security_rule_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -528,9 +521,7 @@ def network_security_group_create_or_update(
"network", "NetworkSecurityGroup", **kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -547,7 +538,7 @@ def network_security_group_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -803,9 +794,7 @@ def subnet_create_or_update(
**kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -823,7 +812,7 @@ def subnet_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -979,9 +968,7 @@ def virtual_network_create_or_update(name, address_prefixes, resource_group, **k
**kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -998,7 +985,7 @@ def virtual_network_create_or_update(name, address_prefixes, resource_group, **k
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -1322,9 +1309,7 @@ def load_balancer_create_or_update(name, resource_group, **kwargs):
"network", "LoadBalancer", **kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -1341,7 +1326,7 @@ def load_balancer_create_or_update(name, resource_group, **kwargs):
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -1572,9 +1557,7 @@ def network_interface_create_or_update(
"network", "NetworkInterface", ip_configurations=ip_configurations, **kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -1591,7 +1574,7 @@ def network_interface_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -1961,9 +1944,7 @@ def public_ip_address_create_or_update(name, resource_group, **kwargs):
"network", "PublicIPAddress", **kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -1980,7 +1961,7 @@ def public_ip_address_create_or_update(name, resource_group, **kwargs):
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -2172,9 +2153,7 @@ def route_filter_rule_create_or_update(
**kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -2195,7 +2174,7 @@ def route_filter_rule_create_or_update(
result = {"error": message}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -2338,9 +2317,7 @@ def route_filter_create_or_update(name, resource_group, **kwargs):
"network", "RouteFilter", **kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -2357,7 +2334,7 @@ def route_filter_create_or_update(name, resource_group, **kwargs):
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -2547,9 +2524,7 @@ def route_create_or_update(
**kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -2567,7 +2542,7 @@ def route_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -2710,9 +2685,7 @@ def route_table_create_or_update(name, resource_group, **kwargs):
"network", "RouteTable", **kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -2729,7 +2702,7 @@ def route_table_create_or_update(name, resource_group, **kwargs):
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result
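Besides the format-string rewrites, the network module above drops "from salt.ext.six.moves import range" with no replacement. A short plain-Python sketch (nothing Salt-specific, names are illustrative) of why no import is needed: on Python 3 the builtin range already is the lazy sequence object that six.moves.range aliased.

# Plain Python 3 demonstration of the builtin range that replaces six.moves.range.
nums = range(1_000_000)   # lazy sequence object, not a materialised list
print(nums[10])           # indexing works: 10
print(999_999 in nums)    # membership test without building a list: True
print(len(nums))          # 1000000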
@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Azure (ARM) Resource Execution Module

@ -47,7 +46,6 @@ Azure (ARM) Resource Execution Module
"""

# Python libs
from __future__ import absolute_import

import logging

@ -452,9 +450,7 @@ def deployment_create_or_update(
"resource", "DeploymentProperties", **deploy_kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -477,7 +473,7 @@ def deployment_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -630,9 +626,7 @@ def deployment_validate(
"resource", "DeploymentProperties", **deploy_kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -651,7 +645,7 @@ def deployment_validate(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result

@ -918,7 +912,7 @@ def policy_assignment_create(name, scope, definition_name, **kwargs):
definition = definition_list[definition_name]
else:
definition = {
"error": 'The policy definition named "{0}" could not be found.'.format(
"error": 'The policy definition named "{}" could not be found.'.format(
definition_name
)
}

@ -938,7 +932,7 @@ def policy_assignment_create(name, scope, definition_name, **kwargs):
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
"error": "The object model could not be built. ({})".format(str(exc))
}
return result

@ -952,11 +946,11 @@ def policy_assignment_create(name, scope, definition_name, **kwargs):
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}
else:
result = {
"error": 'The policy definition named "{0}" could not be found.'.format(
"error": 'The policy definition named "{}" could not be found.'.format(
definition_name
)
}

@ -1098,9 +1092,7 @@ def policy_definition_create_or_update(
"resource.policy", "PolicyDefinition", **policy_kwargs
)
except TypeError as exc:
result = {
"error": "The object model could not be built. ({0})".format(str(exc))
}
result = {"error": "The object model could not be built. ({})".format(str(exc))}
return result

try:

@ -1113,7 +1105,7 @@ def policy_definition_create_or_update(
result = {"error": str(exc)}
except SerializationError as exc:
result = {
"error": "The object model could not be parsed. ({0})".format(str(exc))
"error": "The object model could not be parsed. ({})".format(str(exc))
}

return result
@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
"""
Support for Bluetooth (using BlueZ in Linux).

@ -9,19 +8,11 @@ The following packages are required packages for this module:
bluez-utils >= 5.7
pybluez >= 0.18
"""
from __future__ import absolute_import, print_function, unicode_literals
import shlex

# Import salt libs
import salt.utils.validate.net
from salt.exceptions import CommandExecutionError

# Import 3rd-party libs
# pylint: disable=import-error
from salt.ext.six.moves import shlex_quote as _cmd_quote

# pylint: enable=import-error


HAS_PYBLUEZ = False
try:
import bluetooth # pylint: disable=import-error

@ -89,7 +80,7 @@ def address_():
dev = comps[0]
ret[dev] = {
"device": dev,
"path": "/sys/class/bluetooth/{0}".format(dev),
"path": "/sys/class/bluetooth/{}".format(dev),
}
if "BD Address" in line:
comps = line.split()

@ -121,7 +112,7 @@ def power(dev, mode):
else:
state = "down"
mode = "off"
cmd = "hciconfig {0} {1}".format(dev, state)
cmd = "hciconfig {} {}".format(dev, state)
__salt__["cmd.run"](cmd).splitlines()
info = address_()
if info[dev]["power"] == mode:

@ -142,9 +133,9 @@ def discoverable(dev):
if dev not in address_():
raise CommandExecutionError("Invalid dev passed to bluetooth.discoverable")

cmd = "hciconfig {0} iscan".format(dev)
cmd = "hciconfig {} iscan".format(dev)
__salt__["cmd.run"](cmd).splitlines()
cmd = "hciconfig {0}".format(dev)
cmd = "hciconfig {}".format(dev)
out = __salt__["cmd.run"](cmd)
if "UP RUNNING ISCAN" in out:
return True

@ -164,9 +155,9 @@ def noscan(dev):
if dev not in address_():
raise CommandExecutionError("Invalid dev passed to bluetooth.noscan")

cmd = "hciconfig {0} noscan".format(dev)
cmd = "hciconfig {} noscan".format(dev)
__salt__["cmd.run"](cmd).splitlines()
cmd = "hciconfig {0}".format(dev)
cmd = "hciconfig {}".format(dev)
out = __salt__["cmd.run"](cmd)
if "SCAN" in out:
return False

@ -203,7 +194,7 @@ def block(bdaddr):
if not salt.utils.validate.net.mac(bdaddr):
raise CommandExecutionError("Invalid BD address passed to bluetooth.block")

cmd = "hciconfig {0} block".format(bdaddr)
cmd = "hciconfig {} block".format(bdaddr)
__salt__["cmd.run"](cmd).splitlines()


@ -220,7 +211,7 @@ def unblock(bdaddr):
if not salt.utils.validate.net.mac(bdaddr):
raise CommandExecutionError("Invalid BD address passed to bluetooth.unblock")

cmd = "hciconfig {0} unblock".format(bdaddr)
cmd = "hciconfig {} unblock".format(bdaddr)
__salt__["cmd.run"](cmd).splitlines()


@ -251,8 +242,8 @@ def pair(address, key):
)

addy = address_()
cmd = "echo {0} | bluez-simple-agent {1} {2}".format(
_cmd_quote(addy["device"]), _cmd_quote(address), _cmd_quote(key)
cmd = "echo {} | bluez-simple-agent {} {}".format(
shlex.quote(addy["device"]), shlex.quote(address), shlex.quote(key)
)
out = __salt__["cmd.run"](cmd, python_shell=True).splitlines()
return out

@ -276,7 +267,7 @@ def unpair(address):
if not salt.utils.validate.net.mac(address):
raise CommandExecutionError("Invalid BD address passed to bluetooth.unpair")

cmd = "bluez-test-device remove {0}".format(address)
cmd = "bluez-test-device remove {}".format(address)
out = __salt__["cmd.run"](cmd).splitlines()
return out
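In the bluetooth module above, the six compatibility alias (shlex_quote imported as _cmd_quote) is replaced by the stdlib shlex.quote, which on Python 3 is the same quoting function. A small, self-contained sketch of the effect, using made-up input values; the bluez command is only printed here, never executed:

import shlex

# Hypothetical inputs, for illustration only.
device, address, key = "hci0", "AA:BB:CC:DD:EE:FF; rm -rf /", "1234"

cmd = "echo {} | bluez-simple-agent {} {}".format(
    shlex.quote(device), shlex.quote(address), shlex.quote(key)
)
print(cmd)
# The hostile address is wrapped in single quotes, so a shell would pass it
# through as a single argument instead of running the injected command.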
Some files were not shown because too many files have changed in this diff.