Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Commit d9b50659b2: "Pyupgrade and drop six"
Parent: 0d71775f51
1565 changed files with 8268 additions and 16733 deletions
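Every hunk below applies the same small set of mechanical rewrites: delete the `# -*- coding: utf-8 -*-` headers and `from __future__` imports, drop `salt.ext.six` in favour of the builtins, let classes inherit from `object` implicitly, call argument-free `super()`, and remove explicit indices from `str.format()` fields. As a minimal illustrative sketch (the class and method names here are hypothetical, not taken from any file in this commit), the modernised idioms look like this:

# Illustrative only: a hypothetical class showing the post-cleanup spellings.
class Example:  # was "class Example(object):"
    def __init__(self, opts):
        super().__init__()  # was "super(Example, self).__init__()"
        self.opts = opts

    def describe(self, key):
        # was "isinstance(key, six.string_types)"
        if isinstance(key, str):
            key = [key]
        # was '"{0}: {1}".format(...)'; pyupgrade drops the explicit indices
        return ["{}: {}".format(k, self.opts.get(k)) for k in key]

    def items(self):
        # was "six.iteritems(self.opts)"
        return self.opts.items()

The same substitutions (six.text_type to str, six.binary_type to bytes, six.moves.queue to queue, IOError to OSError) account for the bulk of the changes that follow.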
@@ -1,5 +1,4 @@
 #! /bin/env python
-from __future__ import print_function
 
 import argparse
 import os
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 """
 The setup script for sodium_grabber
 """
@@ -1,5 +1,4 @@
 #!/usr/bin/python
-from __future__ import print_function
 
 import getopt
 import os
@@ -49,9 +48,8 @@ def main(argv):
     if target == "":
         display_help()
 
-    if sys.version_info >= (3, 0):
-        search = search.encode("utf-8")
-        replace = replace.encode("utf-8")
+    search = search.encode("utf-8")
+    replace = replace.encode("utf-8")
     f = open(target, "rb").read()
     f = f.replace(search, replace)
     f = f.replace(search.lower(), replace)
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt._logging
 ~~~~~~~~~~~~~
@@ -11,7 +10,5 @@
 the python's logging system.
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 from salt._logging.impl import * # pylint: disable=wildcard-import
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt._logging.handlers
 ~~~~~~~~~~~~~~~~~~~~~~
@@ -6,18 +5,15 @@
 Salt's logging handlers
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 import logging
 import logging.handlers
+import queue
 import sys
 from collections import deque
 
-# Import salt libs
 from salt._logging.mixins import ExcInfoOnLogLevelFormatMixin, NewStyleClassMixin
-from salt.ext.six.moves import queue # pylint: disable=import-error,no-name-in-module
 
 # from salt.utils.versions import warn_until_date
 
@@ -48,7 +44,7 @@ class TemporaryLoggingHandler(logging.NullHandler):
         # '{{date}}.'.format(name=__name__)
         # )
         self.__max_queue_size = max_queue_size
-        super(TemporaryLoggingHandler, self).__init__(level=level)
+        super().__init__(level=level)
         self.__messages = deque(maxlen=max_queue_size)
 
     def handle(self, record):
@@ -117,7 +113,7 @@ class SysLogHandler(
         del exc_type, exc, exc_traceback
 
         if not handled:
-            super(SysLogHandler, self).handleError(record)
+            super().handleError(record)
 
 
 class RotatingFileHandler(
@@ -152,7 +148,7 @@ class RotatingFileHandler(
                 ):
                     if self.level <= logging.WARNING:
                         sys.stderr.write(
-                            '[WARNING ] Unable to rotate the log file "{0}" '
+                            '[WARNING ] Unable to rotate the log file "{}" '
                             "because it is in use\n".format(self.baseFilename)
                         )
                     handled = True
@@ -162,7 +158,7 @@ class RotatingFileHandler(
         del exc_type, exc, exc_traceback
 
         if not handled:
-            super(RotatingFileHandler, self).handleError(record)
+            super().handleError(record)
 
 
 class WatchedFileHandler(
@@ -217,7 +213,7 @@ if sys.version_info < (3, 2):
             except queue.Full:
                 sys.stderr.write(
                     "[WARNING ] Message queue is full, "
-                    'unable to write "{0}" to log'.format(record)
+                    'unable to write "{}" to log'.format(record)
                 )
 
         def prepare(self, record):
@@ -266,7 +262,7 @@ elif sys.version_info < (3, 7):
         ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler
     ): # pylint: disable=no-member,inconsistent-mro
         def __init__(self, queue): # pylint: disable=useless-super-delegation
-            super(QueueHandler, self).__init__(queue)
+            super().__init__(queue)
             # warn_until_date(
             # '20220101',
            # 'Please stop using \'{name}.QueueHandler\' and instead '
@@ -325,7 +321,7 @@ else:
         ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler
     ): # pylint: disable=no-member,inconsistent-mro
         def __init__(self, queue): # pylint: disable=useless-super-delegation
-            super(QueueHandler, self).__init__(queue)
+            super().__init__(queue)
             # warn_until_date(
            # '20220101',
            # 'Please stop using \'{name}.QueueHandler\' and instead '
@@ -347,5 +343,5 @@ else:
             except queue.Full:
                 sys.stderr.write(
                     "[WARNING ] Message queue is full, "
-                    'unable to write "{0}" to log.\n'.format(record)
+                    'unable to write "{}" to log.\n'.format(record)
                 )
@@ -4,8 +4,6 @@
 
 Salt's logging implementation classes/functionality
 """
-
-
 import logging
 import re
 import sys
@@ -30,8 +28,6 @@ from salt.exceptions import LoggingRuntimeError # isort:skip
 from salt.utils.ctx import RequestContext # isort:skip
 from salt.utils.textformat import TextFormat # isort:skip
 
-# from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=import-error,no-name-in-module
-
 LOG_LEVELS = {
     "all": logging.NOTSET,
     "debug": logging.DEBUG,
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt._logging.mixins
 ~~~~~~~~~~~~~~~~~~~~
@@ -6,14 +5,12 @@
 Logging related mix-ins
 """
 
-# Import Python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import sys
 
 
-class NewStyleClassMixin(object):
+class NewStyleClassMixin:
     """
     Simple new style class to make pylint shut up!
     This is required because SaltLoggingClass can't subclass object directly:
@@ -22,7 +19,7 @@ class NewStyleClassMixin(object):
     """
 
 
-class LoggingProfileMixin(object):
+class LoggingProfileMixin:
     """
     Simple mix-in class to add a trace method to python's logging.
     """
@@ -31,7 +28,7 @@ class LoggingProfileMixin(object):
         self.log(getattr(logging, "PROFILE", 15), msg, *args, **kwargs)
 
 
-class LoggingTraceMixin(object):
+class LoggingTraceMixin:
     """
     Simple mix-in class to add a trace method to python's logging.
     """
@@ -40,7 +37,7 @@ class LoggingTraceMixin(object):
         self.log(getattr(logging, "TRACE", 5), msg, *args, **kwargs)
 
 
-class LoggingGarbageMixin(object):
+class LoggingGarbageMixin:
     """
     Simple mix-in class to add a garbage method to python's logging.
     """
@@ -74,10 +71,10 @@ class LoggingMixinMeta(type):
             bases.append(LoggingTraceMixin)
         if include_garbage:
             bases.append(LoggingGarbageMixin)
-        return super(LoggingMixinMeta, mcs).__new__(mcs, name, tuple(bases), attrs)
+        return super().__new__(mcs, name, tuple(bases), attrs)
 
 
-class ExcInfoOnLogLevelFormatMixin(object):
+class ExcInfoOnLogLevelFormatMixin:
     """
     Logging handler class mixin to properly handle including exc_info on a per logging handler basis
     """
@@ -86,7 +83,7 @@ class ExcInfoOnLogLevelFormatMixin(object):
         """
         Format the log record to include exc_info if the handler is enabled for a specific log level
        """
-        formatted_record = super(ExcInfoOnLogLevelFormatMixin, self).format(record)
+        formatted_record = super().format(record)
         exc_info_on_loglevel = getattr(record, "exc_info_on_loglevel", None)
         exc_info_on_loglevel_formatted = getattr(
             record, "exc_info_on_loglevel_formatted", None
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 The acl module handles publisher_acl operations
 
@@ -9,16 +8,11 @@ found by reading the salt documentation:
 """
 
 # Import python libraries
-from __future__ import absolute_import, print_function, unicode_literals
 
-# Import salt libs
 import salt.utils.stringutils
 
-# Import 3rd-party libs
-from salt.ext import six
-
 
-class PublisherACL(object):
+class PublisherACL:
     """
     Represents the publisher ACL and provides methods
     to query the ACL for given operations
@@ -38,7 +32,7 @@ class PublisherACL(object):
 
     def cmd_is_blacklisted(self, cmd):
         # If this is a regular command, it is a single function
-        if isinstance(cmd, six.string_types):
+        if isinstance(cmd, str):
             cmd = [cmd]
         for fun in cmd:
             if not salt.utils.stringutils.check_whitelist_blacklist(
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 An "Always Approved" eauth interface to test against, not intended for
 production use
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Provide authentication using Django Web Framework
 
@@ -47,16 +46,11 @@ indicated above, though the model DOES NOT have to be named
 'SaltExternalAuthModel'.
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import os
 import sys
 
-# Import 3rd-party libs
-from salt.ext import six
 
 # pylint: disable=import-error
 try:
     import django
@@ -118,7 +112,7 @@ def __django_auth_setup():
            django_module_name, globals(), locals(), "SaltExternalAuthModel"
        )
        # pylint: enable=possibly-unused-variable
-    DJANGO_AUTH_CLASS_str = "django_auth_module.{0}".format(django_model_name)
+    DJANGO_AUTH_CLASS_str = "django_auth_module.{}".format(django_model_name)
    DJANGO_AUTH_CLASS = eval(DJANGO_AUTH_CLASS_str) # pylint: disable=W0123
 
 
@@ -211,7 +205,7 @@ def acl(username):
            found = False
            for d in auth_dict[a.user_fk.username]:
                if isinstance(d, dict):
-                    if a.minion_or_fn_matcher in six.iterkeys(d):
+                    if a.minion_or_fn_matcher in d.keys():
                        auth_dict[a.user_fk.username][a.minion_or_fn_matcher].append(
                            a.minion_fn
                        )
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Provide authentication using local files
 
@@ -95,8 +94,6 @@ When using ``htdigest`` the ``^realm`` must be set:
 
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import os
@@ -1,11 +1,9 @@
-# -*- coding: utf-8 -*-
 """
 Provide authentication using OpenStack Keystone
 
 :depends: - keystoneclient Python module
 """
 
-from __future__ import absolute_import, print_function, unicode_literals
 
 try:
     from keystoneclient.v2_0 import client
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 Provide authentication using MySQL.
 
@@ -49,7 +47,6 @@ Enable MySQL authentication.
 :depends: - MySQL-python Python module
 """
 
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # The pam components have been modified to be salty and have been taken from
 # the pam module under this licence:
 # (c) 2007 Chris AtLee <chris@atlee.ca>
@@ -35,8 +34,6 @@ authenticated against. This defaults to `login`
 
 """
 
-# Import Python Libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 from ctypes import (
@@ -55,13 +52,8 @@ from ctypes import (
 )
 from ctypes.util import find_library
 
-# Import Salt libs
 import salt.utils.user
 
-# Import 3rd-party libs
-from salt.ext import six
-from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
-
 log = logging.getLogger(__name__)
 
 try:
@@ -110,7 +102,7 @@ class PamMessage(Structure):
     ]
 
     def __repr__(self):
-        return "<PamMessage {0} '{1}'>".format(self.msg_style, self.msg)
+        return "<PamMessage {} '{}'>".format(self.msg_style, self.msg)
 
 
 class PamResponse(Structure):
@@ -124,7 +116,7 @@ class PamResponse(Structure):
     ]
 
     def __repr__(self):
-        return "<PamResponse {0} '{1}'>".format(self.resp_retcode, self.resp)
+        return "<PamResponse {} '{}'>".format(self.resp_retcode, self.resp)
 
 
 CONV_FUNC = CFUNCTYPE(
@@ -182,11 +174,11 @@ def authenticate(username, password):
     """
     service = __opts__.get("auth.pam.service", "login")
 
-    if isinstance(username, six.text_type):
+    if isinstance(username, str):
         username = username.encode(__salt_system_encoding__)
-    if isinstance(password, six.text_type):
+    if isinstance(password, str):
         password = password.encode(__salt_system_encoding__)
-    if isinstance(service, six.text_type):
+    if isinstance(service, str):
         service = service.encode(__salt_system_encoding__)
 
     @CONV_FUNC
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Provide authentication using a REST call
 
@@ -23,12 +22,9 @@ as above.
 
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 
-# Import salt libs
 import salt.utils.http
 
 log = logging.getLogger(__name__)
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 Provide authentication using YubiKey.
 
@@ -38,8 +36,6 @@ the API key will be updated on all the YubiCloud servers.
 
 """
 
-# Import Python Libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 
@@ -326,7 +326,7 @@ def beacon(config):
                    r_mask = 0
                    for sub in mask:
                        r_mask |= _get_mask(sub)
-                elif isinstance(mask, salt.ext.six.binary_type):
+                elif isinstance(mask, bytes):
                    r_mask = _get_mask(mask)
                else:
                    r_mask = mask
salt/cache/__init__.py (vendored, 33 changes)
@@ -1,21 +1,16 @@
-# -*- coding: utf-8 -*-
 """
 Loader mechanism for caching data, with data expiration, etc.
 
 .. versionadded:: 2016.11.0
 """
 
-# Import Python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import time
 
-# Import Salt libs
 import salt.config
 import salt.loader
 import salt.syspaths
-from salt.ext import six
 from salt.payload import Serial
 from salt.utils.odict import OrderedDict
 
@@ -35,7 +30,7 @@ def factory(opts, **kwargs):
     return cls(opts, **kwargs)
 
 
-class Cache(object):
+class Cache:
     """
     Base caching object providing access to the modular cache subsystem.
 
@@ -83,7 +78,7 @@ class Cache(object):
 
     def __lazy_init(self):
         self._modules = salt.loader.cache(self.opts, self.serial)
-        fun = "{0}.init_kwargs".format(self.driver)
+        fun = "{}.init_kwargs".format(self.driver)
         if fun in self.modules:
             self._kwargs = self.modules[fun](self._kwargs)
         else:
@@ -154,7 +149,7 @@ class Cache(object):
         Raises an exception if cache driver detected an error accessing data
         in the cache backend (auth, permissions, etc).
         """
-        fun = "{0}.store".format(self.driver)
+        fun = "{}.store".format(self.driver)
         return self.modules[fun](bank, key, data, **self._kwargs)
 
     def fetch(self, bank, key):
@@ -178,7 +173,7 @@ class Cache(object):
         Raises an exception if cache driver detected an error accessing data
         in the cache backend (auth, permissions, etc).
         """
-        fun = "{0}.fetch".format(self.driver)
+        fun = "{}.fetch".format(self.driver)
         return self.modules[fun](bank, key, **self._kwargs)
 
     def updated(self, bank, key):
@@ -202,7 +197,7 @@ class Cache(object):
         Raises an exception if cache driver detected an error accessing data
         in the cache backend (auth, permissions, etc).
         """
-        fun = "{0}.updated".format(self.driver)
+        fun = "{}.updated".format(self.driver)
         return self.modules[fun](bank, key, **self._kwargs)
 
     def flush(self, bank, key=None):
@@ -223,7 +218,7 @@ class Cache(object):
         Raises an exception if cache driver detected an error accessing data
         in the cache backend (auth, permissions, etc).
         """
-        fun = "{0}.flush".format(self.driver)
+        fun = "{}.flush".format(self.driver)
         return self.modules[fun](bank, key=key, **self._kwargs)
 
     def list(self, bank):
@@ -242,7 +237,7 @@ class Cache(object):
         Raises an exception if cache driver detected an error accessing data
         in the cache backend (auth, permissions, etc).
         """
-        fun = "{0}.list".format(self.driver)
+        fun = "{}.list".format(self.driver)
         return self.modules[fun](bank, **self._kwargs)
 
     def contains(self, bank, key=None):
@@ -267,7 +262,7 @@ class Cache(object):
         Raises an exception if cache driver detected an error accessing data
         in the cache backend (auth, permissions, etc).
         """
-        fun = "{0}.contains".format(self.driver)
+        fun = "{}.contains".format(self.driver)
         return self.modules[fun](bank, key, **self._kwargs)
 
 
@@ -281,7 +276,7 @@ class MemCache(Cache):
     data = {}
 
     def __init__(self, opts, **kwargs):
-        super(MemCache, self).__init__(opts, **kwargs)
+        super().__init__(opts, **kwargs)
         self.expire = opts.get("memcache_expire_seconds", 10)
         self.max = opts.get("memcache_max_items", 1024)
         self.cleanup = opts.get("memcache_full_cleanup", False)
@@ -294,7 +289,7 @@ class MemCache(Cache):
     @classmethod
     def __cleanup(cls, expire):
         now = time.time()
-        for storage in six.itervalues(cls.data):
+        for storage in cls.data.values():
            for key, data in list(storage.items()):
                if data[0] + expire < now:
                    del storage[key]
@@ -302,7 +297,7 @@ class MemCache(Cache):
                    break
 
    def _get_storage_id(self):
-        fun = "{0}.storage_id".format(self.driver)
+        fun = "{}.storage_id".format(self.driver)
        if fun in self.modules:
            return self.modules[fun](self.kwargs)
        else:
@@ -338,7 +333,7 @@ class MemCache(Cache):
                return record[1]
 
        # Have no value for the key or value is expired
-        data = super(MemCache, self).fetch(bank, key)
+        data = super().fetch(bank, key)
        if len(self.storage) >= self.max:
            if self.cleanup:
                MemCache.__cleanup(self.expire)
@@ -349,7 +344,7 @@ class MemCache(Cache):
 
    def store(self, bank, key, data):
        self.storage.pop((bank, key), None)
-        super(MemCache, self).store(bank, key, data)
+        super().store(bank, key, data)
        if len(self.storage) >= self.max:
            if self.cleanup:
                MemCache.__cleanup(self.expire)
@@ -359,4 +354,4 @@ class MemCache(Cache):
 
    def flush(self, bank, key=None):
        self.storage.pop((bank, key), None)
-        super(MemCache, self).flush(bank, key)
+        super().flush(bank, key)
salt/cache/consul.py (vendored, 20 changes)
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Minion data cache plugin for Consul key/value data store.
 
@@ -46,7 +45,6 @@ value to ``consul``:
 .. _`python-consul documentation`: https://python-consul.readthedocs.io/en/latest/#consul
 
 """
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 
@@ -106,13 +104,13 @@ def store(bank, key, data):
    """
    Store a key value.
    """
-    c_key = "{0}/{1}".format(bank, key)
+    c_key = "{}/{}".format(bank, key)
    try:
        c_data = __context__["serial"].dumps(data)
        api.kv.put(c_key, c_data)
    except Exception as exc: # pylint: disable=broad-except
        raise SaltCacheError(
-            "There was an error writing the key, {0}: {1}".format(c_key, exc)
+            "There was an error writing the key, {}: {}".format(c_key, exc)
        )
 
 
@@ -120,7 +118,7 @@ def fetch(bank, key):
    """
    Fetch a key value.
    """
-    c_key = "{0}/{1}".format(bank, key)
+    c_key = "{}/{}".format(bank, key)
    try:
        _, value = api.kv.get(c_key)
        if value is None:
@@ -128,7 +126,7 @@ def fetch(bank, key):
        return __context__["serial"].loads(value["Value"])
    except Exception as exc: # pylint: disable=broad-except
        raise SaltCacheError(
-            "There was an error reading the key, {0}: {1}".format(c_key, exc)
+            "There was an error reading the key, {}: {}".format(c_key, exc)
        )
 
 
@@ -139,12 +137,12 @@ def flush(bank, key=None):
    if key is None:
        c_key = bank
    else:
-        c_key = "{0}/{1}".format(bank, key)
+        c_key = "{}/{}".format(bank, key)
    try:
        return api.kv.delete(c_key, recurse=key is None)
    except Exception as exc: # pylint: disable=broad-except
        raise SaltCacheError(
-            "There was an error removing the key, {0}: {1}".format(c_key, exc)
+            "There was an error removing the key, {}: {}".format(c_key, exc)
        )
 
 
@@ -156,7 +154,7 @@ def list_(bank):
        _, keys = api.kv.get(bank + "/", keys=True, separator="/")
    except Exception as exc: # pylint: disable=broad-except
        raise SaltCacheError(
-            'There was an error getting the key "{0}": {1}'.format(bank, exc)
+            'There was an error getting the key "{}": {}'.format(bank, exc)
        )
    if keys is None:
        keys = []
@@ -178,10 +176,10 @@ def contains(bank, key):
        return True # any key could be a branch and a leaf at the same time in Consul
    else:
        try:
-            c_key = "{0}/{1}".format(bank, key)
+            c_key = "{}/{}".format(bank, key)
            _, value = api.kv.get(c_key)
        except Exception as exc: # pylint: disable=broad-except
            raise SaltCacheError(
-                "There was an error getting the key, {0}: {1}".format(c_key, exc)
+                "There was an error getting the key, {}: {}".format(c_key, exc)
            )
        return value is not None
salt/cache/redis_cache.py (vendored, 1 change)
@@ -141,7 +141,6 @@ import logging
 from salt.exceptions import SaltCacheError
 
 # Import salt
-from salt.ext.six.moves import range
 
 try:
     import redis
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 The management of salt command line utilities are stored in here
 """
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, print_function, unicode_literals
-
 import os
 
 import salt.cli.caller
@@ -1,11 +1,7 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, print_function, unicode_literals
-
 import salt.defaults.exitcodes # pylint: disable=W0611
 import salt.utils.parsers
 import salt.utils.profile
 from salt.exceptions import SaltClientError
-from salt.ext import six
 from salt.utils.verify import check_user, verify_log
 
 
@@ -55,4 +51,4 @@ class SaltRun(salt.utils.parsers.SaltRunOptionParser):
                    )
 
        except SaltClientError as exc:
-            raise SystemExit(six.text_type(exc))
+            raise SystemExit(str(exc))
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 salt.cli.spm
 ~~~~~~~~~~~~~
@@ -8,10 +7,7 @@
 .. versionadded:: 2015.8.0
 """
 
-# Import Python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
-# Import Salt libs
 import salt.spm
 import salt.utils.parsers as parsers
 from salt.utils.verify import verify_env, verify_log
@@ -1,7 +1,3 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import absolute_import, print_function, unicode_literals
-
 import sys
 
 import salt.client.ssh
@@ -30,7 +30,7 @@ EX_SCP_NOT_FOUND = 14
 EX_CANTCREAT = 73
 
 
-class OptionsContainer(object):
+class OptionsContainer:
     """
     An empty class for holding instance attribute values.
     """
@@ -1,10 +1,7 @@
-# -*- coding: utf-8 -*-
 """
 Create ssh executor system
 """
-from __future__ import absolute_import, print_function
 
-# Import python libs
 import logging
 import os
 import shutil
@@ -13,8 +10,6 @@ import tempfile
 from contextlib import closing
 
 import salt.client.ssh
-
-# Import salt libs
 import salt.client.ssh.shell
 import salt.loader
 import salt.minion
@@ -28,9 +23,6 @@ import salt.utils.thin
 import salt.utils.url
 import salt.utils.verify
 
-# Import 3rd-party libs
-from salt.ext import six
-
 log = logging.getLogger(__name__)
 
 
@@ -41,7 +33,7 @@ class SSHState(salt.state.State):
 
     def __init__(self, opts, pillar=None, wrapper=None):
         self.wrapper = wrapper
-        super(SSHState, self).__init__(opts, pillar)
+        super().__init__(opts, pillar)
 
     def load_modules(self, data=None, proxy=None):
         """
@@ -163,7 +155,7 @@ def salt_refs(data, ret=None):
     proto = "salt://"
     if ret is None:
         ret = []
-    if isinstance(data, six.string_types):
+    if isinstance(data, str):
         if data.startswith(proto) and data not in ret:
             ret.append(data)
     if isinstance(data, list):
@@ -211,7 +203,7 @@ def prep_trans_tar(
        cachedir = os.path.join("salt-ssh", id_).rstrip(os.sep)
    except AttributeError:
        # Minion ID should always be a str, but don't let an int break this
-        cachedir = os.path.join("salt-ssh", six.text_type(id_)).rstrip(os.sep)
+        cachedir = os.path.join("salt-ssh", str(id_)).rstrip(os.sep)
 
    for saltenv in file_refs:
        # Location where files in this saltenv will be cached
@@ -226,7 +218,7 @@ def prep_trans_tar(
                cache_dest = os.path.join(cache_dest_root, short)
                try:
                    path = file_client.cache_file(name, saltenv, cachedir=cachedir)
-                except IOError:
+                except OSError:
                    path = ""
                if path:
                    tgt = os.path.join(env_root, short)
@@ -237,7 +229,7 @@ def prep_trans_tar(
                    continue
                try:
                    files = file_client.cache_dir(name, saltenv, cachedir=cachedir)
-                except IOError:
+                except OSError:
                    files = ""
                if files:
                    for filename in files:
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 The ssh client wrapper system contains the routines that are used to alter
 how executions are run in the salt-ssh system, this allows for state routines
@@ -6,23 +5,16 @@ to be easily rewritten to execute in a way that makes them do the same tasks
 as ZeroMQ salt, but via ssh.
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function
-
 import copy
 
 import salt.client.ssh
 
-# Import salt libs
 import salt.loader
 import salt.utils.data
 import salt.utils.json
-
-# Import 3rd-party libs
-from salt.ext import six
 
 
-class FunctionWrapper(object):
+class FunctionWrapper:
     """
     Create an object that acts like the salt function dict and makes function
     calls remotely via the SSH shell system
@@ -41,7 +33,7 @@ class FunctionWrapper(object):
        minion_opts=None,
        **kwargs
    ):
-        super(FunctionWrapper, self).__init__()
+        super().__init__()
        self.cmd_prefix = cmd_prefix
        self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
        self.opts = opts
@@ -95,7 +87,7 @@ class FunctionWrapper(object):
            # We're in an inner FunctionWrapper as created by the code block
            # above. Reconstruct the original cmd in the form 'cmd.run' and
            # then evaluate as normal
-            cmd = "{0}.{1}".format(self.cmd_prefix, cmd)
+            cmd = "{}.{}".format(self.cmd_prefix, cmd)
 
            if cmd in self.wfuncs:
                return self.wfuncs[cmd]
@@ -111,10 +103,10 @@ class FunctionWrapper(object):
            argv.extend([salt.utils.json.dumps(arg) for arg in args])
            argv.extend(
                [
-                    "{0}={1}".format(
+                    "{}={}".format(
                        salt.utils.stringutils.to_str(key), salt.utils.json.dumps(val)
                    )
-                    for key, val in six.iteritems(kwargs)
+                    for key, val in kwargs.items()
                ]
            )
            single = salt.client.ssh.Single(
@@ -159,14 +151,14 @@ class FunctionWrapper(object):
            # containing only 'cmd' module calls, in that case. We don't
            # support assigning directly to prefixes in this way
            raise KeyError(
-                "Cannot assign to module key {0} in the " "FunctionWrapper".format(cmd)
+                "Cannot assign to module key {} in the " "FunctionWrapper".format(cmd)
            )
 
        if self.cmd_prefix:
            # We're in an inner FunctionWrapper as created by the first code
            # block in __getitem__. Reconstruct the original cmd in the form
            # 'cmd.run' and then evaluate as normal
-            cmd = "{0}.{1}".format(self.cmd_prefix, cmd)
+            cmd = "{}.{}".format(self.cmd_prefix, cmd)
 
            if cmd in self.wfuncs:
                self.wfuncs[cmd] = value
@@ -1,17 +1,12 @@
-# -*- coding: utf-8 -*-
-
 """
 Wrapper function for mine operations for salt-ssh
 
 .. versionadded:: 2015.5.0
 """
 
-# Import python libs
-from __future__ import absolute_import, print_function
 
 import copy
 
-# Import salt libs
 import salt.client.ssh
 
 
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 .. versionadded:: 2015.5.0
 
@@ -9,13 +8,10 @@ salt-ssh calls and return the data from them.
 
 No access control is needed because calls cannot originate from the minions.
 """
-# Import python libs
-from __future__ import absolute_import, print_function
 
 import copy
 import logging
 
-# Import salt libs
 import salt.client.ssh
 import salt.runner
 import salt.utils.args
@@ -1,9 +1,6 @@
-# -*- coding: utf-8 -*-
 """
 Wrap the saltcheck module to copy files to ssh minion before running tests
 """
-# Import Python libs
-from __future__ import absolute_import, print_function
 
 import logging
 import os
@@ -14,8 +11,6 @@ from contextlib import closing
 
 import salt.utils.files
 import salt.utils.json
-
-# Import salt libs
 import salt.utils.url
 
 log = logging.getLogger(__name__)
@@ -33,9 +28,9 @@ def update_master_cache(states, saltenv="base"):
    # Setup for copying states to gendir
    gendir = tempfile.mkdtemp()
    trans_tar = salt.utils.files.mkstemp()
-    if "cp.fileclient_{0}".format(id(__opts__)) not in __context__:
+    if "cp.fileclient_{}".format(id(__opts__)) not in __context__:
        __context__[
-            "cp.fileclient_{0}".format(id(__opts__))
+            "cp.fileclient_{}".format(id(__opts__))
        ] = salt.fileclient.get_file_client(__opts__)
 
    # generate cp.list_states output and save to gendir
@@ -64,7 +59,7 @@ def update_master_cache(states, saltenv="base"):
        log.debug("copying %s to %s", state_name, gendir)
        qualified_name = salt.utils.url.create(state_name, saltenv)
        # Duplicate cp.get_dir to gendir
-        copy_result = __context__["cp.fileclient_{0}".format(id(__opts__))].get_dir(
+        copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir(
            qualified_name, gendir, saltenv
        )
        if copy_result:
@@ -82,7 +77,7 @@ def update_master_cache(states, saltenv="base"):
        else:
            qualified_name = salt.utils.url.create(state_name, saltenv)
            copy_result = __context__[
-                "cp.fileclient_{0}".format(id(__opts__))
+                "cp.fileclient_{}".format(id(__opts__))
            ].get_dir(qualified_name, gendir, saltenv)
            if copy_result:
                copy_result = [
@@ -117,7 +112,7 @@ def update_master_cache(states, saltenv="base"):
    # Clean up local tar
    try:
        os.remove(trans_tar)
-    except (OSError, IOError):
+    except OSError:
        pass
 
    tar_path = os.path.join(thin_dir, os.path.basename(trans_tar))
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Primary interfaces for the salt-cloud system
 """
@@ -11,14 +10,11 @@ Primary interfaces for the salt-cloud system
 # The cli, master and cloud configs will merge for opts
 # the VM data will be in opts['profiles']
 
-# Import python libs
-from __future__ import absolute_import, print_function, unicode_literals
 
 import logging
 import os
 import sys
 
-# Import salt libs
 import salt.cloud
 import salt.config
 import salt.defaults.exitcodes
@@ -28,10 +24,6 @@ import salt.utils.cloud
 import salt.utils.parsers
 import salt.utils.user
 from salt.exceptions import SaltCloudException, SaltCloudSystemExit
-
-# Import 3rd-party libs
-from salt.ext import six
-from salt.ext.six.moves import input
 from salt.utils.verify import check_user, verify_env, verify_log, verify_log_files
 
 log = logging.getLogger(__name__)
@@ -72,7 +64,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
                if logfile is not None:
                    # Logfile is not using Syslog, verify
                    verify_log_files([logfile], salt_master_user)
-        except (IOError, OSError) as err:
+        except OSError as err:
            log.error("Error while verifying the environment: %s", err)
            sys.exit(err.errno)
 
@@ -185,7 +177,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
        if map_file is not None:
            if names != ():
                msg = (
-                    "Supplying a mapfile, '{0}', in addition to instance names {1} "
+                    "Supplying a mapfile, '{}', in addition to instance names {} "
                    "with the '--destroy' or '-d' function is not supported. "
                    "Please choose to delete either the entire map file or individual "
                    "instances.".format(map_file, names)
@@ -205,12 +197,12 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
 
            msg = "The following virtual machines are set to be destroyed:\n"
            names = set()
-            for alias, drivers in six.iteritems(matching):
-                msg += " {0}:\n".format(alias)
-                for driver, vms in six.iteritems(drivers):
-                    msg += " {0}:\n".format(driver)
+            for alias, drivers in matching.items():
+                msg += " {}:\n".format(alias)
+                for driver, vms in drivers.items():
+                    msg += " {}:\n".format(driver)
                    for name in vms:
-                        msg += " {0}\n".format(name)
+                        msg += " {}\n".format(name)
                        names.add(name)
            # pylint: disable=broad-except
            try:
@@ -238,7 +230,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            machines = []
            msg = (
                "The following virtual machines are set to be actioned with "
-                '"{0}":\n'.format(self.options.action)
+                '"{}":\n'.format(self.options.action)
            )
            for name in names:
                if "=" in name:
@@ -246,7 +238,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
                    key, value = name.split("=", 1)
                    kwargs[key] = value
                else:
-                    msg += " {0}\n".format(name)
+                    msg += " {}\n".format(name)
                    machines.append(name)
            names = machines
 
@@ -272,7 +264,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
                self.error(
                    "Any arguments passed to --function need to be passed "
                    "as kwargs. Ex: image=ami-54cf5c3d. Remaining "
-                    "arguments: {0}".format(args)
+                    "arguments: {}".format(args)
                )
            # pylint: disable=broad-except
            try:
@@ -295,7 +287,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
 
        elif self.options.set_password:
            username = self.credential_username
-            provider_name = "salt.cloud.provider.{0}".format(self.credential_provider)
+            provider_name = "salt.cloud.provider.{}".format(self.credential_provider)
            # TODO: check if provider is configured
            # set the password
            salt.utils.cloud.store_password_in_keyring(provider_name, username)
@@ -315,8 +307,8 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            if "errors" in dmap:
                # display profile errors
                msg += "Found the following errors:\n"
-                for profile_name, error in six.iteritems(dmap["errors"]):
-                    msg += " {0}: {1}\n".format(profile_name, error)
+                for profile_name, error in dmap["errors"].items():
+                    msg += " {}: {}\n".format(profile_name, error)
                sys.stderr.write(msg)
                sys.stderr.flush()
 
@@ -324,19 +316,19 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            if "existing" in dmap:
                msg += "The following virtual machines already exist:\n"
                for name in dmap["existing"]:
-                    msg += " {0}\n".format(name)
+                    msg += " {}\n".format(name)
 
            if dmap["create"]:
                msg += "The following virtual machines are set to be " "created:\n"
                for name in dmap["create"]:
-                    msg += " {0}\n".format(name)
+                    msg += " {}\n".format(name)
 
            if "destroy" in dmap:
                msg += (
                    "The following virtual machines are set to be " "destroyed:\n"
                )
                for name in dmap["destroy"]:
-                    msg += " {0}\n".format(name)
+                    msg += " {}\n".format(name)
 
            if not dmap["create"] and not dmap.get("destroy", None):
                if not dmap.get("existing", None):
@@ -389,7 +381,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            if args:
                self.error(
                    "Any arguments passed to --bootstrap need to be passed as "
-                    "kwargs. Ex: ssh_username=larry. Remaining arguments: {0}".format(
+                    "kwargs. Ex: ssh_username=larry. Remaining arguments: {}".format(
                        args
                    )
                )
@@ -425,7 +417,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
                # This is a salt cloud system exit
                if exc.exit_code > 0:
                    # the exit code is bigger than 0, it's an error
-                    msg = "Error: {0}".format(msg)
+                    msg = "Error: {}".format(msg)
                self.exit(exc.exit_code, msg.format(exc).rstrip() + "\n")
            # It's not a system exit but it's an error we can
            # handle
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
salt.cloud.exceptions
|
salt.cloud.exceptions
|
||||||
~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
@ -7,9 +6,7 @@
|
||||||
|
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.defaults.exitcodes
|
import salt.defaults.exitcodes
|
||||||
from salt.exceptions import SaltException
|
from salt.exceptions import SaltException
|
||||||
|
|
||||||
|
@ -27,7 +24,7 @@ class SaltCloudSystemExit(SaltCloudException):
|
||||||
|
|
||||||
def __init__(self, message, exit_code=salt.defaults.exitcodes.EX_GENERIC):
|
def __init__(self, message, exit_code=salt.defaults.exitcodes.EX_GENERIC):
|
||||||
SaltCloudException.__init__(self, message)
|
SaltCloudException.__init__(self, message)
|
||||||
self.message = "{0} [WARNING: salt.cloud.exceptions is deprecated. Please migrate to salt.exceptions!]".format(
|
self.message = "{} [WARNING: salt.cloud.exceptions is deprecated. Please migrate to salt.exceptions!]".format(
|
||||||
message
|
message
|
||||||
)
|
)
|
||||||
self.exit_code = exit_code
|
self.exit_code = exit_code
|
||||||
|
|
|
@ -9,6 +9,7 @@ import re
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import types
|
import types
|
||||||
|
import urllib.parse
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
|
|
||||||
import salt.defaults.exitcodes
|
import salt.defaults.exitcodes
|
||||||
|
@ -28,12 +29,6 @@ import salt.utils.xdg
|
||||||
import salt.utils.yaml
|
import salt.utils.yaml
|
||||||
import salt.utils.zeromq
|
import salt.utils.zeromq
|
||||||
|
|
||||||
# pylint: disable=import-error,no-name-in-module
|
|
||||||
from salt.ext.six.moves.urllib.parse import urlparse
|
|
||||||
|
|
||||||
# pylint: enable=import-error,no-name-in-module
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import psutil
|
import psutil
|
||||||
|
|
||||||
|
@ -2387,7 +2382,7 @@ def syndic_config(
|
||||||
]
|
]
|
||||||
for config_key in ("log_file", "key_logfile", "syndic_log_file"):
|
for config_key in ("log_file", "key_logfile", "syndic_log_file"):
|
||||||
# If this is not a URI and instead a local path
|
# If this is not a URI and instead a local path
|
||||||
if urlparse(opts.get(config_key, "")).scheme == "":
|
if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "":
|
||||||
prepend_root_dirs.append(config_key)
|
prepend_root_dirs.append(config_key)
|
||||||
prepend_root_dir(opts, prepend_root_dirs)
|
prepend_root_dir(opts, prepend_root_dirs)
|
||||||
return opts
|
return opts
|
||||||
|
@ -2638,7 +2633,7 @@ def cloud_config(
|
||||||
|
|
||||||
# prepend root_dir
|
# prepend root_dir
|
||||||
prepend_root_dirs = ["cachedir"]
|
prepend_root_dirs = ["cachedir"]
|
||||||
if "log_file" in opts and urlparse(opts["log_file"]).scheme == "":
|
if "log_file" in opts and urllib.parse.urlparse(opts["log_file"]).scheme == "":
|
||||||
prepend_root_dirs.append(opts["log_file"])
|
prepend_root_dirs.append(opts["log_file"])
|
||||||
prepend_root_dir(opts, prepend_root_dirs)
|
prepend_root_dir(opts, prepend_root_dirs)
|
||||||
|
|
||||||
|
@ -3707,7 +3702,7 @@ def apply_minion_config(
|
||||||
|
|
||||||
# These can be set to syslog, so, not actual paths on the system
|
# These can be set to syslog, so, not actual paths on the system
|
||||||
for config_key in ("log_file", "key_logfile"):
|
for config_key in ("log_file", "key_logfile"):
|
||||||
if urlparse(opts.get(config_key, "")).scheme == "":
|
if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "":
|
||||||
prepend_root_dirs.append(config_key)
|
prepend_root_dirs.append(config_key)
|
||||||
|
|
||||||
prepend_root_dir(opts, prepend_root_dirs)
|
prepend_root_dir(opts, prepend_root_dirs)
|
||||||
|
@ -3915,7 +3910,7 @@ def apply_master_config(overrides=None, defaults=None):
|
||||||
if log_setting is None:
|
if log_setting is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if urlparse(log_setting).scheme == "":
|
if urllib.parse.urlparse(log_setting).scheme == "":
|
||||||
prepend_root_dirs.append(config_key)
|
prepend_root_dirs.append(config_key)
|
||||||
|
|
||||||
prepend_root_dir(opts, prepend_root_dirs)
|
prepend_root_dir(opts, prepend_root_dirs)
|
||||||
|
@ -4116,7 +4111,7 @@ def apply_spm_config(overrides, defaults):
|
||||||
if log_setting is None:
|
if log_setting is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if urlparse(log_setting).scheme == "":
|
if urllib.parse.urlparse(log_setting).scheme == "":
|
||||||
prepend_root_dirs.append(config_key)
|
prepend_root_dirs.append(config_key)
|
||||||
|
|
||||||
prepend_root_dir(opts, prepend_root_dirs)
|
prepend_root_dir(opts, prepend_root_dirs)
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
|
|
||||||
|
@ -9,10 +8,7 @@
|
||||||
Common salt configuration schemas
|
Common salt configuration schemas
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt.utils.schema import ArrayItem, OneOfItem, Schema, StringItem
|
from salt.utils.schema import ArrayItem, OneOfItem, Schema, StringItem
|
||||||
|
|
||||||
|
|
||||||
|
@ -30,17 +26,15 @@ class DefaultIncludeConfig(StringItem):
|
||||||
description = __doc__
|
description = __doc__
|
||||||
|
|
||||||
def __init__(self, default=None, pattern=None, **kwargs):
|
def __init__(self, default=None, pattern=None, **kwargs):
|
||||||
default = "{0}/*.conf".format(self.__confd_directory__)
|
default = "{}/*.conf".format(self.__confd_directory__)
|
||||||
pattern = r"(?:.*)/\*\.conf"
|
pattern = r"(?:.*)/\*\.conf"
|
||||||
super(DefaultIncludeConfig, self).__init__(
|
super().__init__(default=default, pattern=pattern, **kwargs)
|
||||||
default=default, pattern=pattern, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
def __validate_attributes__(self):
|
def __validate_attributes__(self):
|
||||||
self.__doc__ = DefaultIncludeConfig.__doc__.format(
|
self.__doc__ = DefaultIncludeConfig.__doc__.format(
|
||||||
self.__target__, self.__confd_directory__
|
self.__target__, self.__confd_directory__
|
||||||
)
|
)
|
||||||
super(DefaultIncludeConfig, self).__validate_attributes__()
|
super().__validate_attributes__()
|
||||||
|
|
||||||
def __get_description__(self):
|
def __get_description__(self):
|
||||||
return self.__doc__.format(self.__target__, self.__confd_directory__)
|
return self.__doc__.format(self.__target__, self.__confd_directory__)
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
||||||
|
|
||||||
|
@ -9,10 +8,7 @@
|
||||||
ESX Cluster configuration schemas
|
ESX Cluster configuration schemas
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
from salt.utils.schema import (
|
from salt.utils.schema import (
|
||||||
AnyOfItem,
|
AnyOfItem,
|
||||||
ArrayItem,
|
ArrayItem,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
||||||
|
|
||||||
|
@ -9,10 +8,7 @@
|
||||||
ESX Datacenter configuration schemas
|
ESX Datacenter configuration schemas
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
from salt.utils.schema import ArrayItem, IntegerItem, Schema, StringItem
|
from salt.utils.schema import ArrayItem, IntegerItem, Schema, StringItem
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
:codeauthor: :email:`Alexandru Bleotu (alexandru.bleotu@morganstanley.com)`
|
||||||
|
|
||||||
|
@ -9,10 +8,7 @@
|
||||||
ESXi host configuration schemas
|
ESXi host configuration schemas
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
from salt.utils.schema import (
|
from salt.utils.schema import (
|
||||||
ArrayItem,
|
ArrayItem,
|
||||||
BooleanItem,
|
BooleanItem,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: :email:`Agnes Tevesz (agnes.tevesz@morganstanley.com)`
|
:codeauthor: :email:`Agnes Tevesz (agnes.tevesz@morganstanley.com)`
|
||||||
|
|
||||||
|
@ -8,8 +7,6 @@
|
||||||
ESX Virtual Machine configuration schemas
|
ESX Virtual Machine configuration schemas
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
from salt.utils.schema import (
|
from salt.utils.schema import (
|
||||||
AnyOfItem,
|
AnyOfItem,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
|
|
||||||
|
@ -8,12 +7,8 @@
|
||||||
Minion configuration schema
|
Minion configuration schema
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
from salt.config.schemas.common import IncludeConfig, MinionDefaultInclude
|
from salt.config.schemas.common import IncludeConfig, MinionDefaultInclude
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt.utils.schema import IPv4Item, Schema
|
from salt.utils.schema import IPv4Item, Schema
|
||||||
|
|
||||||
# XXX: THIS IS WAY TOO MINIMAL, BUT EXISTS TO IMPLEMENT salt-ssh
|
# XXX: THIS IS WAY TOO MINIMAL, BUT EXISTS TO IMPLEMENT salt-ssh
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
|
|
||||||
|
@ -9,12 +8,8 @@
|
||||||
Salt SSH related configuration schemas
|
Salt SSH related configuration schemas
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
from salt.config.schemas.minion import MinionConfiguration
|
from salt.config.schemas.minion import MinionConfiguration
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
from salt.utils.schema import (
|
from salt.utils.schema import (
|
||||||
AnyOfItem,
|
AnyOfItem,
|
||||||
BooleanItem,
|
BooleanItem,
|
||||||
|
|
|
@ -7,8 +7,6 @@ import logging
|
||||||
import sys
|
import sys
|
||||||
from collections.abc import Iterable, Mapping, Sequence
|
from collections.abc import Iterable, Mapping, Sequence
|
||||||
|
|
||||||
from salt.ext import six
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Default values, to be imported elsewhere in Salt code
|
Default values, to be imported elsewhere in Salt code
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Classification of Salt exit codes. These are intended to augment
|
Classification of Salt exit codes. These are intended to augment
|
||||||
universal exit codes (found in Python's `os` module with the `EX_`
|
universal exit codes (found in Python's `os` module with the `EX_`
|
||||||
|
|
|
@ -7,7 +7,6 @@ import logging
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import salt.defaults.exitcodes
|
import salt.defaults.exitcodes
|
||||||
from salt.ext import six
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@ -42,12 +41,9 @@ class SaltException(Exception):
|
||||||
|
|
||||||
if not isinstance(message, str):
|
if not isinstance(message, str):
|
||||||
message = str(message)
|
message = str(message)
|
||||||
# pylint: disable=incompatible-py3-code,undefined-variable
|
super().__init__(salt.utils.stringutils.to_str(message))
|
||||||
if six.PY3 or isinstance(message, unicode):
|
self.message = self.strerror = message
|
||||||
super().__init__(salt.utils.stringutils.to_str(message))
|
if isinstance(message, str):
|
||||||
self.message = self.strerror = message
|
|
||||||
# pylint: enable=incompatible-py3-code,undefined-variable
|
|
||||||
elif isinstance(message, str):
|
|
||||||
super().__init__(message)
|
super().__init__(message)
|
||||||
self.message = self.strerror = salt.utils.stringutils.to_unicode(message)
|
self.message = self.strerror = salt.utils.stringutils.to_unicode(message)
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Executors Directory
|
Executors Directory
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Direct call executor module
|
Direct call executor module
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
|
|
||||||
def execute(opts, data, func, args, kwargs):
|
def execute(opts, data, func, args, kwargs):
|
||||||
|
|
|
@ -1,9 +1,6 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Splay function calls across targeted minions
|
Splay function calls across targeted minions
|
||||||
"""
|
"""
|
||||||
# Import Python Libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
|
|
|
@ -1,17 +1,12 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Sudo executor module
|
Sudo executor module
|
||||||
"""
|
"""
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
import shlex
|
||||||
|
|
||||||
import salt.syspaths
|
import salt.syspaths
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.json
|
import salt.utils.json
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
from salt.ext import six
|
|
||||||
from salt.ext.six.moves import shlex_quote as _cmd_quote
|
|
||||||
|
|
||||||
__virtualname__ = "sudo"
|
__virtualname__ = "sudo"
|
||||||
|
|
||||||
|
@ -67,9 +62,9 @@ def execute(opts, data, func, args, kwargs):
|
||||||
if data["fun"] in ("state.sls", "state.highstate", "state.apply"):
|
if data["fun"] in ("state.sls", "state.highstate", "state.apply"):
|
||||||
kwargs["concurrent"] = True
|
kwargs["concurrent"] = True
|
||||||
for arg in args:
|
for arg in args:
|
||||||
cmd.append(_cmd_quote(six.text_type(arg)))
|
cmd.append(shlex.quote(str(arg)))
|
||||||
for key in kwargs:
|
for key in kwargs:
|
||||||
cmd.append(_cmd_quote("{0}={1}".format(key, kwargs[key])))
|
cmd.append(shlex.quote("{}={}".format(key, kwargs[key])))
|
||||||
|
|
||||||
cmd_ret = __salt__["cmd.run_all"](cmd, use_vt=True, python_shell=False)
|
cmd_ret = __salt__["cmd.run_all"](cmd, use_vt=True, python_shell=False)
|
||||||
|
|
||||||
|
|
|
@ -17,7 +17,6 @@ import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.url
|
import salt.utils.url
|
||||||
import salt.utils.versions
|
import salt.utils.versions
|
||||||
from salt.ext import six
|
|
||||||
from salt.utils.args import get_function_argspec as _argspec
|
from salt.utils.args import get_function_argspec as _argspec
|
||||||
from salt.utils.decorators import ensure_unicode_args
|
from salt.utils.decorators import ensure_unicode_args
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
The backend for serving files from the Azure blob storage service.
|
The backend for serving files from the Azure blob storage service.
|
||||||
|
|
||||||
|
@ -46,15 +45,12 @@ permissions.
|
||||||
Do not include the leading ? for sas_token if generated from the web
|
Do not include the leading ? for sas_token if generated from the web
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import base64
|
import base64
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.fileserver
|
import salt.fileserver
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.gzip_util
|
import salt.utils.gzip_util
|
||||||
|
@ -62,9 +58,6 @@ import salt.utils.hashutils
|
||||||
import salt.utils.json
|
import salt.utils.json
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
|
|
||||||
# Import third party libs
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.utils.versions import LooseVersion
|
from salt.utils.versions import LooseVersion
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -165,7 +158,7 @@ def serve_file(load, fnd):
|
||||||
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
||||||
fp_.seek(load["loc"])
|
fp_.seek(load["loc"])
|
||||||
data = fp_.read(__opts__["file_buffer_size"])
|
data = fp_.read(__opts__["file_buffer_size"])
|
||||||
if data and six.PY3 and not salt.utils.files.is_binary(fpath):
|
if data and not salt.utils.files.is_binary(fpath):
|
||||||
data = data.decode(__salt_system_encoding__)
|
data = data.decode(__salt_system_encoding__)
|
||||||
if gzip and data:
|
if gzip and data:
|
||||||
data = salt.utils.gzip_util.compress(data, gzip)
|
data = salt.utils.gzip_util.compress(data, gzip)
|
||||||
|
@ -286,7 +279,7 @@ def file_hash(load, fnd):
|
||||||
hashdest = salt.utils.path.join(
|
hashdest = salt.utils.path.join(
|
||||||
hash_cachedir,
|
hash_cachedir,
|
||||||
load["saltenv"],
|
load["saltenv"],
|
||||||
"{0}.hash.{1}".format(relpath, __opts__["hash_type"]),
|
"{}.hash.{}".format(relpath, __opts__["hash_type"]),
|
||||||
)
|
)
|
||||||
if not os.path.isfile(hashdest):
|
if not os.path.isfile(hashdest):
|
||||||
if not os.path.exists(os.path.dirname(hashdest)):
|
if not os.path.exists(os.path.dirname(hashdest)):
|
||||||
|
@ -350,7 +343,7 @@ def _get_container_path(container):
|
||||||
and saltenv, separated by underscores
|
and saltenv, separated by underscores
|
||||||
"""
|
"""
|
||||||
root = os.path.join(__opts__["cachedir"], "azurefs")
|
root = os.path.join(__opts__["cachedir"], "azurefs")
|
||||||
container_dir = "{0}_{1}_{2}".format(
|
container_dir = "{}_{}_{}".format(
|
||||||
container.get("account_name", ""),
|
container.get("account_name", ""),
|
||||||
container.get("container_name", ""),
|
container.get("container_name", ""),
|
||||||
container.get("saltenv", "base"),
|
container.get("saltenv", "base"),
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Git Fileserver Backend
|
Git Fileserver Backend
|
||||||
|
|
||||||
|
@ -48,12 +47,9 @@ Walkthrough <tutorial-gitfs>`.
|
||||||
.. _GitPython: https://github.com/gitpython-developers/GitPython
|
.. _GitPython: https://github.com/gitpython-developers/GitPython
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.gitfs
|
import salt.utils.gitfs
|
||||||
from salt.exceptions import FileserverConfigError
|
from salt.exceptions import FileserverConfigError
|
||||||
|
|
||||||
|
|
|
@ -55,7 +55,6 @@ import salt.utils.stringutils
|
||||||
import salt.utils.url
|
import salt.utils.url
|
||||||
import salt.utils.versions
|
import salt.utils.versions
|
||||||
from salt.exceptions import FileserverConfigError
|
from salt.exceptions import FileserverConfigError
|
||||||
from salt.ext import six
|
|
||||||
from salt.utils.event import tagify
|
from salt.utils.event import tagify
|
||||||
|
|
||||||
VALID_BRANCH_METHODS = ("branches", "bookmarks", "mixed")
|
VALID_BRANCH_METHODS = ("branches", "bookmarks", "mixed")
|
||||||
|
@ -748,7 +747,7 @@ def serve_file(load, fnd):
|
||||||
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
||||||
fp_.seek(load["loc"])
|
fp_.seek(load["loc"])
|
||||||
data = fp_.read(__opts__["file_buffer_size"])
|
data = fp_.read(__opts__["file_buffer_size"])
|
||||||
if data and six.PY3 and not salt.utils.files.is_binary(fpath):
|
if data and not salt.utils.files.is_binary(fpath):
|
||||||
data = data.decode(__salt_system_encoding__)
|
data = data.decode(__salt_system_encoding__)
|
||||||
if gzip and data:
|
if gzip and data:
|
||||||
data = salt.utils.gzip_util.compress(data, gzip)
|
data = salt.utils.gzip_util.compress(data, gzip)
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Fileserver backend which serves files pushed to the Master
|
Fileserver backend which serves files pushed to the Master
|
||||||
|
|
||||||
|
@ -27,14 +26,10 @@ Other minionfs settings include: :conf_master:`minionfs_whitelist`,
|
||||||
.. seealso:: :ref:`tutorial-minionfs`
|
.. seealso:: :ref:`tutorial-minionfs`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.fileserver
|
import salt.fileserver
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.gzip_util
|
import salt.utils.gzip_util
|
||||||
|
@ -44,9 +39,6 @@ import salt.utils.stringutils
|
||||||
import salt.utils.url
|
import salt.utils.url
|
||||||
import salt.utils.versions
|
import salt.utils.versions
|
||||||
|
|
||||||
# Import third party libs
|
|
||||||
from salt.ext import six
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@ -142,7 +134,7 @@ def serve_file(load, fnd):
|
||||||
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
with salt.utils.files.fopen(fpath, "rb") as fp_:
|
||||||
fp_.seek(load["loc"])
|
fp_.seek(load["loc"])
|
||||||
data = fp_.read(__opts__["file_buffer_size"])
|
data = fp_.read(__opts__["file_buffer_size"])
|
||||||
if data and six.PY3 and not salt.utils.files.is_binary(fpath):
|
if data and not salt.utils.files.is_binary(fpath):
|
||||||
data = data.decode(__salt_system_encoding__)
|
data = data.decode(__salt_system_encoding__)
|
||||||
if gzip and data:
|
if gzip and data:
|
||||||
data = salt.utils.gzip_util.compress(data, gzip)
|
data = salt.utils.gzip_util.compress(data, gzip)
|
||||||
|
@ -192,7 +184,7 @@ def file_hash(load, fnd):
|
||||||
"minionfs",
|
"minionfs",
|
||||||
"hash",
|
"hash",
|
||||||
load["saltenv"],
|
load["saltenv"],
|
||||||
"{0}.hash.{1}".format(fnd["rel"], __opts__["hash_type"]),
|
"{}.hash.{}".format(fnd["rel"], __opts__["hash_type"]),
|
||||||
)
|
)
|
||||||
# if we have a cache, serve that if the mtime hasn't changed
|
# if we have a cache, serve that if the mtime hasn't changed
|
||||||
if os.path.exists(cache_path):
|
if os.path.exists(cache_path):
|
||||||
|
@ -228,7 +220,7 @@ def file_hash(load, fnd):
|
||||||
if not os.path.exists(cache_dir):
|
if not os.path.exists(cache_dir):
|
||||||
os.makedirs(cache_dir)
|
os.makedirs(cache_dir)
|
||||||
# save the cache object "hash:mtime"
|
# save the cache object "hash:mtime"
|
||||||
cache_object = "{0}:{1}".format(ret["hsum"], os.path.getmtime(path))
|
cache_object = "{}:{}".format(ret["hsum"], os.path.getmtime(path))
|
||||||
with salt.utils.files.flopen(cache_path, "w") as fp_:
|
with salt.utils.files.flopen(cache_path, "w") as fp_:
|
||||||
fp_.write(cache_object)
|
fp_.write(cache_object)
|
||||||
return ret
|
return ret
|
||||||
|
|
|
@ -83,6 +83,7 @@ import logging
|
||||||
import os
|
import os
|
||||||
import pickle
|
import pickle
|
||||||
import time
|
import time
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
import salt.fileserver as fs
|
import salt.fileserver as fs
|
||||||
import salt.modules
|
import salt.modules
|
||||||
|
@ -91,13 +92,6 @@ import salt.utils.gzip_util
|
||||||
import salt.utils.hashutils
|
import salt.utils.hashutils
|
||||||
import salt.utils.versions
|
import salt.utils.versions
|
||||||
|
|
||||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.ext.six.moves import filter
|
|
||||||
from salt.ext.six.moves.urllib.parse import quote as _quote
|
|
||||||
|
|
||||||
# pylint: enable=import-error,no-name-in-module,redefined-builtin
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
S3_CACHE_EXPIRE = 30 # cache for 30 seconds
|
S3_CACHE_EXPIRE = 30 # cache for 30 seconds
|
||||||
|
@ -240,7 +234,7 @@ def serve_file(load, fnd):
|
||||||
with salt.utils.files.fopen(cached_file_path, "rb") as fp_:
|
with salt.utils.files.fopen(cached_file_path, "rb") as fp_:
|
||||||
fp_.seek(load["loc"])
|
fp_.seek(load["loc"])
|
||||||
data = fp_.read(__opts__["file_buffer_size"])
|
data = fp_.read(__opts__["file_buffer_size"])
|
||||||
if data and six.PY3 and not salt.utils.files.is_binary(cached_file_path):
|
if data and not salt.utils.files.is_binary(cached_file_path):
|
||||||
data = data.decode(__salt_system_encoding__)
|
data = data.decode(__salt_system_encoding__)
|
||||||
if gzip and data:
|
if gzip and data:
|
||||||
data = salt.utils.gzip_util.compress(data, gzip)
|
data = salt.utils.gzip_util.compress(data, gzip)
|
||||||
|
@ -747,7 +741,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
|
||||||
service_url=service_url,
|
service_url=service_url,
|
||||||
verify_ssl=verify_ssl,
|
verify_ssl=verify_ssl,
|
||||||
location=location,
|
location=location,
|
||||||
path=_quote(path),
|
path=urllib.parse.quote(path),
|
||||||
local_file=cached_file_path,
|
local_file=cached_file_path,
|
||||||
full_headers=True,
|
full_headers=True,
|
||||||
path_style=path_style,
|
path_style=path_style,
|
||||||
|
@ -785,7 +779,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
|
||||||
service_url=service_url,
|
service_url=service_url,
|
||||||
verify_ssl=verify_ssl,
|
verify_ssl=verify_ssl,
|
||||||
location=location,
|
location=location,
|
||||||
path=_quote(path),
|
path=urllib.parse.quote(path),
|
||||||
local_file=cached_file_path,
|
local_file=cached_file_path,
|
||||||
path_style=path_style,
|
path_style=path_style,
|
||||||
https_enable=https_enable,
|
https_enable=https_enable,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Grains plugin directory
|
Grains plugin directory
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,14 +1,10 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate chronos proxy minion grains.
|
Generate chronos proxy minion grains.
|
||||||
|
|
||||||
.. versionadded:: 2015.8.2
|
.. versionadded:: 2015.8.2
|
||||||
|
|
||||||
"""
|
"""
|
||||||
# Import Python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
import salt.utils.http
|
import salt.utils.http
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
|
|
|
@ -1,17 +1,12 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate baseline proxy minion grains for cimc hosts.
|
Generate baseline proxy minion grains for cimc hosts.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python Libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import salt.proxy.cimc
|
import salt.proxy.cimc
|
||||||
|
|
||||||
# Import Salt Libs
|
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
__proxyenabled__ = ["cimc"]
|
__proxyenabled__ = ["cimc"]
|
||||||
|
|
|
@ -40,7 +40,6 @@ import salt.utils.path
|
||||||
import salt.utils.pkg.rpm
|
import salt.utils.pkg.rpm
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
from salt.ext.six.moves import range
|
|
||||||
from salt.utils.network import _get_interfaces
|
from salt.utils.network import _get_interfaces
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,7 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Detect disks
|
Detect disks
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import glob
|
import glob
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
@ -12,8 +9,6 @@ import re
|
||||||
# Solve the Chicken and egg problem where grains need to run before any
|
# Solve the Chicken and egg problem where grains need to run before any
|
||||||
# of the modules are loaded and are generally available for any usage.
|
# of the modules are loaded and are generally available for any usage.
|
||||||
import salt.modules.cmdmod
|
import salt.modules.cmdmod
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
@ -40,7 +35,7 @@ def disks():
|
||||||
log.trace("Disk grain does not support OS")
|
log.trace("Disk grain does not support OS")
|
||||||
|
|
||||||
|
|
||||||
class _geomconsts(object):
|
class _geomconsts:
|
||||||
GEOMNAME = "Geom name"
|
GEOMNAME = "Geom name"
|
||||||
MEDIASIZE = "Mediasize"
|
MEDIASIZE = "Mediasize"
|
||||||
SECTORSIZE = "Sectorsize"
|
SECTORSIZE = "Sectorsize"
|
||||||
|
@ -96,14 +91,14 @@ def _freebsd_geom():
|
||||||
geom = salt.utils.path.which("geom")
|
geom = salt.utils.path.which("geom")
|
||||||
ret = {"disks": {}, "ssds": []}
|
ret = {"disks": {}, "ssds": []}
|
||||||
|
|
||||||
devices = __salt__["cmd.run"]("{0} disk list".format(geom))
|
devices = __salt__["cmd.run"]("{} disk list".format(geom))
|
||||||
devices = devices.split("\n\n")
|
devices = devices.split("\n\n")
|
||||||
|
|
||||||
def parse_geom_attribs(device):
|
def parse_geom_attribs(device):
|
||||||
tmp = {}
|
tmp = {}
|
||||||
for line in device.split("\n"):
|
for line in device.split("\n"):
|
||||||
for attrib in _geom_attribs:
|
for attrib in _geom_attribs:
|
||||||
search = re.search(r"{0}:\s(.*)".format(attrib), line)
|
search = re.search(r"{}:\s(.*)".format(attrib), line)
|
||||||
if search:
|
if search:
|
||||||
value = _datavalue(
|
value = _datavalue(
|
||||||
_geomconsts._datatypes.get(attrib), search.group(1)
|
_geomconsts._datatypes.get(attrib), search.group(1)
|
||||||
|
@ -152,7 +147,7 @@ def _linux_disks():
|
||||||
"not report 0 or 1",
|
"not report 0 or 1",
|
||||||
device,
|
device,
|
||||||
)
|
)
|
||||||
except IOError:
|
except OSError:
|
||||||
pass
|
pass
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
@ -167,7 +162,7 @@ def _windows_disks():
|
||||||
ret = {"disks": [], "ssds": []}
|
ret = {"disks": [], "ssds": []}
|
||||||
|
|
||||||
cmdret = __salt__["cmd.run_all"](
|
cmdret = __salt__["cmd.run_all"](
|
||||||
"{0} /namespace:{1} path {2} get {3} /format:table".format(
|
"{} /namespace:{} path {} get {} /format:table".format(
|
||||||
wmic, namespace, path, get
|
wmic, namespace, path, get
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -179,7 +174,7 @@ def _windows_disks():
|
||||||
info = line.split()
|
info = line.split()
|
||||||
if len(info) != 2 or not info[0].isdigit() or not info[1].isdigit():
|
if len(info) != 2 or not info[0].isdigit() or not info[1].isdigit():
|
||||||
continue
|
continue
|
||||||
device = r"\\.\PhysicalDrive{0}".format(info[0])
|
device = r"\\.\PhysicalDrive{}".format(info[0])
|
||||||
mediatype = info[1]
|
mediatype = info[1]
|
||||||
if mediatype == "3":
|
if mediatype == "3":
|
||||||
log.trace("Device %s reports itself as an HDD", device)
|
log.trace("Device %s reports itself as an HDD", device)
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate baseline proxy minion grains for ESXi hosts.
|
Generate baseline proxy minion grains for ESXi hosts.
|
||||||
|
|
||||||
|
@ -6,8 +5,6 @@ Generate baseline proxy minion grains for ESXi hosts.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python Libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
|
|
@ -1,14 +1,6 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import third party libs
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.data
|
import salt.utils.data
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
|
@ -10,12 +10,10 @@ To enable these grains set ``fibre_channel_grains: True`` in the minion config.
|
||||||
|
|
||||||
fibre_channel_grains: True
|
fibre_channel_grains: True
|
||||||
"""
|
"""
|
||||||
# Import Python libs
|
|
||||||
|
|
||||||
import glob
|
import glob
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
import salt.modules.cmdmod
|
import salt.modules.cmdmod
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate baseline proxy minion grains for Dell FX2 chassis.
|
Generate baseline proxy minion grains for Dell FX2 chassis.
|
||||||
The challenge is that most of Salt isn't bootstrapped yet,
|
The challenge is that most of Salt isn't bootstrapped yet,
|
||||||
|
@ -6,7 +5,6 @@ so we need to repeat a bunch of things that would normally happen
|
||||||
in proxy/fx2.py--just enough to get data from the chassis to include
|
in proxy/fx2.py--just enough to get data from the chassis to include
|
||||||
in grains.
|
in grains.
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
|
|
@ -9,12 +9,10 @@ To enable these grains set `iscsi_grains: True` in the minion config.
|
||||||
|
|
||||||
iscsi_grains: True
|
iscsi_grains: True
|
||||||
"""
|
"""
|
||||||
# Import Python libs
|
|
||||||
|
|
||||||
import errno
|
import errno
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
import salt.modules.cmdmod
|
import salt.modules.cmdmod
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
|
|
|
@ -2,10 +2,8 @@
|
||||||
Detect LVM Volumes
|
Detect LVM Volumes
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.modules.cmdmod
|
import salt.modules.cmdmod
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
|
|
|
@ -1,11 +1,9 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate marathon proxy minion grains.
|
Generate marathon proxy minion grains.
|
||||||
|
|
||||||
.. versionadded:: 2015.8.2
|
.. versionadded:: 2015.8.2
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import salt.utils.http
|
import salt.utils.http
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
@ -42,9 +40,7 @@ def os_data():
|
||||||
|
|
||||||
def marathon():
|
def marathon():
|
||||||
response = salt.utils.http.query(
|
response = salt.utils.http.query(
|
||||||
"{0}/v2/info".format(
|
"{}/v2/info".format(__opts__["proxy"].get("base_url", "http://locahost:8080",)),
|
||||||
__opts__["proxy"].get("base_url", "http://locahost:8080",)
|
|
||||||
),
|
|
||||||
decode_type="json",
|
decode_type="json",
|
||||||
decode=True,
|
decode=True,
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,13 +1,9 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Detect MDADM RAIDs
|
Detect MDADM RAIDs
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
@ -28,7 +24,7 @@ def mdadm():
|
||||||
continue
|
continue
|
||||||
if " : " in line:
|
if " : " in line:
|
||||||
devices.add(line.split(" : ")[0])
|
devices.add(line.split(" : ")[0])
|
||||||
except IOError:
|
except OSError:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
devices = sorted(devices)
|
devices = sorted(devices)
|
||||||
|
|
|
@ -14,11 +14,9 @@ metadata server set `metadata_server_grains: True` in the minion config.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import os
|
import os
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.data
|
import salt.utils.data
|
||||||
import salt.utils.http as http
|
import salt.utils.http as http
|
||||||
import salt.utils.json
|
import salt.utils.json
|
||||||
|
|
|
@ -1,16 +1,11 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Set grains describing the minion process.
|
Set grains describing the minion process.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python Libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.user
|
import salt.utils.user
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -9,12 +9,10 @@ To enable these grains set `nvme_grains: True` in the minion config.
|
||||||
|
|
||||||
nvme_grains: True
|
nvme_grains: True
|
||||||
"""
|
"""
|
||||||
# Import Python libs
|
|
||||||
|
|
||||||
import errno
|
import errno
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
|
@ -1,17 +1,12 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate baseline proxy minion grains for panos hosts.
|
Generate baseline proxy minion grains for panos hosts.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import Python Libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import salt.proxy.panos
|
import salt.proxy.panos
|
||||||
|
|
||||||
# Import Salt Libs
|
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
__proxyenabled__ = ["panos"]
|
__proxyenabled__ = ["panos"]
|
||||||
|
|
|
@ -3,10 +3,8 @@ Grain that indicates the system is pending a reboot
|
||||||
See functions in salt.utils.win_system to see what conditions would indicate
|
See functions in salt.utils.win_system to see what conditions would indicate
|
||||||
a reboot is pending
|
a reboot is pending
|
||||||
"""
|
"""
|
||||||
# Import python libs
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
import salt.utils.win_system
|
import salt.utils.win_system
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
#
|
||||||
# Copyright 2015 SUSE LLC
|
# Copyright 2015 SUSE LLC
|
||||||
#
|
#
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate baseline proxy minion grains
|
Generate baseline proxy minion grains
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Generate baseline proxy minion grains
|
Generate baseline proxy minion grains
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
ZFS grain provider
|
ZFS grain provider
|
||||||
|
|
||||||
|
@ -10,16 +9,12 @@ ZFS grain provider
|
||||||
.. versionadded:: 2018.3.0
|
.. versionadded:: 2018.3.0
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Solve the Chicken and egg problem where grains need to run before any
|
# Solve the Chicken and egg problem where grains need to run before any
|
||||||
# of the modules are loaded and are generally available for any usage.
|
# of the modules are loaded and are generally available for any usage.
|
||||||
import salt.modules.cmdmod
|
import salt.modules.cmdmod
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.dictupdate
|
import salt.utils.dictupdate
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
|
|
||||||
|
@ -9,7 +8,6 @@
|
||||||
This is where Salt's logging gets set up. Currently, the required imports
|
This is where Salt's logging gets set up. Currently, the required imports
|
||||||
are made to assure backwards compatibility.
|
are made to assure backwards compatibility.
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import severals classes/functions from salt.log.setup for backwards
|
# Import severals classes/functions from salt.log.setup for backwards
|
||||||
# compatibility
|
# compatibility
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
salt.log.handlers
|
salt.log.handlers
|
||||||
~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~
|
||||||
|
@ -8,12 +7,9 @@
|
||||||
Custom logging handlers to be used in salt.
|
Custom logging handlers to be used in salt.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt._logging.handlers import (
|
from salt._logging.handlers import (
|
||||||
FileHandler,
|
FileHandler,
|
||||||
QueueHandler,
|
QueueHandler,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Fluent Logging Handler
|
Fluent Logging Handler
|
||||||
======================
|
======================
|
||||||
|
@ -73,8 +72,6 @@
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
|
@ -86,12 +83,7 @@ import types
|
||||||
|
|
||||||
import salt.utils.msgpack
|
import salt.utils.msgpack
|
||||||
import salt.utils.network
|
import salt.utils.network
|
||||||
|
|
||||||
# Import Third party libs
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.log.mixins import NewStyleClassMixIn
|
from salt.log.mixins import NewStyleClassMixIn
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt.log.setup import LOG_LEVELS
|
from salt.log.setup import LOG_LEVELS
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
@ -192,9 +184,9 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
self.tags = tags
|
self.tags = tags
|
||||||
self.msg_path = msg_path if msg_path else payload_type
|
self.msg_path = msg_path if msg_path else payload_type
|
||||||
self.msg_type = msg_type if msg_type else payload_type
|
self.msg_type = msg_type if msg_type else payload_type
|
||||||
format_func = "format_{0}_v{1}".format(payload_type, version).replace(".", "_")
|
format_func = "format_{}_v{}".format(payload_type, version).replace(".", "_")
|
||||||
self.format = getattr(self, format_func)
|
self.format = getattr(self, format_func)
|
||||||
super(MessageFormatter, self).__init__(fmt=None, datefmt=None)
|
super().__init__(fmt=None, datefmt=None)
|
||||||
|
|
||||||
def formatTime(self, record, datefmt=None):
|
def formatTime(self, record, datefmt=None):
|
||||||
if self.payload_type == "gelf": # GELF uses epoch times
|
if self.payload_type == "gelf": # GELF uses epoch times
|
||||||
|
@ -220,7 +212,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict.update({"full_message": exc_info})
|
message_dict.update({"full_message": exc_info})
|
||||||
|
|
||||||
# Add any extra attributes to the message field
|
# Add any extra attributes to the message field
|
||||||
for key, value in six.iteritems(record.__dict__):
|
for key, value in record.__dict__.items():
|
||||||
if key in (
|
if key in (
|
||||||
"args",
|
"args",
|
||||||
"asctime",
|
"asctime",
|
||||||
|
@ -245,13 +237,13 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
|
|
||||||
# pylint: disable=incompatible-py3-code
|
# pylint: disable=incompatible-py3-code
|
||||||
if isinstance(
|
if isinstance(
|
||||||
value, (six.string_types, bool, dict, float, int, list, types.NoneType)
|
value, ((str,), bool, dict, float, int, list, types.NoneType)
|
||||||
):
|
):
|
||||||
val = value
|
val = value
|
||||||
# pylint: enable=incompatible-py3-code
|
# pylint: enable=incompatible-py3-code
|
||||||
else:
|
else:
|
||||||
val = repr(value)
|
val = repr(value)
|
||||||
message_dict.update({"{0}".format(key): val})
|
message_dict.update({"{}".format(key): val})
|
||||||
return message_dict
|
return message_dict
|
||||||
|
|
||||||
def format_gelf_v1_1(self, record):
|
def format_gelf_v1_1(self, record):
|
||||||
|
@ -273,7 +265,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict.update({"full_message": exc_info})
|
message_dict.update({"full_message": exc_info})
|
||||||
|
|
||||||
# Add any extra attributes to the message field
|
# Add any extra attributes to the message field
|
||||||
for key, value in six.iteritems(record.__dict__):
|
for key, value in record.__dict__.items():
|
||||||
if key in (
|
if key in (
|
||||||
"args",
|
"args",
|
||||||
"asctime",
|
"asctime",
|
||||||
|
@ -298,14 +290,14 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
|
|
||||||
# pylint: disable=incompatible-py3-code
|
# pylint: disable=incompatible-py3-code
|
||||||
if isinstance(
|
if isinstance(
|
||||||
value, (six.string_types, bool, dict, float, int, list, types.NoneType)
|
value, ((str,), bool, dict, float, int, list, types.NoneType)
|
||||||
):
|
):
|
||||||
val = value
|
val = value
|
||||||
# pylint: enable=incompatible-py3-code
|
# pylint: enable=incompatible-py3-code
|
||||||
else:
|
else:
|
||||||
val = repr(value)
|
val = repr(value)
|
||||||
# GELF spec require "non-standard" fields to be prefixed with '_' (underscore).
|
# GELF spec require "non-standard" fields to be prefixed with '_' (underscore).
|
||||||
message_dict.update({"_{0}".format(key): val})
|
message_dict.update({"_{}".format(key): val})
|
||||||
|
|
||||||
return message_dict
|
return message_dict
|
||||||
|
|
||||||
|
@ -327,7 +319,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
"processName": record.processName,
|
"processName": record.processName,
|
||||||
},
|
},
|
||||||
"@message": record.getMessage(),
|
"@message": record.getMessage(),
|
||||||
"@source": "{0}://{1}/{2}".format(self.msg_type, host, self.msg_path),
|
"@source": "{}://{}/{}".format(self.msg_type, host, self.msg_path),
|
||||||
"@source_host": host,
|
"@source_host": host,
|
||||||
"@source_path": self.msg_path,
|
"@source_path": self.msg_path,
|
||||||
"@tags": self.tags,
|
"@tags": self.tags,
|
||||||
|
@ -338,7 +330,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict["@fields"]["exc_info"] = self.formatException(record.exc_info)
|
message_dict["@fields"]["exc_info"] = self.formatException(record.exc_info)
|
||||||
|
|
||||||
# Add any extra attributes to the message field
|
# Add any extra attributes to the message field
|
||||||
for key, value in six.iteritems(record.__dict__):
|
for key, value in record.__dict__.items():
|
||||||
if key in (
|
if key in (
|
||||||
"args",
|
"args",
|
||||||
"asctime",
|
"asctime",
|
||||||
|
@ -371,7 +363,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict["@fields"][key] = value
|
message_dict["@fields"][key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if isinstance(value, (six.string_types, bool, dict, float, int, list)):
|
if isinstance(value, ((str,), bool, dict, float, int, list)):
|
||||||
message_dict["@fields"][key] = value
|
message_dict["@fields"][key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -403,7 +395,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict["exc_info"] = self.formatException(record.exc_info)
|
message_dict["exc_info"] = self.formatException(record.exc_info)
|
||||||
|
|
||||||
# Add any extra attributes to the message field
|
# Add any extra attributes to the message field
|
||||||
for key, value in six.iteritems(record.__dict__):
|
for key, value in record.__dict__.items():
|
||||||
if key in (
|
if key in (
|
||||||
"args",
|
"args",
|
||||||
"asctime",
|
"asctime",
|
||||||
|
@ -436,7 +428,7 @@ class MessageFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict[key] = value
|
message_dict[key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if isinstance(value, (six.string_types, bool, dict, float, int, list)):
|
if isinstance(value, ((str,), bool, dict, float, int, list)):
|
||||||
message_dict[key] = value
|
message_dict[key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -470,7 +462,7 @@ class FluentHandler(logging.Handler):
|
||||||
self.release()
|
self.release()
|
||||||
|
|
||||||
|
|
||||||
class FluentSender(object):
|
class FluentSender:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
tag,
|
tag,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Log4Mongo Logging Handler
|
Log4Mongo Logging Handler
|
||||||
=========================
|
=========================
|
||||||
|
@ -34,18 +33,13 @@
|
||||||
This work was inspired by the Salt logging handlers for LogStash and
|
This work was inspired by the Salt logging handlers for LogStash and
|
||||||
Sentry and by the log4mongo Python implementation.
|
Sentry and by the log4mongo Python implementation.
|
||||||
"""
|
"""
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.log.mixins import NewStyleClassMixIn
|
from salt.log.mixins import NewStyleClassMixIn
|
||||||
from salt.log.setup import LOG_LEVELS
|
from salt.log.setup import LOG_LEVELS
|
||||||
|
|
||||||
# Import third party libs
|
|
||||||
try:
|
try:
|
||||||
from log4mongo.handlers import MongoHandler, MongoFormatter
|
from log4mongo.handlers import MongoHandler, MongoFormatter
|
||||||
|
|
||||||
|
@ -84,7 +78,7 @@ def setup_handlers():
|
||||||
}
|
}
|
||||||
|
|
||||||
config_opts = {}
|
config_opts = {}
|
||||||
for config_opt, arg_name in six.iteritems(config_fields):
|
for config_opt, arg_name in config_fields.items():
|
||||||
config_opts[arg_name] = __opts__[handler_id].get(config_opt)
|
config_opts[arg_name] = __opts__[handler_id].get(config_opt)
|
||||||
|
|
||||||
config_opts["level"] = LOG_LEVELS[
|
config_opts["level"] = LOG_LEVELS[
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Logstash Logging Handler
|
Logstash Logging Handler
|
||||||
========================
|
========================
|
||||||
|
@ -155,8 +154,6 @@
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
|
@ -166,12 +163,7 @@ import os
|
||||||
import salt.utils.json
|
import salt.utils.json
|
||||||
import salt.utils.network
|
import salt.utils.network
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
|
|
||||||
# Import Third party libs
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.log.mixins import NewStyleClassMixIn
|
from salt.log.mixins import NewStyleClassMixIn
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt.log.setup import LOG_LEVELS
|
from salt.log.setup import LOG_LEVELS
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -274,8 +266,8 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
self.msg_path = msg_path
|
self.msg_path = msg_path
|
||||||
self.msg_type = msg_type
|
self.msg_type = msg_type
|
||||||
self.version = version
|
self.version = version
|
||||||
self.format = getattr(self, "format_v{0}".format(version))
|
self.format = getattr(self, "format_v{}".format(version))
|
||||||
super(LogstashFormatter, self).__init__(fmt=None, datefmt=None)
|
super().__init__(fmt=None, datefmt=None)
|
||||||
|
|
||||||
def formatTime(self, record, datefmt=None):
|
def formatTime(self, record, datefmt=None):
|
||||||
return datetime.datetime.utcfromtimestamp(record.created).isoformat()[:-3] + "Z"
|
return datetime.datetime.utcfromtimestamp(record.created).isoformat()[:-3] + "Z"
|
||||||
|
@ -295,7 +287,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
"processName": record.processName,
|
"processName": record.processName,
|
||||||
},
|
},
|
||||||
"@message": record.getMessage(),
|
"@message": record.getMessage(),
|
||||||
"@source": "{0}://{1}/{2}".format(self.msg_type, host, self.msg_path),
|
"@source": "{}://{}/{}".format(self.msg_type, host, self.msg_path),
|
||||||
"@source_host": host,
|
"@source_host": host,
|
||||||
"@source_path": self.msg_path,
|
"@source_path": self.msg_path,
|
||||||
"@tags": ["salt"],
|
"@tags": ["salt"],
|
||||||
|
@ -306,7 +298,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict["@fields"]["exc_info"] = self.formatException(record.exc_info)
|
message_dict["@fields"]["exc_info"] = self.formatException(record.exc_info)
|
||||||
|
|
||||||
# Add any extra attributes to the message field
|
# Add any extra attributes to the message field
|
||||||
for key, value in six.iteritems(record.__dict__):
|
for key, value in record.__dict__.items():
|
||||||
if key in (
|
if key in (
|
||||||
"args",
|
"args",
|
||||||
"asctime",
|
"asctime",
|
||||||
|
@ -339,7 +331,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict["@fields"][key] = value
|
message_dict["@fields"][key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if isinstance(value, (six.string_types, bool, dict, float, int, list)):
|
if isinstance(value, ((str,), bool, dict, float, int, list)):
|
||||||
message_dict["@fields"][key] = value
|
message_dict["@fields"][key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -368,7 +360,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict["exc_info"] = self.formatException(record.exc_info)
|
message_dict["exc_info"] = self.formatException(record.exc_info)
|
||||||
|
|
||||||
# Add any extra attributes to the message field
|
# Add any extra attributes to the message field
|
||||||
for key, value in six.iteritems(record.__dict__):
|
for key, value in record.__dict__.items():
|
||||||
if key in (
|
if key in (
|
||||||
"args",
|
"args",
|
||||||
"asctime",
|
"asctime",
|
||||||
|
@ -401,7 +393,7 @@ class LogstashFormatter(logging.Formatter, NewStyleClassMixIn):
|
||||||
message_dict[key] = value
|
message_dict[key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if isinstance(value, (six.string_types, bool, dict, float, int, list)):
|
if isinstance(value, ((str,), bool, dict, float, int, list)):
|
||||||
message_dict[key] = value
|
message_dict[key] = value
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -424,7 +416,7 @@ class ZMQLogstashHander(logging.Handler, NewStyleClassMixIn):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, address, level=logging.NOTSET, zmq_hwm=1000):
|
def __init__(self, address, level=logging.NOTSET, zmq_hwm=1000):
|
||||||
super(ZMQLogstashHander, self).__init__(level=level)
|
super().__init__(level=level)
|
||||||
self._context = self._publisher = None
|
self._context = self._publisher = None
|
||||||
self._address = address
|
self._address = address
|
||||||
self._zmq_hwm = zmq_hwm
|
self._zmq_hwm = zmq_hwm
|
||||||
|
|
|
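
Two rewrites repeat throughout the logging handler hunks above: six.iteritems(d) becomes d.items(), and super(Class, self).__init__(...) becomes the zero-argument super(). A minimal stand-alone sketch of both patterns; DemoHandler is an illustrative class, not one from this commit:

    import logging

    class DemoHandler(logging.Handler):
        def __init__(self, level=logging.NOTSET):
            # zero-argument super() resolves the class and instance implicitly
            super().__init__(level=level)

        def emit(self, record):
            # dict.items() replaces six.iteritems(); on Python 3 it is a view, not a list
            for key, value in record.__dict__.items():
                pass
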
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Sentry Logging Handler
|
Sentry Logging Handler
|
||||||
======================
|
======================
|
||||||
|
@ -85,17 +84,13 @@
|
||||||
.. _`Raven`: https://raven.readthedocs.io
|
.. _`Raven`: https://raven.readthedocs.io
|
||||||
.. _`Raven client documentation`: https://raven.readthedocs.io/en/latest/config/index.html#client-arguments
|
.. _`Raven client documentation`: https://raven.readthedocs.io/en/latest/config/index.html#client-arguments
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.loader
|
import salt.loader
|
||||||
from salt.log import LOG_LEVELS
|
from salt.log import LOG_LEVELS
|
||||||
|
|
||||||
# Import 3rd party libs
|
|
||||||
try:
|
try:
|
||||||
import raven
|
import raven
|
||||||
from raven.handlers.logging import SentryHandler
|
from raven.handlers.logging import SentryHandler
|
||||||
|
@ -138,9 +133,7 @@ def setup_handlers():
|
||||||
transport_registry = TransportRegistry(default_transports)
|
transport_registry = TransportRegistry(default_transports)
|
||||||
url = urlparse(dsn)
|
url = urlparse(dsn)
|
||||||
if not transport_registry.supported_scheme(url.scheme):
|
if not transport_registry.supported_scheme(url.scheme):
|
||||||
-raise ValueError(
-    "Unsupported Sentry DSN scheme: {0}".format(url.scheme)
-)
+raise ValueError("Unsupported Sentry DSN scheme: {}".format(url.scheme))
||||||
except ValueError as exc:
|
except ValueError as exc:
|
||||||
log.info("Raven failed to parse the configuration provided DSN: %s", exc)
|
log.info("Raven failed to parse the configuration provided DSN: %s", exc)
|
||||||
|
|
||||||
|
@ -217,7 +210,7 @@ def setup_handlers():
|
||||||
if exclude_patterns:
|
if exclude_patterns:
|
||||||
filter_regexes = [re.compile(pattern) for pattern in exclude_patterns]
|
filter_regexes = [re.compile(pattern) for pattern in exclude_patterns]
|
||||||
|
|
||||||
class FilterExcludedMessages(object):
|
class FilterExcludedMessages:
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def filter(record):
|
def filter(record):
|
||||||
m = record.getMessage()
|
m = record.getMessage()
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||||
|
|
||||||
|
@ -11,10 +10,7 @@
|
||||||
Some mix-in classes to be used in salt's logging
|
Some mix-in classes to be used in salt's logging
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
# pylint: disable=unused-import
|
# pylint: disable=unused-import
|
||||||
from salt._logging.mixins import (
|
from salt._logging.mixins import (
|
||||||
ExcInfoOnLogLevelFormatMixin as ExcInfoOnLogLevelFormatMixIn,
|
ExcInfoOnLogLevelFormatMixin as ExcInfoOnLogLevelFormatMixIn,
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Metaproxy Directory
|
Metaproxy Directory
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Execution Module Directory
|
Execution Module Directory
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Manage account locks on AIX systems
|
Manage account locks on AIX systems
|
||||||
|
|
||||||
|
@ -7,7 +6,6 @@ Manage account locks on AIX systems
|
||||||
:depends: none
|
:depends: none
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import python librarie
|
# Import python librarie
|
||||||
import logging
|
import logging
|
||||||
|
@ -43,7 +41,7 @@ def login_failures(user):
|
||||||
salt <minion_id> shadow.login_failures ALL
|
salt <minion_id> shadow.login_failures ALL
|
||||||
"""
|
"""
|
||||||
|
|
||||||
cmd = "lsuser -a unsuccessful_login_count {0}".format(user)
|
cmd = "lsuser -a unsuccessful_login_count {}".format(user)
|
||||||
cmd += " | grep -E 'unsuccessful_login_count=([3-9]|[0-9][0-9]+)'"
|
cmd += " | grep -E 'unsuccessful_login_count=([3-9]|[0-9][0-9]+)'"
|
||||||
out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=True)
|
out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=True)
|
||||||
|
|
||||||
|
@ -67,7 +65,7 @@ def locked(user):
|
||||||
salt <minion_id> shadow.locked ALL
|
salt <minion_id> shadow.locked ALL
|
||||||
"""
|
"""
|
||||||
|
|
||||||
cmd = "lsuser -a account_locked {0}".format(user)
|
cmd = "lsuser -a account_locked {}".format(user)
|
||||||
cmd += ' | grep "account_locked=true"'
|
cmd += ' | grep "account_locked=true"'
|
||||||
out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=True)
|
out = __salt__["cmd.run_all"](cmd, output_loglevel="trace", python_shell=True)
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Support for Apache
|
Support for Apache
|
||||||
|
|
||||||
|
@ -9,43 +8,18 @@ Support for Apache
|
||||||
Debian-based system is detected.
|
Debian-based system is detected.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
from __future__ import (
|
|
||||||
absolute_import,
|
|
||||||
generators,
|
|
||||||
print_function,
|
|
||||||
unicode_literals,
|
|
||||||
with_statement,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
import io
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
import urllib.request
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.data
|
import salt.utils.data
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
from salt.exceptions import SaltException
|
from salt.exceptions import SaltException
|
||||||
|
|
||||||
# Import 3rd-party libs
|
|
||||||
# pylint: disable=import-error,no-name-in-module
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.ext.six.moves import cStringIO
|
|
||||||
from salt.ext.six.moves.urllib.error import URLError
|
|
||||||
from salt.ext.six.moves.urllib.request import (
|
|
||||||
HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
|
|
||||||
)
|
|
||||||
from salt.ext.six.moves.urllib.request import (
|
|
||||||
HTTPDigestAuthHandler as _HTTPDigestAuthHandler,
|
|
||||||
)
|
|
||||||
from salt.ext.six.moves.urllib.request import build_opener as _build_opener
|
|
||||||
from salt.ext.six.moves.urllib.request import install_opener as _install_opener
|
|
||||||
from salt.ext.six.moves.urllib.request import urlopen as _urlopen
|
|
||||||
|
|
||||||
# pylint: enable=import-error,no-name-in-module
|
|
||||||
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@ -86,7 +60,7 @@ def version():
|
||||||
|
|
||||||
salt '*' apache.version
|
salt '*' apache.version
|
||||||
"""
|
"""
|
||||||
cmd = "{0} -v".format(_detect_os())
|
cmd = "{} -v".format(_detect_os())
|
||||||
out = __salt__["cmd.run"](cmd).splitlines()
|
out = __salt__["cmd.run"](cmd).splitlines()
|
||||||
ret = out[0].split(": ")
|
ret = out[0].split(": ")
|
||||||
return ret[1]
|
return ret[1]
|
||||||
|
@ -102,7 +76,7 @@ def fullversion():
|
||||||
|
|
||||||
salt '*' apache.fullversion
|
salt '*' apache.fullversion
|
||||||
"""
|
"""
|
||||||
cmd = "{0} -V".format(_detect_os())
|
cmd = "{} -V".format(_detect_os())
|
||||||
ret = {}
|
ret = {}
|
||||||
ret["compiled_with"] = []
|
ret["compiled_with"] = []
|
||||||
out = __salt__["cmd.run"](cmd).splitlines()
|
out = __salt__["cmd.run"](cmd).splitlines()
|
||||||
|
@ -131,7 +105,7 @@ def modules():
|
||||||
|
|
||||||
salt '*' apache.modules
|
salt '*' apache.modules
|
||||||
"""
|
"""
|
||||||
cmd = "{0} -M".format(_detect_os())
|
cmd = "{} -M".format(_detect_os())
|
||||||
ret = {}
|
ret = {}
|
||||||
ret["static"] = []
|
ret["static"] = []
|
||||||
ret["shared"] = []
|
ret["shared"] = []
|
||||||
|
@ -157,7 +131,7 @@ def servermods():
|
||||||
|
|
||||||
salt '*' apache.servermods
|
salt '*' apache.servermods
|
||||||
"""
|
"""
|
||||||
cmd = "{0} -l".format(_detect_os())
|
cmd = "{} -l".format(_detect_os())
|
||||||
ret = []
|
ret = []
|
||||||
out = __salt__["cmd.run"](cmd).splitlines()
|
out = __salt__["cmd.run"](cmd).splitlines()
|
||||||
for line in out:
|
for line in out:
|
||||||
|
@ -179,7 +153,7 @@ def directives():
|
||||||
|
|
||||||
salt '*' apache.directives
|
salt '*' apache.directives
|
||||||
"""
|
"""
|
||||||
cmd = "{0} -L".format(_detect_os())
|
cmd = "{} -L".format(_detect_os())
|
||||||
ret = {}
|
ret = {}
|
||||||
out = __salt__["cmd.run"](cmd)
|
out = __salt__["cmd.run"](cmd)
|
||||||
out = out.replace("\n\t", "\t")
|
out = out.replace("\n\t", "\t")
|
||||||
|
@ -206,7 +180,7 @@ def vhosts():
|
||||||
|
|
||||||
salt -t 10 '*' apache.vhosts
|
salt -t 10 '*' apache.vhosts
|
||||||
"""
|
"""
|
||||||
cmd = "{0} -S".format(_detect_os())
|
cmd = "{} -S".format(_detect_os())
|
||||||
ret = {}
|
ret = {}
|
||||||
namevhost = ""
|
namevhost = ""
|
||||||
out = __salt__["cmd.run"](cmd)
|
out = __salt__["cmd.run"](cmd)
|
||||||
|
@ -247,9 +221,9 @@ def signal(signal=None):
|
||||||
return
|
return
|
||||||
# Make sure you use the right arguments
|
# Make sure you use the right arguments
|
||||||
if signal in valid_signals:
|
if signal in valid_signals:
|
||||||
arguments = " -k {0}".format(signal)
|
arguments = " -k {}".format(signal)
|
||||||
else:
|
else:
|
||||||
arguments = " {0}".format(signal)
|
arguments = " {}".format(signal)
|
||||||
cmd = _detect_os() + arguments
|
cmd = _detect_os() + arguments
|
||||||
out = __salt__["cmd.run_all"](cmd)
|
out = __salt__["cmd.run_all"](cmd)
|
||||||
|
|
||||||
|
@ -263,7 +237,7 @@ def signal(signal=None):
|
||||||
ret = out["stdout"].strip()
|
ret = out["stdout"].strip()
|
||||||
# No output for something like: apachectl graceful
|
# No output for something like: apachectl graceful
|
||||||
else:
|
else:
|
||||||
ret = 'Command: "{0}" completed successfully!'.format(cmd)
|
ret = 'Command: "{}" completed successfully!'.format(cmd)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
@ -352,28 +326,28 @@ def server_status(profile="default"):
|
||||||
|
|
||||||
# Get configuration from pillar
|
# Get configuration from pillar
|
||||||
url = __salt__["config.get"](
|
url = __salt__["config.get"](
|
||||||
"apache.server-status:{0}:url".format(profile), "http://localhost/server-status"
|
"apache.server-status:{}:url".format(profile), "http://localhost/server-status"
|
||||||
)
|
)
|
||||||
user = __salt__["config.get"]("apache.server-status:{0}:user".format(profile), "")
|
user = __salt__["config.get"]("apache.server-status:{}:user".format(profile), "")
|
||||||
passwd = __salt__["config.get"]("apache.server-status:{0}:pass".format(profile), "")
|
passwd = __salt__["config.get"]("apache.server-status:{}:pass".format(profile), "")
|
||||||
realm = __salt__["config.get"]("apache.server-status:{0}:realm".format(profile), "")
|
realm = __salt__["config.get"]("apache.server-status:{}:realm".format(profile), "")
|
||||||
timeout = __salt__["config.get"](
|
timeout = __salt__["config.get"](
|
||||||
"apache.server-status:{0}:timeout".format(profile), 5
|
"apache.server-status:{}:timeout".format(profile), 5
|
||||||
)
|
)
|
||||||
|
|
||||||
# create authentication handler if configuration exists
|
# create authentication handler if configuration exists
|
||||||
if user and passwd:
|
if user and passwd:
|
||||||
basic = _HTTPBasicAuthHandler()
|
basic = urllib.request.HTTPBasicAuthHandler()
|
||||||
basic.add_password(realm=realm, uri=url, user=user, passwd=passwd)
|
basic.add_password(realm=realm, uri=url, user=user, passwd=passwd)
|
||||||
digest = _HTTPDigestAuthHandler()
|
digest = urllib.request.HTTPDigestAuthHandler()
|
||||||
digest.add_password(realm=realm, uri=url, user=user, passwd=passwd)
|
digest.add_password(realm=realm, uri=url, user=user, passwd=passwd)
|
||||||
_install_opener(_build_opener(basic, digest))
|
urllib.request.install_opener(urllib.request.build_opener(basic, digest))
|
||||||
|
|
||||||
# get http data
|
# get http data
|
||||||
url += "?auto"
|
url += "?auto"
|
||||||
try:
|
try:
|
||||||
response = _urlopen(url, timeout=timeout).read().splitlines()
|
response = urllib.request.urlopen(url, timeout=timeout).read().splitlines()
|
||||||
except URLError:
|
except urllib.error.URLError:
|
||||||
return "error"
|
return "error"
|
||||||
|
|
||||||
# parse the data
|
# parse the data
|
||||||
|
@ -402,45 +376,45 @@ def _parse_config(conf, slot=None):
|
||||||
:param conf: defined config structure
|
:param conf: defined config structure
|
||||||
:param slot: name of section container if needed
|
:param slot: name of section container if needed
|
||||||
"""
|
"""
|
||||||
ret = cStringIO()
|
ret = io.StringIO()
|
||||||
if isinstance(conf, six.string_types):
|
if isinstance(conf, str):
|
||||||
if slot:
|
if slot:
|
||||||
print("{0} {1}".format(slot, conf), file=ret, end="")
|
print("{} {}".format(slot, conf), file=ret, end="")
|
||||||
else:
|
else:
|
||||||
print("{0}".format(conf), file=ret, end="")
|
print("{}".format(conf), file=ret, end="")
|
||||||
elif isinstance(conf, list):
|
elif isinstance(conf, list):
|
||||||
is_section = False
|
is_section = False
|
||||||
for item in conf:
|
for item in conf:
|
||||||
if "this" in item:
|
if "this" in item:
|
||||||
is_section = True
|
is_section = True
|
||||||
slot_this = six.text_type(item["this"])
|
slot_this = str(item["this"])
|
||||||
if is_section:
|
if is_section:
|
||||||
print("<{0} {1}>".format(slot, slot_this), file=ret)
|
print("<{} {}>".format(slot, slot_this), file=ret)
|
||||||
for item in conf:
|
for item in conf:
|
||||||
for key, val in item.items():
|
for key, val in item.items():
|
||||||
if key != "this":
|
if key != "this":
|
||||||
print(_parse_config(val, six.text_type(key)), file=ret)
|
print(_parse_config(val, str(key)), file=ret)
|
||||||
print("</{0}>".format(slot), file=ret)
|
print("</{}>".format(slot), file=ret)
|
||||||
else:
|
else:
|
||||||
for value in conf:
|
for value in conf:
|
||||||
print(_parse_config(value, six.text_type(slot)), file=ret)
|
print(_parse_config(value, str(slot)), file=ret)
|
||||||
elif isinstance(conf, dict):
|
elif isinstance(conf, dict):
|
||||||
try:
|
try:
|
||||||
print("<{0} {1}>".format(slot, conf["this"]), file=ret)
|
print("<{} {}>".format(slot, conf["this"]), file=ret)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise SaltException(
|
raise SaltException(
|
||||||
'Apache section container "<{0}>" expects attribute. '
|
'Apache section container "<{}>" expects attribute. '
|
||||||
'Specify it using key "this".'.format(slot)
|
'Specify it using key "this".'.format(slot)
|
||||||
)
|
)
|
||||||
for key, value in six.iteritems(conf):
|
for key, value in conf.items():
|
||||||
if key != "this":
|
if key != "this":
|
||||||
if isinstance(value, six.string_types):
|
if isinstance(value, str):
|
||||||
print("{0} {1}".format(key, value), file=ret)
|
print("{} {}".format(key, value), file=ret)
|
||||||
elif isinstance(value, list):
|
elif isinstance(value, list):
|
||||||
print(_parse_config(value, key), file=ret)
|
print(_parse_config(value, key), file=ret)
|
||||||
elif isinstance(value, dict):
|
elif isinstance(value, dict):
|
||||||
print(_parse_config(value, key), file=ret)
|
print(_parse_config(value, key), file=ret)
|
||||||
print("</{0}>".format(slot), file=ret)
|
print("</{}>".format(slot), file=ret)
|
||||||
|
|
||||||
ret.seek(0)
|
ret.seek(0)
|
||||||
return ret.read()
|
return ret.read()
|
||||||
|
@ -469,7 +443,7 @@ def config(name, config, edit=True):
|
||||||
|
|
||||||
configs = []
|
configs = []
|
||||||
for entry in config:
|
for entry in config:
|
||||||
key = next(six.iterkeys(entry))
|
key = next(iter(entry.keys()))
|
||||||
configs.append(_parse_config(entry[key], key))
|
configs.append(_parse_config(entry[key], key))
|
||||||
|
|
||||||
# Python auto-correct line endings
|
# Python auto-correct line endings
|
||||||
|
|
|
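
The apache module above swaps the six.moves urllib aliases for the standard library urllib.request and urllib.error names used directly. A minimal sketch of the same opener wiring outside Salt; the URL, realm and credentials are placeholders, not values taken from the diff:

    import urllib.error
    import urllib.request

    basic = urllib.request.HTTPBasicAuthHandler()
    basic.add_password(realm="webserver", uri="http://localhost/server-status",
                       user="admin", passwd="secret")
    digest = urllib.request.HTTPDigestAuthHandler()
    digest.add_password(realm="webserver", uri="http://localhost/server-status",
                        user="admin", passwd="secret")
    urllib.request.install_opener(urllib.request.build_opener(basic, digest))

    try:
        with urllib.request.urlopen("http://localhost/server-status?auto", timeout=5) as resp:
            print(resp.read().splitlines()[:3])
    except urllib.error.URLError as exc:
        print("error:", exc)
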
@ -1,15 +1,10 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Module for apcupsd
|
Module for apcupsd
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import Python libs
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import salt.utils.decorators as decorators
|
import salt.utils.decorators as decorators
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
@ -34,7 +29,7 @@ def __virtual__():
|
||||||
return __virtualname__
|
return __virtualname__
|
||||||
return (
|
return (
|
||||||
False,
|
False,
|
||||||
"{0} module can only be loaded on when apcupsd is installed".format(
|
"{} module can only be loaded on when apcupsd is installed".format(
|
||||||
__virtualname__
|
__virtualname__
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
|
@ -1,11 +1,8 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Aptly Debian repository manager.
|
Aptly Debian repository manager.
|
||||||
|
|
||||||
.. versionadded:: 2018.3.0
|
.. versionadded:: 2018.3.0
|
||||||
"""
|
"""
|
||||||
# Import python libs
|
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
@ -16,9 +13,6 @@ import salt.utils.path
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
from salt.exceptions import SaltInvocationError
|
from salt.exceptions import SaltInvocationError
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
from salt.ext import six
|
|
||||||
|
|
||||||
_DEFAULT_CONFIG_PATH = "/etc/aptly.conf"
|
_DEFAULT_CONFIG_PATH = "/etc/aptly.conf"
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@ -183,7 +177,7 @@ def get_repo(name, config_path=_DEFAULT_CONFIG_PATH, with_packages=False):
|
||||||
salt '*' aptly.get_repo name="test-repo"
|
salt '*' aptly.get_repo name="test-repo"
|
||||||
"""
|
"""
|
||||||
_validate_config(config_path)
|
_validate_config(config_path)
|
||||||
with_packages = six.text_type(bool(with_packages)).lower()
|
with_packages = str(bool(with_packages)).lower()
|
||||||
|
|
||||||
ret = dict()
|
ret = dict()
|
||||||
cmd = [
|
cmd = [
|
||||||
|
@ -390,7 +384,7 @@ def delete_repo(name, config_path=_DEFAULT_CONFIG_PATH, force=False):
|
||||||
salt '*' aptly.delete_repo name="test-repo"
|
salt '*' aptly.delete_repo name="test-repo"
|
||||||
"""
|
"""
|
||||||
_validate_config(config_path)
|
_validate_config(config_path)
|
||||||
force = six.text_type(bool(force)).lower()
|
force = str(bool(force)).lower()
|
||||||
|
|
||||||
current_repo = __salt__["aptly.get_repo"](name=name, config_path=config_path)
|
current_repo = __salt__["aptly.get_repo"](name=name, config_path=config_path)
|
||||||
|
|
||||||
|
@ -517,7 +511,7 @@ def cleanup_db(config_path=_DEFAULT_CONFIG_PATH, dry_run=False):
|
||||||
salt '*' aptly.cleanup_db
|
salt '*' aptly.cleanup_db
|
||||||
"""
|
"""
|
||||||
_validate_config(config_path)
|
_validate_config(config_path)
|
||||||
dry_run = six.text_type(bool(dry_run)).lower()
|
dry_run = str(bool(dry_run)).lower()
|
||||||
|
|
||||||
ret = {"deleted_keys": list(), "deleted_files": list()}
|
ret = {"deleted_keys": list(), "deleted_files": list()}
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Arista pyeapi
|
Arista pyeapi
|
||||||
=============
|
=============
|
||||||
|
@ -89,19 +88,13 @@ outside a ``pyeapi`` Proxy, e.g.:
|
||||||
Minion. If you want to use the :mod:`pyeapi Proxy <salt.proxy.arista_pyeapi>`,
|
Minion. If you want to use the :mod:`pyeapi Proxy <salt.proxy.arista_pyeapi>`,
|
||||||
please follow the documentation notes for a proper setup.
|
please follow the documentation notes for a proper setup.
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
# Import python stdlib
|
|
||||||
import difflib
|
import difflib
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from salt.exceptions import CommandExecutionError
|
from salt.exceptions import CommandExecutionError
|
||||||
|
|
||||||
# Import Salt libs
|
|
||||||
from salt.ext import six
|
|
||||||
from salt.utils.args import clean_kwargs
|
from salt.utils.args import clean_kwargs
|
||||||
|
|
||||||
# Import third party libs
|
|
||||||
try:
|
try:
|
||||||
import pyeapi
|
import pyeapi
|
||||||
|
|
||||||
|
@ -519,7 +512,7 @@ def config(
|
||||||
log.debug("Fetched from %s", config_file)
|
log.debug("Fetched from %s", config_file)
|
||||||
log.debug(file_str)
|
log.debug(file_str)
|
||||||
elif commands:
|
elif commands:
|
||||||
if isinstance(commands, (six.string_types, six.text_type)):
|
if isinstance(commands, ((str,), str)):
|
||||||
commands = [commands]
|
commands = [commands]
|
||||||
file_str = "\n".join(commands)
|
file_str = "\n".join(commands)
|
||||||
# unify all the commands in a single file, to render them in a go
|
# unify all the commands in a single file, to render them in a go
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Wrapper module for at(1)
|
Wrapper module for at(1)
|
||||||
|
|
||||||
|
@ -9,26 +8,19 @@ easily tag jobs.
|
||||||
|
|
||||||
.. versionchanged:: 2017.7.0
|
.. versionchanged:: 2017.7.0
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.data
|
import salt.utils.data
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
|
|
||||||
# pylint: enable=import-error,redefined-builtin
|
# pylint: enable=import-error,redefined-builtin
|
||||||
from salt.exceptions import CommandNotFoundError
|
from salt.exceptions import CommandNotFoundError
|
||||||
from salt.ext import six
|
|
||||||
|
|
||||||
# Import 3rd-party libs
|
|
||||||
# pylint: disable=import-error,redefined-builtin
|
# pylint: disable=import-error,redefined-builtin
|
||||||
from salt.ext.six.moves import map
|
|
||||||
|
|
||||||
# OS Families that should work (Ubuntu and Debian are the default)
|
# OS Families that should work (Ubuntu and Debian are the default)
|
||||||
# TODO: Refactor some of this module to remove the checks for binaries
|
# TODO: Refactor some of this module to remove the checks for binaries
|
||||||
|
@ -56,7 +48,7 @@ def _cmd(binary, *args):
|
||||||
"""
|
"""
|
||||||
binary = salt.utils.path.which(binary)
|
binary = salt.utils.path.which(binary)
|
||||||
if not binary:
|
if not binary:
|
||||||
raise CommandNotFoundError("{0}: command not found".format(binary))
|
raise CommandNotFoundError("{}: command not found".format(binary))
|
||||||
cmd = [binary] + list(args)
|
cmd = [binary] + list(args)
|
||||||
return __salt__["cmd.run_stdout"]([binary] + list(args), python_shell=False)
|
return __salt__["cmd.run_stdout"]([binary] + list(args), python_shell=False)
|
||||||
|
|
||||||
|
@ -156,7 +148,7 @@ def atq(tag=None):
|
||||||
job_tag = tmp.groups()[0]
|
job_tag = tmp.groups()[0]
|
||||||
|
|
||||||
if __grains__["os"] in BSD:
|
if __grains__["os"] in BSD:
|
||||||
job = six.text_type(job)
|
job = str(job)
|
||||||
else:
|
else:
|
||||||
job = int(job)
|
job = int(job)
|
||||||
|
|
||||||
|
@ -223,16 +215,7 @@ def atrm(*args):
|
||||||
ret = {"jobs": {"removed": opts, "tag": None}}
|
ret = {"jobs": {"removed": opts, "tag": None}}
|
||||||
else:
|
else:
|
||||||
opts = list(
|
opts = list(
|
||||||
-list(
-    map(
-        str,
-        [
-            i["job"]
-            for i in atq()["jobs"]
-            if six.text_type(i["job"]) in args
-        ],
-    )
-)
+list(map(str, [i["job"] for i in atq()["jobs"] if str(i["job"]) in args],))
||||||
)
|
)
|
||||||
ret = {"jobs": {"removed": opts, "tag": None}}
|
ret = {"jobs": {"removed": opts, "tag": None}}
|
||||||
|
|
||||||
|
@ -271,7 +254,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
||||||
return "'at.at' is not available."
|
return "'at.at' is not available."
|
||||||
|
|
||||||
if "tag" in kwargs:
|
if "tag" in kwargs:
|
||||||
stdin = "### SALT: {0}\n{1}".format(kwargs["tag"], " ".join(args[1:]))
|
stdin = "### SALT: {}\n{}".format(kwargs["tag"], " ".join(args[1:]))
|
||||||
else:
|
else:
|
||||||
stdin = " ".join(args[1:])
|
stdin = " ".join(args[1:])
|
||||||
cmd = [binary, args[0]]
|
cmd = [binary, args[0]]
|
||||||
|
@ -296,7 +279,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
||||||
output = output.split()[1]
|
output = output.split()[1]
|
||||||
|
|
||||||
if __grains__["os"] in BSD:
|
if __grains__["os"] in BSD:
|
||||||
return atq(six.text_type(output))
|
return atq(str(output))
|
||||||
else:
|
else:
|
||||||
return atq(int(output))
|
return atq(int(output))
|
||||||
|
|
||||||
|
@ -315,12 +298,12 @@ def atc(jobid):
|
||||||
"""
|
"""
|
||||||
# Shim to produce output similar to what __virtual__() should do
|
# Shim to produce output similar to what __virtual__() should do
|
||||||
# but __salt__ isn't available in __virtual__()
|
# but __salt__ isn't available in __virtual__()
|
||||||
output = _cmd("at", "-c", six.text_type(jobid))
|
output = _cmd("at", "-c", str(jobid))
|
||||||
|
|
||||||
if output is None:
|
if output is None:
|
||||||
return "'at.atc' is not available."
|
return "'at.atc' is not available."
|
||||||
elif output == "":
|
elif output == "":
|
||||||
return {"error": "invalid job id '{0}'".format(jobid)}
|
return {"error": "invalid job id '{}'".format(jobid)}
|
||||||
|
|
||||||
return output
|
return output
|
||||||
|
|
||||||
|
@ -339,8 +322,8 @@ def _atq(**kwargs):
|
||||||
day = kwargs.get("day", None)
|
day = kwargs.get("day", None)
|
||||||
month = kwargs.get("month", None)
|
month = kwargs.get("month", None)
|
||||||
year = kwargs.get("year", None)
|
year = kwargs.get("year", None)
|
||||||
if year and len(six.text_type(year)) == 2:
|
if year and len(str(year)) == 2:
|
||||||
year = "20{0}".format(year)
|
year = "20{}".format(year)
|
||||||
|
|
||||||
jobinfo = atq()["jobs"]
|
jobinfo = atq()["jobs"]
|
||||||
if not jobinfo:
|
if not jobinfo:
|
||||||
|
@ -364,28 +347,28 @@ def _atq(**kwargs):
|
||||||
|
|
||||||
if not hour:
|
if not hour:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
elif "{:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not minute:
|
if not minute:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
elif "{:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not day:
|
if not day:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(day)) == job["date"].split("-")[2]:
|
elif "{:02d}".format(int(day)) == job["date"].split("-")[2]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not month:
|
if not month:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(month)) == job["date"].split("-")[1]:
|
elif "{:02d}".format(int(month)) == job["date"].split("-")[1]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Wrapper for at(1) on Solaris-like systems
|
Wrapper for at(1) on Solaris-like systems
|
||||||
|
|
||||||
|
@ -12,25 +11,16 @@ Wrapper for at(1) on Solaris-like systems
|
||||||
|
|
||||||
.. versionadded:: 2017.7.0
|
.. versionadded:: 2017.7.0
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.path
|
import salt.utils.path
|
||||||
import salt.utils.platform
|
import salt.utils.platform
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
from salt.ext import six
|
|
||||||
|
|
||||||
# Import 3rd-party libs
|
|
||||||
# pylint: disable=import-error,redefined-builtin
|
|
||||||
from salt.ext.six.moves import map
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
__virtualname__ = "at"
|
__virtualname__ = "at"
|
||||||
|
@ -102,7 +92,7 @@ def atq(tag=None):
|
||||||
specs.append(tmp[5])
|
specs.append(tmp[5])
|
||||||
|
|
||||||
# make sure job is str
|
# make sure job is str
|
||||||
job = six.text_type(job)
|
job = str(job)
|
||||||
|
|
||||||
# search for any tags
|
# search for any tags
|
||||||
atjob_file = "/var/spool/cron/atjobs/{job}".format(job=job)
|
atjob_file = "/var/spool/cron/atjobs/{job}".format(job=job)
|
||||||
|
@ -208,7 +198,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
||||||
|
|
||||||
# build job
|
# build job
|
||||||
if "tag" in kwargs:
|
if "tag" in kwargs:
|
||||||
stdin = "### SALT: {0}\n{1}".format(kwargs["tag"], " ".join(args[1:]))
|
stdin = "### SALT: {}\n{}".format(kwargs["tag"], " ".join(args[1:]))
|
||||||
else:
|
else:
|
||||||
stdin = " ".join(args[1:])
|
stdin = " ".join(args[1:])
|
||||||
|
|
||||||
|
@ -226,7 +216,7 @@ def at(*args, **kwargs): # pylint: disable=C0103
|
||||||
return {"jobs": [], "error": res["stderr"]}
|
return {"jobs": [], "error": res["stderr"]}
|
||||||
else:
|
else:
|
||||||
jobid = res["stderr"].splitlines()[1]
|
jobid = res["stderr"].splitlines()[1]
|
||||||
jobid = six.text_type(jobid.split()[1])
|
jobid = str(jobid.split()[1])
|
||||||
return atq(jobid)
|
return atq(jobid)
|
||||||
|
|
||||||
|
|
||||||
|
@ -250,7 +240,7 @@ def atc(jobid):
|
||||||
[salt.utils.stringutils.to_unicode(x) for x in rfh.readlines()]
|
[salt.utils.stringutils.to_unicode(x) for x in rfh.readlines()]
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
return {"error": "invalid job id '{0}'".format(jobid)}
|
return {"error": "invalid job id '{}'".format(jobid)}
|
||||||
|
|
||||||
|
|
||||||
def _atq(**kwargs):
|
def _atq(**kwargs):
|
||||||
|
@ -267,8 +257,8 @@ def _atq(**kwargs):
|
||||||
day = kwargs.get("day", None)
|
day = kwargs.get("day", None)
|
||||||
month = kwargs.get("month", None)
|
month = kwargs.get("month", None)
|
||||||
year = kwargs.get("year", None)
|
year = kwargs.get("year", None)
|
||||||
if year and len(six.text_type(year)) == 2:
|
if year and len(str(year)) == 2:
|
||||||
year = "20{0}".format(year)
|
year = "20{}".format(year)
|
||||||
|
|
||||||
jobinfo = atq()["jobs"]
|
jobinfo = atq()["jobs"]
|
||||||
if not jobinfo:
|
if not jobinfo:
|
||||||
|
@ -292,28 +282,28 @@ def _atq(**kwargs):
|
||||||
|
|
||||||
if not hour:
|
if not hour:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
elif "{:02d}".format(int(hour)) == job["time"].split(":")[0]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not minute:
|
if not minute:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
elif "{:02d}".format(int(minute)) == job["time"].split(":")[1]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not day:
|
if not day:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(day)) == job["date"].split("-")[2]:
|
elif "{:02d}".format(int(day)) == job["date"].split("-")[2]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not month:
|
if not month:
|
||||||
pass
|
pass
|
||||||
elif "{0:02d}".format(int(month)) == job["date"].split("-")[1]:
|
elif "{:02d}".format(int(month)) == job["date"].split("-")[1]:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
continue
|
continue
|
||||||
|
|
|
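
The at and at_solaris hunks above also drop the explicit positional indexes from str.format calls, relying on auto-numbered fields instead. A tiny illustration with arbitrary example values:

    hour, minute, year = 7, 5, 21
    assert "{:02d}:{:02d}".format(hour, minute) == "07:05"   # was "{0:02d}:{1:02d}"
    assert "20{}".format(year) == "2021"                     # was "20{0}"
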
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Manages configuration files via augeas
|
Manages configuration files via augeas
|
||||||
|
|
||||||
|
@ -23,21 +22,15 @@ This module requires the ``augeas`` Python module.
|
||||||
For affected Debian/Ubuntu hosts, installing ``libpython2.7`` has been
|
For affected Debian/Ubuntu hosts, installing ``libpython2.7`` has been
|
||||||
known to resolve the issue.
|
known to resolve the issue.
|
||||||
"""
|
"""
|
||||||
from __future__ import absolute_import, print_function, unicode_literals
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Import python libs
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
# Import salt libs
|
|
||||||
import salt.utils.args
|
import salt.utils.args
|
||||||
import salt.utils.data
|
import salt.utils.data
|
||||||
import salt.utils.stringutils
|
import salt.utils.stringutils
|
||||||
from salt.exceptions import SaltInvocationError
|
from salt.exceptions import SaltInvocationError
|
||||||
from salt.ext import six
|
|
||||||
from salt.ext.six.moves import zip
|
|
||||||
|
|
||||||
# Make sure augeas python interface is installed
|
# Make sure augeas python interface is installed
|
||||||
HAS_AUGEAS = False
|
HAS_AUGEAS = False
|
||||||
|
@ -90,8 +83,7 @@ def _recurmatch(path, aug):
|
||||||
|
|
||||||
for i in aug.match(clean_path + "/*"):
|
for i in aug.match(clean_path + "/*"):
|
||||||
i = i.replace("!", "\\!") # escape some dirs
|
i = i.replace("!", "\\!") # escape some dirs
|
||||||
-for _match in _recurmatch(i, aug):
-    yield _match
+yield from _recurmatch(i, aug)
||||||
|
|
||||||
|
|
||||||
def _lstrip_word(word, prefix):
|
def _lstrip_word(word, prefix):
|
||||||
|
@ -100,8 +92,8 @@ def _lstrip_word(word, prefix):
|
||||||
from the beginning of the string
|
from the beginning of the string
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if six.text_type(word).startswith(prefix):
|
if str(word).startswith(prefix):
|
||||||
return six.text_type(word)[len(prefix) :]
|
return str(word)[len(prefix) :]
|
||||||
return word
|
return word
|
||||||
|
|
||||||
|
|
||||||
|
@ -110,7 +102,7 @@ def _check_load_paths(load_path):
|
||||||
Checks the validity of the load_path, returns a sanitized version
|
Checks the validity of the load_path, returns a sanitized version
|
||||||
with invalid paths removed.
|
with invalid paths removed.
|
||||||
"""
|
"""
|
||||||
if load_path is None or not isinstance(load_path, six.string_types):
|
if load_path is None or not isinstance(load_path, str):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
_paths = []
|
_paths = []
|
||||||
|
@ -198,7 +190,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
||||||
cmd, arg = command.split(" ", 1)
|
cmd, arg = command.split(" ", 1)
|
||||||
|
|
||||||
if cmd not in METHOD_MAP:
|
if cmd not in METHOD_MAP:
|
||||||
ret["error"] = "Command {0} is not supported (yet)".format(cmd)
|
ret["error"] = "Command {} is not supported (yet)".format(cmd)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
method = METHOD_MAP[cmd]
|
method = METHOD_MAP[cmd]
|
||||||
|
@ -207,7 +199,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
||||||
parts = salt.utils.args.shlex_split(arg)
|
parts = salt.utils.args.shlex_split(arg)
|
||||||
|
|
||||||
if len(parts) not in nargs:
|
if len(parts) not in nargs:
|
||||||
err = "{0} takes {1} args: {2}".format(method, nargs, parts)
|
err = "{} takes {} args: {}".format(method, nargs, parts)
|
||||||
raise ValueError(err)
|
raise ValueError(err)
|
||||||
if method == "set":
|
if method == "set":
|
||||||
path = make_path(parts[0])
|
path = make_path(parts[0])
|
||||||
|
@ -226,7 +218,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
||||||
label, where, path = parts
|
label, where, path = parts
|
||||||
if where not in ("before", "after"):
|
if where not in ("before", "after"):
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
'Expected "before" or "after", not {0}'.format(where)
|
'Expected "before" or "after", not {}'.format(where)
|
||||||
)
|
)
|
||||||
path = make_path(path)
|
path = make_path(path)
|
||||||
args = {"path": path, "label": label, "before": where == "before"}
|
args = {"path": path, "label": label, "before": where == "before"}
|
||||||
|
@ -240,7 +232,7 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
||||||
arg = command
|
arg = command
|
||||||
ret["error"] = (
|
ret["error"] = (
|
||||||
"Invalid formatted command, "
|
"Invalid formatted command, "
|
||||||
"see debug log for details: {0}".format(arg)
|
"see debug log for details: {}".format(arg)
|
||||||
)
|
)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
@ -253,13 +245,13 @@ def execute(context=None, lens=None, commands=(), load_path=None):
|
||||||
try:
|
try:
|
||||||
aug.save()
|
aug.save()
|
||||||
ret["retval"] = True
|
ret["retval"] = True
|
||||||
except IOError as err:
|
except OSError as err:
|
||||||
ret["error"] = six.text_type(err)
|
ret["error"] = str(err)
|
||||||
|
|
||||||
if lens and not lens.endswith(".lns"):
|
if lens and not lens.endswith(".lns"):
|
||||||
ret["error"] += (
|
ret["error"] += (
|
||||||
'\nLenses are normally configured as "name.lns". '
|
'\nLenses are normally configured as "name.lns". '
|
||||||
'Did you mean "{0}.lns"?'.format(lens)
|
'Did you mean "{}.lns"?'.format(lens)
|
||||||
)
|
)
|
||||||
|
|
||||||
aug.close()
|
aug.close()
|
||||||
|
@ -296,12 +288,12 @@ def get(path, value="", load_path=None):
|
||||||
|
|
||||||
path = path.rstrip("/")
|
path = path.rstrip("/")
|
||||||
if value:
|
if value:
|
||||||
path += "/{0}".format(value.strip("/"))
|
path += "/{}".format(value.strip("/"))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
_match = aug.match(path)
|
_match = aug.match(path)
|
||||||
except RuntimeError as err:
|
except RuntimeError as err:
|
||||||
return {"error": six.text_type(err)}
|
return {"error": str(err)}
|
||||||
|
|
||||||
if _match:
|
if _match:
|
||||||
ret[path] = aug.get(path)
|
ret[path] = aug.get(path)
|
||||||
|
@ -349,7 +341,7 @@ def setvalue(*args):
|
||||||
%wheel ALL = PASSWD : ALL , NOPASSWD : /usr/bin/apt-get , /usr/bin/aptitude
|
%wheel ALL = PASSWD : ALL , NOPASSWD : /usr/bin/apt-get , /usr/bin/aptitude
|
||||||
"""
|
"""
|
||||||
load_path = None
|
load_path = None
|
||||||
load_paths = [x for x in args if six.text_type(x).startswith("load_path=")]
|
load_paths = [x for x in args if str(x).startswith("load_path=")]
|
||||||
if load_paths:
|
if load_paths:
|
||||||
if len(load_paths) > 1:
|
if len(load_paths) > 1:
|
||||||
raise SaltInvocationError("Only one 'load_path=' value is permitted")
|
raise SaltInvocationError("Only one 'load_path=' value is permitted")
|
||||||
|
@ -363,10 +355,9 @@ def setvalue(*args):
|
||||||
tuples = [
|
tuples = [
|
||||||
x
|
x
|
||||||
for x in args
|
for x in args
|
||||||
-if not six.text_type(x).startswith("prefix=")
-and not six.text_type(x).startswith("load_path=")
+if not str(x).startswith("prefix=") and not str(x).startswith("load_path=")
||||||
]
|
]
|
||||||
prefix = [x for x in args if six.text_type(x).startswith("prefix=")]
|
prefix = [x for x in args if str(x).startswith("prefix=")]
|
||||||
if prefix:
|
if prefix:
|
||||||
if len(prefix) > 1:
|
if len(prefix) > 1:
|
||||||
raise SaltInvocationError("Only one 'prefix=' value is permitted")
|
raise SaltInvocationError("Only one 'prefix=' value is permitted")
|
||||||
|
@ -382,15 +373,15 @@ def setvalue(*args):
|
||||||
if prefix:
|
if prefix:
|
||||||
target_path = os.path.join(prefix.rstrip("/"), path.lstrip("/"))
|
target_path = os.path.join(prefix.rstrip("/"), path.lstrip("/"))
|
||||||
try:
|
try:
|
||||||
aug.set(target_path, six.text_type(value))
|
aug.set(target_path, str(value))
|
||||||
except ValueError as err:
|
except ValueError as err:
|
||||||
ret["error"] = "Multiple values: {0}".format(err)
|
ret["error"] = "Multiple values: {}".format(err)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
aug.save()
|
aug.save()
|
||||||
ret["retval"] = True
|
ret["retval"] = True
|
||||||
except IOError as err:
|
except OSError as err:
|
||||||
ret["error"] = six.text_type(err)
|
ret["error"] = str(err)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
@ -467,8 +458,8 @@ def remove(path, load_path=None):
|
||||||
ret["error"] = "Invalid node"
|
ret["error"] = "Invalid node"
|
||||||
else:
|
else:
|
||||||
ret["retval"] = True
|
ret["retval"] = True
|
||||||
except (RuntimeError, IOError) as err:
|
except (RuntimeError, OSError) as err:
|
||||||
ret["error"] = six.text_type(err)
|
ret["error"] = str(err)
|
||||||
|
|
||||||
ret["count"] = count
|
ret["count"] = count
|
||||||
|
|
||||||
|
@ -518,7 +509,7 @@ def ls(path, load_path=None): # pylint: disable=C0103
|
||||||
matches = _match(match_path)
|
matches = _match(match_path)
|
||||||
ret = {}
|
ret = {}
|
||||||
|
|
||||||
for key, value in six.iteritems(matches):
|
for key, value in matches.items():
|
||||||
name = _lstrip_word(key, path)
|
name = _lstrip_word(key, path)
|
||||||
if _match(key + "/*"):
|
if _match(key + "/*"):
|
||||||
ret[name + "/"] = value # has sub nodes, e.g. directory
|
ret[name + "/"] = value # has sub nodes, e.g. directory
|
||||||
|
|
|
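
One of the more structural rewrites in the augeas module above is the _recurmatch change: the recursive generator now delegates with "yield from" instead of looping and re-yielding each item. A self-contained sketch of the same pattern on a made-up nested list, not on the Augeas tree itself:

    def flatten(nested):
        for item in nested:
            if isinstance(item, list):
                # replaces: for x in flatten(item): yield x
                yield from flatten(item)
            else:
                yield item

    assert list(flatten([1, [2, [3, 4]], 5])) == [1, 2, 3, 4, 5]
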
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Azure (ARM) Compute Execution Module
|
Azure (ARM) Compute Execution Module
|
||||||
|
|
||||||
|
@ -47,7 +46,6 @@ Azure (ARM) Compute Execution Module
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Python libs
|
# Python libs
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -127,9 +125,7 @@ def availability_set_create_or_update(
|
||||||
"compute", "AvailabilitySet", **kwargs
|
"compute", "AvailabilitySet", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -145,7 +141,7 @@ def availability_set_create_or_update(
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Azure (ARM) DNS Execution Module
|
Azure (ARM) DNS Execution Module
|
||||||
|
|
||||||
|
@ -53,7 +52,6 @@ Optional provider parameters:
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Python libs
|
# Python libs
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
@ -117,9 +115,7 @@ def record_set_create_or_update(name, zone_name, resource_group, record_type, **
|
||||||
"dns", "RecordSet", **kwargs
|
"dns", "RecordSet", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -138,7 +134,7 @@ def record_set_create_or_update(name, zone_name, resource_group, record_type, **
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -362,9 +358,7 @@ def zone_create_or_update(name, resource_group, **kwargs):
|
||||||
try:
|
try:
|
||||||
zone_model = __utils__["azurearm.create_object_model"]("dns", "Zone", **kwargs)
|
zone_model = __utils__["azurearm.create_object_model"]("dns", "Zone", **kwargs)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -381,7 +375,7 @@ def zone_create_or_update(name, resource_group, **kwargs):
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
"""
|
"""
|
||||||
Azure (ARM) Network Execution Module
|
Azure (ARM) Network Execution Module
|
||||||
|
|
||||||
|
@ -47,13 +46,11 @@ Azure (ARM) Network Execution Module
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Python libs
|
# Python libs
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# Salt libs
|
# Salt libs
|
||||||
from salt.exceptions import SaltInvocationError # pylint: disable=unused-import
|
from salt.exceptions import SaltInvocationError # pylint: disable=unused-import
|
||||||
from salt.ext.six.moves import range
|
|
||||||
|
|
||||||
# Azure libs
|
# Azure libs
|
||||||
HAS_LIBS = False
|
HAS_LIBS = False
|
||||||
|
@ -187,9 +184,7 @@ def default_security_rule_get(name, security_group, resource_group, **kwargs):
|
||||||
if default_rule["name"] == name:
|
if default_rule["name"] == name:
|
||||||
result = default_rule
|
result = default_rule
|
||||||
if not result:
|
if not result:
|
||||||
-result = {
-    "error": "Unable to find {0} in {1}!".format(name, security_group)
-}
+result = {"error": "Unable to find {} in {}!".format(name, security_group)}
||||||
except KeyError as exc:
|
except KeyError as exc:
|
||||||
log.error("Unable to find %s in %s!", name, security_group)
|
log.error("Unable to find %s in %s!", name, security_group)
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
|
@ -368,7 +363,7 @@ def security_rule_create_or_update(
|
||||||
# pylint: disable=eval-used
|
# pylint: disable=eval-used
|
||||||
if eval(params[0]):
|
if eval(params[0]):
|
||||||
# pylint: disable=exec-used
|
# pylint: disable=exec-used
|
||||||
exec("{0} = None".format(params[1]))
|
exec("{} = None".format(params[1]))
|
||||||
|
|
||||||
netconn = __utils__["azurearm.get_client"]("network", **kwargs)
|
netconn = __utils__["azurearm.get_client"]("network", **kwargs)
|
||||||
|
|
||||||
|
@ -392,9 +387,7 @@ def security_rule_create_or_update(
|
||||||
**kwargs
|
**kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -412,7 +405,7 @@ def security_rule_create_or_update(
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -528,9 +521,7 @@ def network_security_group_create_or_update(
|
||||||
"network", "NetworkSecurityGroup", **kwargs
|
"network", "NetworkSecurityGroup", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -547,7 +538,7 @@ def network_security_group_create_or_update(
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -803,9 +794,7 @@ def subnet_create_or_update(
|
||||||
**kwargs
|
**kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -823,7 +812,7 @@ def subnet_create_or_update(
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -979,9 +968,7 @@ def virtual_network_create_or_update(name, address_prefixes, resource_group, **k
|
||||||
**kwargs
|
**kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
-result = {
-    "error": "The object model could not be built. ({0})".format(str(exc))
-}
+result = {"error": "The object model could not be built. ({})".format(str(exc))}
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -998,7 +985,7 @@ def virtual_network_create_or_update(name, address_prefixes, resource_group, **k
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -1322,9 +1309,7 @@ def load_balancer_create_or_update(name, resource_group, **kwargs):
|
||||||
"network", "LoadBalancer", **kwargs
|
"network", "LoadBalancer", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -1341,7 +1326,7 @@ def load_balancer_create_or_update(name, resource_group, **kwargs):
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -1572,9 +1557,7 @@ def network_interface_create_or_update(
|
||||||
"network", "NetworkInterface", ip_configurations=ip_configurations, **kwargs
|
"network", "NetworkInterface", ip_configurations=ip_configurations, **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -1591,7 +1574,7 @@ def network_interface_create_or_update(
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -1961,9 +1944,7 @@ def public_ip_address_create_or_update(name, resource_group, **kwargs):
|
||||||
"network", "PublicIPAddress", **kwargs
|
"network", "PublicIPAddress", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -1980,7 +1961,7 @@ def public_ip_address_create_or_update(name, resource_group, **kwargs):
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -2172,9 +2153,7 @@ def route_filter_rule_create_or_update(
|
||||||
**kwargs
|
**kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -2195,7 +2174,7 @@ def route_filter_rule_create_or_update(
|
||||||
result = {"error": message}
|
result = {"error": message}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -2338,9 +2317,7 @@ def route_filter_create_or_update(name, resource_group, **kwargs):
|
||||||
"network", "RouteFilter", **kwargs
|
"network", "RouteFilter", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -2357,7 +2334,7 @@ def route_filter_create_or_update(name, resource_group, **kwargs):
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -2547,9 +2524,7 @@ def route_create_or_update(
|
||||||
**kwargs
|
**kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -2567,7 +2542,7 @@ def route_create_or_update(
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
@ -2710,9 +2685,7 @@ def route_table_create_or_update(name, resource_group, **kwargs):
|
||||||
"network", "RouteTable", **kwargs
|
"network", "RouteTable", **kwargs
|
||||||
)
|
)
|
||||||
except TypeError as exc:
|
except TypeError as exc:
|
||||||
result = {
|
result = {"error": "The object model could not be built. ({})".format(str(exc))}
|
||||||
"error": "The object model could not be built. ({0})".format(str(exc))
|
|
||||||
}
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -2729,7 +2702,7 @@ def route_table_create_or_update(name, resource_group, **kwargs):
|
||||||
result = {"error": str(exc)}
|
result = {"error": str(exc)}
|
||||||
except SerializationError as exc:
|
except SerializationError as exc:
|
||||||
result = {
|
result = {
|
||||||
"error": "The object model could not be parsed. ({0})".format(str(exc))
|
"error": "The object model could not be parsed. ({})".format(str(exc))
|
||||||
}
|
}
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
|
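
A detail worth noting in the hunks above: the "could not be built" dicts collapse onto one line, while the "could not be parsed" dicts keep their three-line form even though they get the same "{0}" to "{}" rewrite. The likely reason is the 88-column limit used by black, Salt's code formatter: "parsed" is one character longer than "built", which pushes the collapsed line just past the limit. A quick check, with the two candidate lines written out as plain strings:

    built = '        result = {"error": "The object model could not be built. ({})".format(str(exc))}'
    parsed = '        result = {"error": "The object model could not be parsed. ({})".format(str(exc))}'
    print(len(built), len(parsed))  # 88 89 -> only the first fits within 88 columns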
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Azure (ARM) Resource Execution Module


@@ -47,7 +46,6 @@ Azure (ARM) Resource Execution Module
 """

 # Python libs
-from __future__ import absolute_import

 import logging

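
Both deletions in the two header hunks above are pure Python 3 cleanups: UTF-8 is the default source encoding on Python 3 (PEP 3120), so the coding cookie is redundant, and absolute imports have been the default since Python 3.0 (PEP 328), so the __future__ import is a no-op. A small illustrative check, not part of the commit itself:

    import sys

    # No coding cookie and no __future__ import are needed on Python 3:
    # source files are UTF-8 by default and imports are absolute by default.
    assert sys.version_info >= (3,)
    print("café")  # a non-ASCII literal parses fine without a coding declaration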
@@ -452,9 +450,7 @@ def deployment_create_or_update(
             "resource", "DeploymentProperties", **deploy_kwargs
         )
     except TypeError as exc:
-        result = {
-            "error": "The object model could not be built. ({0})".format(str(exc))
-        }
+        result = {"error": "The object model could not be built. ({})".format(str(exc))}
         return result

     try:

@@ -477,7 +473,7 @@ def deployment_create_or_update(
         result = {"error": str(exc)}
     except SerializationError as exc:
         result = {
-            "error": "The object model could not be parsed. ({0})".format(str(exc))
+            "error": "The object model could not be parsed. ({})".format(str(exc))
         }

     return result

@@ -630,9 +626,7 @@ def deployment_validate(
             "resource", "DeploymentProperties", **deploy_kwargs
         )
     except TypeError as exc:
-        result = {
-            "error": "The object model could not be built. ({0})".format(str(exc))
-        }
+        result = {"error": "The object model could not be built. ({})".format(str(exc))}
         return result

     try:

@@ -651,7 +645,7 @@ def deployment_validate(
         result = {"error": str(exc)}
     except SerializationError as exc:
         result = {
-            "error": "The object model could not be parsed. ({0})".format(str(exc))
+            "error": "The object model could not be parsed. ({})".format(str(exc))
         }

     return result

@@ -918,7 +912,7 @@ def policy_assignment_create(name, scope, definition_name, **kwargs):
         definition = definition_list[definition_name]
     else:
         definition = {
-            "error": 'The policy definition named "{0}" could not be found.'.format(
+            "error": 'The policy definition named "{}" could not be found.'.format(
                 definition_name
             )
         }

@@ -938,7 +932,7 @@ def policy_assignment_create(name, scope, definition_name, **kwargs):
             )
         except TypeError as exc:
             result = {
-                "error": "The object model could not be built. ({0})".format(str(exc))
+                "error": "The object model could not be built. ({})".format(str(exc))
             }
             return result


@@ -952,11 +946,11 @@ def policy_assignment_create(name, scope, definition_name, **kwargs):
             result = {"error": str(exc)}
         except SerializationError as exc:
             result = {
-                "error": "The object model could not be parsed. ({0})".format(str(exc))
+                "error": "The object model could not be parsed. ({})".format(str(exc))
             }
     else:
         result = {
-            "error": 'The policy definition named "{0}" could not be found.'.format(
+            "error": 'The policy definition named "{}" could not be found.'.format(
                 definition_name
             )
         }

@@ -1098,9 +1092,7 @@ def policy_definition_create_or_update(
             "resource.policy", "PolicyDefinition", **policy_kwargs
         )
     except TypeError as exc:
-        result = {
-            "error": "The object model could not be built. ({0})".format(str(exc))
-        }
+        result = {"error": "The object model could not be built. ({})".format(str(exc))}
         return result

     try:

@@ -1113,7 +1105,7 @@ def policy_definition_create_or_update(
         result = {"error": str(exc)}
     except SerializationError as exc:
         result = {
-            "error": "The object model could not be parsed. ({0})".format(str(exc))
+            "error": "The object model could not be parsed. ({})".format(str(exc))
         }

     return result
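
As in the network module, pyupgrade only rewrites format fields whose indexes are purely sequential and used once; "{0}".format(x) and "{}".format(x) produce identical output, while manual numbering still has a purpose when a field is reused or reordered, and such cases are left alone. A short sketch of the distinction (the reused-field message below is made up for illustration):

    exc = TypeError("boom")

    # Sequential, single-use fields: the explicit index adds nothing.
    assert "The object model could not be parsed. ({})".format(str(exc)) == (
        "The object model could not be parsed. ({0})".format(str(exc))
    )

    # Reused field: manual numbering is still required, so this form stays.
    print('The policy "{0}" was not found; create "{0}" first.'.format("my-policy"))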
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 """
 Support for Bluetooth (using BlueZ in Linux).


@@ -9,19 +8,11 @@ The following packages are required packages for this module:
     bluez-utils >= 5.7
     pybluez >= 0.18
 """
-from __future__ import absolute_import, print_function, unicode_literals
+import shlex

-# Import salt libs
 import salt.utils.validate.net
 from salt.exceptions import CommandExecutionError

-# Import 3rd-party libs
-# pylint: disable=import-error
-from salt.ext.six.moves import shlex_quote as _cmd_quote
-
-# pylint: enable=import-error
-
-
 HAS_PYBLUEZ = False
 try:
     import bluetooth  # pylint: disable=import-error
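
The only behavioural-looking change in this header is swapping salt.ext.six.moves.shlex_quote for the standard library's shlex.quote, which is exactly what that six.moves alias resolved to on Python 3, so quoting behaviour is unchanged and only the import moves. A quick sketch of what the helper does:

    import shlex

    print(shlex.quote("hci0"))         # hci0 -- safe tokens pass through untouched
    print(shlex.quote("x; rm -rf /"))  # 'x; rm -rf /' -- anything else is single-quoted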
@@ -89,7 +80,7 @@ def address_():
             dev = comps[0]
             ret[dev] = {
                 "device": dev,
-                "path": "/sys/class/bluetooth/{0}".format(dev),
+                "path": "/sys/class/bluetooth/{}".format(dev),
             }
         if "BD Address" in line:
             comps = line.split()

@@ -121,7 +112,7 @@ def power(dev, mode):
     else:
         state = "down"
         mode = "off"
-    cmd = "hciconfig {0} {1}".format(dev, state)
+    cmd = "hciconfig {} {}".format(dev, state)
     __salt__["cmd.run"](cmd).splitlines()
     info = address_()
     if info[dev]["power"] == mode:

@@ -142,9 +133,9 @@ def discoverable(dev):
     if dev not in address_():
         raise CommandExecutionError("Invalid dev passed to bluetooth.discoverable")

-    cmd = "hciconfig {0} iscan".format(dev)
+    cmd = "hciconfig {} iscan".format(dev)
     __salt__["cmd.run"](cmd).splitlines()
-    cmd = "hciconfig {0}".format(dev)
+    cmd = "hciconfig {}".format(dev)
     out = __salt__["cmd.run"](cmd)
     if "UP RUNNING ISCAN" in out:
         return True

@@ -164,9 +155,9 @@ def noscan(dev):
     if dev not in address_():
         raise CommandExecutionError("Invalid dev passed to bluetooth.noscan")

-    cmd = "hciconfig {0} noscan".format(dev)
+    cmd = "hciconfig {} noscan".format(dev)
     __salt__["cmd.run"](cmd).splitlines()
-    cmd = "hciconfig {0}".format(dev)
+    cmd = "hciconfig {}".format(dev)
     out = __salt__["cmd.run"](cmd)
     if "SCAN" in out:
         return False

@@ -203,7 +194,7 @@ def block(bdaddr):
     if not salt.utils.validate.net.mac(bdaddr):
         raise CommandExecutionError("Invalid BD address passed to bluetooth.block")

-    cmd = "hciconfig {0} block".format(bdaddr)
+    cmd = "hciconfig {} block".format(bdaddr)
     __salt__["cmd.run"](cmd).splitlines()


@@ -220,7 +211,7 @@ def unblock(bdaddr):
     if not salt.utils.validate.net.mac(bdaddr):
         raise CommandExecutionError("Invalid BD address passed to bluetooth.unblock")

-    cmd = "hciconfig {0} unblock".format(bdaddr)
+    cmd = "hciconfig {} unblock".format(bdaddr)
     __salt__["cmd.run"](cmd).splitlines()


@@ -251,8 +242,8 @@ def pair(address, key):
         )

     addy = address_()
-    cmd = "echo {0} | bluez-simple-agent {1} {2}".format(
-        _cmd_quote(addy["device"]), _cmd_quote(address), _cmd_quote(key)
+    cmd = "echo {} | bluez-simple-agent {} {}".format(
+        shlex.quote(addy["device"]), shlex.quote(address), shlex.quote(key)
     )
     out = __salt__["cmd.run"](cmd, python_shell=True).splitlines()
     return out
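
Because pair() builds a shell pipeline and runs it with python_shell=True, each interpolated value goes through shlex.quote before being dropped into the command string. A standalone sketch of the same construction (the function name and sample values are illustrative, not taken from a real device):

    import shlex

    def build_pair_cmd(device, address, key):
        # Mirrors the updated pair() above: quote every value before interpolation.
        return "echo {} | bluez-simple-agent {} {}".format(
            shlex.quote(device), shlex.quote(address), shlex.quote(key)
        )

    print(build_pair_cmd("hci0", "DE:AD:BE:EF:00:01", "1234"))
    # echo hci0 | bluez-simple-agent DE:AD:BE:EF:00:01 1234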
@@ -276,7 +267,7 @@ def unpair(address):
     if not salt.utils.validate.net.mac(address):
         raise CommandExecutionError("Invalid BD address passed to bluetooth.unpair")

-    cmd = "bluez-test-device remove {0}".format(address)
+    cmd = "bluez-test-device remove {}".format(address)
     out = __salt__["cmd.run"](cmd).splitlines()
     return out
