mirror of
https://github.com/saltstack/salt.git
synced 2025-04-15 09:10:20 +00:00
Blacken salt
This commit is contained in:
parent
c1baa329c6
commit
0b2a5613b3
2648 changed files with 425716 additions and 364406 deletions
|
@ -5,3 +5,4 @@ force_grid_wrap=0
|
|||
line_length=88
|
||||
ensure_newline_before_comments=True
|
||||
use_parentheses=True
|
||||
skip=salt/ext/,tests/kitchen/tests/
|
||||
|
|
|
@ -8,7 +8,7 @@ extension-pkg-whitelist=
|
|||
# Add files or directories to the blacklist. They should be base names, not
|
||||
# paths.
|
||||
ignore=CVS,
|
||||
ext
|
||||
ext,
|
||||
|
||||
# Add files or directories matching the regex patterns to the blacklist. The
|
||||
# regex matches against base names, not paths.
|
||||
|
@ -398,6 +398,7 @@ init-import=no
|
|||
# List of qualified module names which can have objects that can redefine
|
||||
# builtins.
|
||||
redefining-builtins-modules=six.moves,
|
||||
salt.ext.six.moves,
|
||||
past.builtins,
|
||||
future.builtins,
|
||||
builtins,
|
||||
|
@ -474,7 +475,9 @@ ignored-classes=SQLObject
|
|||
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||
# and thus existing member attributes cannot be deduced by static analysis). It
|
||||
# supports qualified module names, as well as Unix pattern matching.
|
||||
ignored-modules=
|
||||
ignored-modules=salt.ext.six.moves,
|
||||
six.moves,
|
||||
_MovedItems,
|
||||
|
||||
# Show a hint with possible names when a member name was not found. The aspect
|
||||
# of finding the hint is based on edit distance.
|
||||
|
@ -513,7 +516,7 @@ min-similarity-lines=4
|
|||
fileperms-default=0644
|
||||
|
||||
# File paths to ignore file permission. Glob patterns allowed.
|
||||
fileperms-ignore-paths=setup.py,noxfile.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py
|
||||
fileperms-ignore-paths=setup.py,noxfile.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py,tests/unit/files/rosters/ansible/roster.py
|
||||
|
||||
|
||||
[MODERNIZE]
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
||||
|
@ -8,54 +8,49 @@
|
|||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Setup the Transifex client configuration file
|
||||
'''
|
||||
"""
|
||||
|
||||
import getpass
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
import sys
|
||||
import getpass
|
||||
|
||||
import ConfigParser
|
||||
|
||||
HOST = 'https://www.transifex.com'
|
||||
HOST = "https://www.transifex.com"
|
||||
RCFILE = os.path.abspath(
|
||||
os.environ.get(
|
||||
'TRANSIFEX_RC',
|
||||
os.path.expanduser('~/.transifexrc')
|
||||
)
|
||||
os.environ.get("TRANSIFEX_RC", os.path.expanduser("~/.transifexrc"))
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
'''
|
||||
"""
|
||||
Run the setup code
|
||||
'''
|
||||
"""
|
||||
print(
|
||||
'This script will setup a Transifex client configuration file, or, '
|
||||
'if it already exists, make some minimal checks to see if it\'s '
|
||||
'properly configured\n'
|
||||
"This script will setup a Transifex client configuration file, or, "
|
||||
"if it already exists, make some minimal checks to see if it's "
|
||||
"properly configured\n"
|
||||
)
|
||||
if not os.path.exists(RCFILE):
|
||||
while True:
|
||||
username = os.environ.get('TRANSIFEX_USER', None)
|
||||
username = os.environ.get("TRANSIFEX_USER", None)
|
||||
if username is not None:
|
||||
break
|
||||
try:
|
||||
username = raw_input(
|
||||
'What is your username on Transifex.com? '
|
||||
)
|
||||
username = raw_input("What is your username on Transifex.com? ")
|
||||
if username:
|
||||
break
|
||||
except KeyboardInterrupt:
|
||||
print
|
||||
sys.exit(1)
|
||||
while True:
|
||||
password = os.environ.get('TRANSIFEX_PASS', None)
|
||||
password = os.environ.get("TRANSIFEX_PASS", None)
|
||||
if password is not None:
|
||||
break
|
||||
try:
|
||||
password = getpass.getpass(
|
||||
'What is your password on Transifex.com? '
|
||||
)
|
||||
password = getpass.getpass("What is your password on Transifex.com? ")
|
||||
if password:
|
||||
break
|
||||
except KeyboardInterrupt:
|
||||
|
@ -64,16 +59,16 @@ def main():
|
|||
|
||||
config = ConfigParser.SafeConfigParser()
|
||||
config.add_section(HOST)
|
||||
config.set(HOST, 'token', '')
|
||||
config.set(HOST, 'hostname', HOST)
|
||||
config.set(HOST, 'username', username)
|
||||
config.set(HOST, 'password', password)
|
||||
config.set(HOST, "token", "")
|
||||
config.set(HOST, "hostname", HOST)
|
||||
config.set(HOST, "username", username)
|
||||
config.set(HOST, "password", password)
|
||||
|
||||
config.write(open(RCFILE, 'w'))
|
||||
print('username and password stored in \'{0}\''.format(RCFILE))
|
||||
config.write(open(RCFILE, "w"))
|
||||
print("username and password stored in '{0}'".format(RCFILE))
|
||||
|
||||
os.chmod(RCFILE, 0600)
|
||||
print('Secured the permissions on \'{0}\' to 0600'.format(RCFILE))
|
||||
print("Secured the permissions on '{0}' to 0600".format(RCFILE))
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
@ -82,24 +77,30 @@ def main():
|
|||
config.read([RCFILE])
|
||||
|
||||
if not config.has_section(HOST):
|
||||
print('\'~/.transifexrc\' is not properly configured, it\'s missing '
|
||||
'the {0} section'.format(HOST))
|
||||
print(
|
||||
"'~/.transifexrc' is not properly configured, it's missing "
|
||||
"the {0} section".format(HOST)
|
||||
)
|
||||
|
||||
for setting in ('username', 'password', 'hostname', 'token'):
|
||||
for setting in ("username", "password", "hostname", "token"):
|
||||
if not config.has_option(HOST, setting):
|
||||
print('\'~/.transifexrc\' is not properly configured, it\'s '
|
||||
'missing the {0} option'.format(setting))
|
||||
print(
|
||||
"'~/.transifexrc' is not properly configured, it's "
|
||||
"missing the {0} option".format(setting)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
if setting == 'token':
|
||||
if setting == "token":
|
||||
# Token should be left empty
|
||||
continue
|
||||
|
||||
if not config.get(HOST, setting):
|
||||
print('\'~/.transifexrc\' is not properly configured, it\'s '
|
||||
'missing a value for the {0} option'.format(setting))
|
||||
print(
|
||||
"'~/.transifexrc' is not properly configured, it's "
|
||||
"missing a value for the {0} option".format(setting)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -13,18 +13,16 @@ import re
|
|||
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst.roles import set_classes
|
||||
|
||||
from pygments.lexer import RegexLexer, bygroups
|
||||
from pygments.lexers import get_lexer_by_name
|
||||
from pygments.token import Literal, Text, Operator, Keyword, Name, Number
|
||||
from pygments.token import Keyword, Literal, Name, Number, Operator, Text
|
||||
from pygments.util import ClassNotFound
|
||||
|
||||
from sphinx import addnodes
|
||||
from sphinx.roles import XRefRole
|
||||
from sphinx.domains import Domain, ObjType, Index
|
||||
from sphinx.directives import ObjectDescription
|
||||
from sphinx.util.nodes import make_refnode
|
||||
from sphinx.domains import Domain, Index, ObjType
|
||||
from sphinx.roles import XRefRole
|
||||
from sphinx.util.docfields import GroupedField, TypedField
|
||||
from sphinx.util.nodes import make_refnode
|
||||
|
||||
|
||||
class DocRef(object):
|
||||
|
@ -44,252 +42,275 @@ class DocRef(object):
|
|||
location of the RFC which defines some HTTP method.
|
||||
|
||||
"""
|
||||
return '{0}#{1}{2}'.format(self.base_url, self.anchor, self.section)
|
||||
return "{0}#{1}{2}".format(self.base_url, self.anchor, self.section)
|
||||
|
||||
|
||||
#: The URL of the HTTP/1.1 RFC which defines the HTTP methods OPTIONS, GET,
|
||||
#: HEAD, POST, PUT, DELETE, TRACE, and CONNECT.
|
||||
RFC2616 = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html'
|
||||
RFC2616 = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html"
|
||||
|
||||
#: The name to use for section anchors in RFC2616.
|
||||
RFC2616ANCHOR = 'sec'
|
||||
RFC2616ANCHOR = "sec"
|
||||
|
||||
#: The URL of the RFC which defines the HTTP PATCH method.
|
||||
RFC5789 = 'http://tools.ietf.org/html/rfc5789'
|
||||
RFC5789 = "http://tools.ietf.org/html/rfc5789"
|
||||
|
||||
#: The name to use for section anchors in RFC5789.
|
||||
RFC5789ANCHOR = 'section-'
|
||||
RFC5789ANCHOR = "section-"
|
||||
|
||||
#: Mapping from lowercase HTTP method name to :class:`DocRef` object which
|
||||
#: maintains the URL which points to the section of the RFC which defines that
|
||||
#: HTTP method.
|
||||
DOCREFS = {
|
||||
'patch': DocRef(RFC5789, RFC5789ANCHOR, 2),
|
||||
'options': DocRef(RFC2616, RFC2616ANCHOR, 9.2),
|
||||
'get': DocRef(RFC2616, RFC2616ANCHOR, 9.3),
|
||||
'head': DocRef(RFC2616, RFC2616ANCHOR, 9.4),
|
||||
'post': DocRef(RFC2616, RFC2616ANCHOR, 9.5),
|
||||
'put': DocRef(RFC2616, RFC2616ANCHOR, 9.6),
|
||||
'delete': DocRef(RFC2616, RFC2616ANCHOR, 9.7),
|
||||
'trace': DocRef(RFC2616, RFC2616ANCHOR, 9.8),
|
||||
'connect': DocRef(RFC2616, RFC2616ANCHOR, 9.9)
|
||||
"patch": DocRef(RFC5789, RFC5789ANCHOR, 2),
|
||||
"options": DocRef(RFC2616, RFC2616ANCHOR, 9.2),
|
||||
"get": DocRef(RFC2616, RFC2616ANCHOR, 9.3),
|
||||
"head": DocRef(RFC2616, RFC2616ANCHOR, 9.4),
|
||||
"post": DocRef(RFC2616, RFC2616ANCHOR, 9.5),
|
||||
"put": DocRef(RFC2616, RFC2616ANCHOR, 9.6),
|
||||
"delete": DocRef(RFC2616, RFC2616ANCHOR, 9.7),
|
||||
"trace": DocRef(RFC2616, RFC2616ANCHOR, 9.8),
|
||||
"connect": DocRef(RFC2616, RFC2616ANCHOR, 9.9),
|
||||
}
|
||||
|
||||
HTTP_STATUS_CODES = {
|
||||
100: 'Continue',
|
||||
101: 'Switching Protocols',
|
||||
102: 'Processing',
|
||||
200: 'OK',
|
||||
201: 'Created',
|
||||
202: 'Accepted',
|
||||
203: 'Non Authoritative Information',
|
||||
204: 'No Content',
|
||||
205: 'Reset Content',
|
||||
206: 'Partial Content',
|
||||
207: 'Multi Status',
|
||||
226: 'IM Used', # see RFC 3229
|
||||
300: 'Multiple Choices',
|
||||
301: 'Moved Permanently',
|
||||
302: 'Found',
|
||||
303: 'See Other',
|
||||
304: 'Not Modified',
|
||||
305: 'Use Proxy',
|
||||
307: 'Temporary Redirect',
|
||||
400: 'Bad Request',
|
||||
401: 'Unauthorized',
|
||||
402: 'Payment Required', # unused
|
||||
403: 'Forbidden',
|
||||
404: 'Not Found',
|
||||
405: 'Method Not Allowed',
|
||||
406: 'Not Acceptable',
|
||||
407: 'Proxy Authentication Required',
|
||||
408: 'Request Timeout',
|
||||
409: 'Conflict',
|
||||
410: 'Gone',
|
||||
411: 'Length Required',
|
||||
412: 'Precondition Failed',
|
||||
413: 'Request Entity Too Large',
|
||||
414: 'Request URI Too Long',
|
||||
415: 'Unsupported Media Type',
|
||||
416: 'Requested Range Not Satisfiable',
|
||||
417: 'Expectation Failed',
|
||||
418: "I'm a teapot", # see RFC 2324
|
||||
422: 'Unprocessable Entity',
|
||||
423: 'Locked',
|
||||
424: 'Failed Dependency',
|
||||
426: 'Upgrade Required',
|
||||
449: 'Retry With', # proprietary MS extension
|
||||
500: 'Internal Server Error',
|
||||
501: 'Not Implemented',
|
||||
502: 'Bad Gateway',
|
||||
503: 'Service Unavailable',
|
||||
504: 'Gateway Timeout',
|
||||
505: 'HTTP Version Not Supported',
|
||||
507: 'Insufficient Storage',
|
||||
510: 'Not Extended'
|
||||
100: "Continue",
|
||||
101: "Switching Protocols",
|
||||
102: "Processing",
|
||||
200: "OK",
|
||||
201: "Created",
|
||||
202: "Accepted",
|
||||
203: "Non Authoritative Information",
|
||||
204: "No Content",
|
||||
205: "Reset Content",
|
||||
206: "Partial Content",
|
||||
207: "Multi Status",
|
||||
226: "IM Used", # see RFC 3229
|
||||
300: "Multiple Choices",
|
||||
301: "Moved Permanently",
|
||||
302: "Found",
|
||||
303: "See Other",
|
||||
304: "Not Modified",
|
||||
305: "Use Proxy",
|
||||
307: "Temporary Redirect",
|
||||
400: "Bad Request",
|
||||
401: "Unauthorized",
|
||||
402: "Payment Required", # unused
|
||||
403: "Forbidden",
|
||||
404: "Not Found",
|
||||
405: "Method Not Allowed",
|
||||
406: "Not Acceptable",
|
||||
407: "Proxy Authentication Required",
|
||||
408: "Request Timeout",
|
||||
409: "Conflict",
|
||||
410: "Gone",
|
||||
411: "Length Required",
|
||||
412: "Precondition Failed",
|
||||
413: "Request Entity Too Large",
|
||||
414: "Request URI Too Long",
|
||||
415: "Unsupported Media Type",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
417: "Expectation Failed",
|
||||
418: "I'm a teapot", # see RFC 2324
|
||||
422: "Unprocessable Entity",
|
||||
423: "Locked",
|
||||
424: "Failed Dependency",
|
||||
426: "Upgrade Required",
|
||||
449: "Retry With", # proprietary MS extension
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
502: "Bad Gateway",
|
||||
503: "Service Unavailable",
|
||||
504: "Gateway Timeout",
|
||||
505: "HTTP Version Not Supported",
|
||||
507: "Insufficient Storage",
|
||||
510: "Not Extended",
|
||||
}
|
||||
|
||||
http_sig_param_re = re.compile(r'\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)',
|
||||
re.VERBOSE)
|
||||
http_sig_param_re = re.compile(
|
||||
r"\((?:(?P<type>[^:)]+):)?(?P<name>[\w_]+)\)", re.VERBOSE
|
||||
)
|
||||
|
||||
|
||||
def http_resource_anchor(method, path):
|
||||
path = re.sub(r'[<>:/]', '-', path)
|
||||
return method.lower() + '-' + path
|
||||
path = re.sub(r"[<>:/]", "-", path)
|
||||
return method.lower() + "-" + path
|
||||
|
||||
|
||||
class HTTPResource(ObjectDescription):
|
||||
|
||||
doc_field_types = [
|
||||
TypedField('parameter', label='Parameters',
|
||||
names=('param', 'parameter', 'arg', 'argument'),
|
||||
typerolename='obj', typenames=('paramtype', 'type')),
|
||||
TypedField('jsonparameter', label='JSON Parameters',
|
||||
names=('jsonparameter', 'jsonparam', 'json'),
|
||||
typerolename='obj', typenames=('jsonparamtype', 'jsontype')),
|
||||
TypedField('queryparameter', label='Query Parameters',
|
||||
names=('queryparameter', 'queryparam', 'qparam', 'query'),
|
||||
typerolename='obj', typenames=('queryparamtype', 'querytype', 'qtype')),
|
||||
GroupedField('formparameter', label='Form Parameters',
|
||||
names=('formparameter', 'formparam', 'fparam', 'form')),
|
||||
GroupedField('requestheader', label='Request Headers',
|
||||
rolename='mailheader',
|
||||
names=('reqheader', 'requestheader')),
|
||||
GroupedField('responseheader', label='Response Headers',
|
||||
rolename='mailheader',
|
||||
names=('resheader', 'responseheader')),
|
||||
GroupedField('statuscode', label='Status Codes',
|
||||
rolename='statuscode',
|
||||
names=('statuscode', 'status', 'code'))
|
||||
TypedField(
|
||||
"parameter",
|
||||
label="Parameters",
|
||||
names=("param", "parameter", "arg", "argument"),
|
||||
typerolename="obj",
|
||||
typenames=("paramtype", "type"),
|
||||
),
|
||||
TypedField(
|
||||
"jsonparameter",
|
||||
label="JSON Parameters",
|
||||
names=("jsonparameter", "jsonparam", "json"),
|
||||
typerolename="obj",
|
||||
typenames=("jsonparamtype", "jsontype"),
|
||||
),
|
||||
TypedField(
|
||||
"queryparameter",
|
||||
label="Query Parameters",
|
||||
names=("queryparameter", "queryparam", "qparam", "query"),
|
||||
typerolename="obj",
|
||||
typenames=("queryparamtype", "querytype", "qtype"),
|
||||
),
|
||||
GroupedField(
|
||||
"formparameter",
|
||||
label="Form Parameters",
|
||||
names=("formparameter", "formparam", "fparam", "form"),
|
||||
),
|
||||
GroupedField(
|
||||
"requestheader",
|
||||
label="Request Headers",
|
||||
rolename="mailheader",
|
||||
names=("reqheader", "requestheader"),
|
||||
),
|
||||
GroupedField(
|
||||
"responseheader",
|
||||
label="Response Headers",
|
||||
rolename="mailheader",
|
||||
names=("resheader", "responseheader"),
|
||||
),
|
||||
GroupedField(
|
||||
"statuscode",
|
||||
label="Status Codes",
|
||||
rolename="statuscode",
|
||||
names=("statuscode", "status", "code"),
|
||||
),
|
||||
]
|
||||
|
||||
method = NotImplemented
|
||||
|
||||
def handle_signature(self, sig, signode):
|
||||
method = self.method.upper() + ' '
|
||||
method = self.method.upper() + " "
|
||||
signode += addnodes.desc_name(method, method)
|
||||
offset = 0
|
||||
for match in http_sig_param_re.finditer(sig):
|
||||
path = sig[offset:match.start()]
|
||||
path = sig[offset : match.start()]
|
||||
signode += addnodes.desc_name(path, path)
|
||||
params = addnodes.desc_parameterlist()
|
||||
typ = match.group('type')
|
||||
typ = match.group("type")
|
||||
if typ:
|
||||
typ = typ + ': '
|
||||
typ = typ + ": "
|
||||
params += addnodes.desc_annotation(typ, typ)
|
||||
name = match.group('name')
|
||||
name = match.group("name")
|
||||
params += addnodes.desc_parameter(name, name)
|
||||
signode += params
|
||||
offset = match.end()
|
||||
if offset < len(sig):
|
||||
path = sig[offset:len(sig)]
|
||||
path = sig[offset : len(sig)]
|
||||
signode += addnodes.desc_name(path, path)
|
||||
fullname = self.method.upper() + ' ' + path
|
||||
signode['method'] = self.method
|
||||
signode['path'] = sig
|
||||
signode['fullname'] = fullname
|
||||
fullname = self.method.upper() + " " + path
|
||||
signode["method"] = self.method
|
||||
signode["path"] = sig
|
||||
signode["fullname"] = fullname
|
||||
return (fullname, self.method, sig)
|
||||
|
||||
def needs_arglist(self):
|
||||
return False
|
||||
|
||||
def add_target_and_index(self, name_cls, sig, signode):
|
||||
signode['ids'].append(http_resource_anchor(*name_cls[1:]))
|
||||
self.env.domaindata['http'][self.method][sig] = (self.env.docname, '')
|
||||
signode["ids"].append(http_resource_anchor(*name_cls[1:]))
|
||||
self.env.domaindata["http"][self.method][sig] = (self.env.docname, "")
|
||||
|
||||
def get_index_text(self, modname, name):
|
||||
return ''
|
||||
return ""
|
||||
|
||||
|
||||
class HTTPOptions(HTTPResource):
|
||||
|
||||
method = 'options'
|
||||
method = "options"
|
||||
|
||||
|
||||
class HTTPHead(HTTPResource):
|
||||
|
||||
method = 'head'
|
||||
method = "head"
|
||||
|
||||
|
||||
class HTTPPatch(HTTPResource):
|
||||
|
||||
method = 'patch'
|
||||
method = "patch"
|
||||
|
||||
|
||||
class HTTPPost(HTTPResource):
|
||||
|
||||
method = 'post'
|
||||
method = "post"
|
||||
|
||||
|
||||
class HTTPGet(HTTPResource):
|
||||
|
||||
method = 'get'
|
||||
method = "get"
|
||||
|
||||
|
||||
class HTTPPut(HTTPResource):
|
||||
|
||||
method = 'put'
|
||||
method = "put"
|
||||
|
||||
|
||||
class HTTPDelete(HTTPResource):
|
||||
|
||||
method = 'delete'
|
||||
method = "delete"
|
||||
|
||||
|
||||
class HTTPTrace(HTTPResource):
|
||||
|
||||
method = 'trace'
|
||||
method = "trace"
|
||||
|
||||
|
||||
def http_statuscode_role(name, rawtext, text, lineno, inliner,
|
||||
options={}, content=[]):
|
||||
def http_statuscode_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
|
||||
if text.isdigit():
|
||||
code = int(text)
|
||||
try:
|
||||
status = HTTP_STATUS_CODES[code]
|
||||
except KeyError:
|
||||
msg = inliner.reporter.error('%d is invalid HTTP status code'
|
||||
% code, lineno=lineno)
|
||||
msg = inliner.reporter.error(
|
||||
"%d is invalid HTTP status code" % code, lineno=lineno
|
||||
)
|
||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||
return [prb], [msg]
|
||||
else:
|
||||
try:
|
||||
code, status = re.split(r'\s', text.strip(), 1)
|
||||
code, status = re.split(r"\s", text.strip(), 1)
|
||||
code = int(code)
|
||||
except ValueError:
|
||||
msg = inliner.reporter.error(
|
||||
'HTTP status code must be an integer (e.g. `200`) or '
|
||||
'start with an integer (e.g. `200 OK`); %r is invalid' %
|
||||
text,
|
||||
line=lineno
|
||||
"HTTP status code must be an integer (e.g. `200`) or "
|
||||
"start with an integer (e.g. `200 OK`); %r is invalid" % text,
|
||||
line=lineno,
|
||||
)
|
||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||
return [prb], [msg]
|
||||
nodes.reference(rawtext)
|
||||
if code == 226:
|
||||
url = 'http://www.ietf.org/rfc/rfc3229.txt'
|
||||
url = "http://www.ietf.org/rfc/rfc3229.txt"
|
||||
if code == 418:
|
||||
url = 'http://www.ietf.org/rfc/rfc2324.txt'
|
||||
url = "http://www.ietf.org/rfc/rfc2324.txt"
|
||||
if code == 449:
|
||||
url = 'http://msdn.microsoft.com/en-us/library' \
|
||||
'/dd891478(v=prot.10).aspx'
|
||||
url = "http://msdn.microsoft.com/en-us/library" "/dd891478(v=prot.10).aspx"
|
||||
elif code in HTTP_STATUS_CODES:
|
||||
url = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html' \
|
||||
'#sec10.' + ('%d.%d' % (code // 100, 1 + code % 100))
|
||||
url = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html" "#sec10." + (
|
||||
"%d.%d" % (code // 100, 1 + code % 100)
|
||||
)
|
||||
else:
|
||||
url = ''
|
||||
url = ""
|
||||
set_classes(options)
|
||||
node = nodes.reference(rawtext, '%d %s' % (code, status),
|
||||
refuri=url, **options)
|
||||
node = nodes.reference(rawtext, "%d %s" % (code, status), refuri=url, **options)
|
||||
return [node], []
|
||||
|
||||
|
||||
def http_method_role(name, rawtext, text, lineno, inliner,
|
||||
options={}, content=[]):
|
||||
def http_method_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
|
||||
method = str(text).lower()
|
||||
if method not in DOCREFS:
|
||||
msg = inliner.reporter.error('%s is not valid HTTP method' % method,
|
||||
lineno=lineno)
|
||||
msg = inliner.reporter.error(
|
||||
"%s is not valid HTTP method" % method, lineno=lineno
|
||||
)
|
||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||
return [prb], [msg]
|
||||
url = str(DOCREFS[method])
|
||||
|
@ -298,51 +319,61 @@ def http_method_role(name, rawtext, text, lineno, inliner,
|
|||
|
||||
|
||||
class HTTPXRefRole(XRefRole):
|
||||
|
||||
def __init__(self, method, **kwargs):
|
||||
XRefRole.__init__(self, **kwargs)
|
||||
self.method = method
|
||||
|
||||
def process_link(self, env, refnode, has_explicit_title, title, target):
|
||||
if not target.startswith('/'):
|
||||
if not target.startswith("/"):
|
||||
pass
|
||||
if not has_explicit_title:
|
||||
title = self.method.upper() + ' ' + title
|
||||
title = self.method.upper() + " " + title
|
||||
return title, target
|
||||
|
||||
|
||||
class HTTPIndex(Index):
|
||||
|
||||
name = 'routingtable'
|
||||
localname = 'HTTP Routing Table'
|
||||
shortname = 'routing table'
|
||||
name = "routingtable"
|
||||
localname = "HTTP Routing Table"
|
||||
shortname = "routing table"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(HTTPIndex, self).__init__(*args, **kwargs)
|
||||
|
||||
self.ignore = [[l for l in x.split('/') if l]
|
||||
for x in self.domain.env.config['http_index_ignore_prefixes']]
|
||||
self.ignore = [
|
||||
[l for l in x.split("/") if l]
|
||||
for x in self.domain.env.config["http_index_ignore_prefixes"]
|
||||
]
|
||||
self.ignore.sort(key=lambda x: -len(x))
|
||||
|
||||
def grouping_prefix(self, path):
|
||||
letters = [x for x in path.split('/') if x]
|
||||
letters = [x for x in path.split("/") if x]
|
||||
for prefix in self.ignore:
|
||||
if letters[:len(prefix)] == prefix:
|
||||
return '/' + '/'.join(letters[:len(prefix) + 1])
|
||||
return '/%s' % (letters[0] if letters else '',)
|
||||
if letters[: len(prefix)] == prefix:
|
||||
return "/" + "/".join(letters[: len(prefix) + 1])
|
||||
return "/%s" % (letters[0] if letters else "",)
|
||||
|
||||
def generate(self, docnames=None):
|
||||
content = {}
|
||||
items = ((method, path, info)
|
||||
items = (
|
||||
(method, path, info)
|
||||
for method, routes in self.domain.routes.items()
|
||||
for path, info in routes.items())
|
||||
for path, info in routes.items()
|
||||
)
|
||||
items = sorted(items, key=lambda item: item[1])
|
||||
for method, path, info in items:
|
||||
entries = content.setdefault(self.grouping_prefix(path), [])
|
||||
entries.append([
|
||||
method.upper() + ' ' + path, 0, info[0],
|
||||
http_resource_anchor(method, path), '', '', info[1]
|
||||
])
|
||||
entries.append(
|
||||
[
|
||||
method.upper() + " " + path,
|
||||
0,
|
||||
info[0],
|
||||
http_resource_anchor(method, path),
|
||||
"",
|
||||
"",
|
||||
info[1],
|
||||
]
|
||||
)
|
||||
content = sorted(content.items(), key=lambda k: k[0])
|
||||
return (content, True)
|
||||
|
||||
|
@ -350,53 +381,53 @@ class HTTPIndex(Index):
|
|||
class HTTPDomain(Domain):
|
||||
"""HTTP domain."""
|
||||
|
||||
name = 'http'
|
||||
label = 'HTTP'
|
||||
name = "http"
|
||||
label = "HTTP"
|
||||
|
||||
object_types = {
|
||||
'options': ObjType('options', 'options', 'obj'),
|
||||
'head': ObjType('head', 'head', 'obj'),
|
||||
'post': ObjType('post', 'post', 'obj'),
|
||||
'get': ObjType('get', 'get', 'obj'),
|
||||
'put': ObjType('put', 'put', 'obj'),
|
||||
'patch': ObjType('patch', 'patch', 'obj'),
|
||||
'delete': ObjType('delete', 'delete', 'obj'),
|
||||
'trace': ObjType('trace', 'trace', 'obj')
|
||||
"options": ObjType("options", "options", "obj"),
|
||||
"head": ObjType("head", "head", "obj"),
|
||||
"post": ObjType("post", "post", "obj"),
|
||||
"get": ObjType("get", "get", "obj"),
|
||||
"put": ObjType("put", "put", "obj"),
|
||||
"patch": ObjType("patch", "patch", "obj"),
|
||||
"delete": ObjType("delete", "delete", "obj"),
|
||||
"trace": ObjType("trace", "trace", "obj"),
|
||||
}
|
||||
|
||||
directives = {
|
||||
'options': HTTPOptions,
|
||||
'head': HTTPHead,
|
||||
'post': HTTPPost,
|
||||
'get': HTTPGet,
|
||||
'put': HTTPPut,
|
||||
'patch': HTTPPatch,
|
||||
'delete': HTTPDelete,
|
||||
'trace': HTTPTrace
|
||||
"options": HTTPOptions,
|
||||
"head": HTTPHead,
|
||||
"post": HTTPPost,
|
||||
"get": HTTPGet,
|
||||
"put": HTTPPut,
|
||||
"patch": HTTPPatch,
|
||||
"delete": HTTPDelete,
|
||||
"trace": HTTPTrace,
|
||||
}
|
||||
|
||||
roles = {
|
||||
'options': HTTPXRefRole('options'),
|
||||
'head': HTTPXRefRole('head'),
|
||||
'post': HTTPXRefRole('post'),
|
||||
'get': HTTPXRefRole('get'),
|
||||
'put': HTTPXRefRole('put'),
|
||||
'patch': HTTPXRefRole('patch'),
|
||||
'delete': HTTPXRefRole('delete'),
|
||||
'trace': HTTPXRefRole('trace'),
|
||||
'statuscode': http_statuscode_role,
|
||||
'method': http_method_role
|
||||
"options": HTTPXRefRole("options"),
|
||||
"head": HTTPXRefRole("head"),
|
||||
"post": HTTPXRefRole("post"),
|
||||
"get": HTTPXRefRole("get"),
|
||||
"put": HTTPXRefRole("put"),
|
||||
"patch": HTTPXRefRole("patch"),
|
||||
"delete": HTTPXRefRole("delete"),
|
||||
"trace": HTTPXRefRole("trace"),
|
||||
"statuscode": http_statuscode_role,
|
||||
"method": http_method_role,
|
||||
}
|
||||
|
||||
initial_data = {
|
||||
'options': {}, # path: (docname, synopsis)
|
||||
'head': {},
|
||||
'post': {},
|
||||
'get': {},
|
||||
'put': {},
|
||||
'patch': {},
|
||||
'delete': {},
|
||||
'trace': {}
|
||||
"options": {}, # path: (docname, synopsis)
|
||||
"head": {},
|
||||
"post": {},
|
||||
"get": {},
|
||||
"put": {},
|
||||
"patch": {},
|
||||
"delete": {},
|
||||
"trace": {},
|
||||
}
|
||||
|
||||
# indices = [HTTPIndex]
|
||||
|
@ -412,17 +443,15 @@ class HTTPDomain(Domain):
|
|||
if info[0] == docname:
|
||||
del routes[path]
|
||||
|
||||
def resolve_xref(self, env, fromdocname, builder, typ, target,
|
||||
node, contnode):
|
||||
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||
try:
|
||||
info = self.data[str(typ)][target]
|
||||
except KeyError:
|
||||
return
|
||||
else:
|
||||
anchor = http_resource_anchor(typ, target)
|
||||
title = typ.upper() + ' ' + target
|
||||
return make_refnode(builder, fromdocname, info[0], anchor,
|
||||
contnode, title)
|
||||
title = typ.upper() + " " + target
|
||||
return make_refnode(builder, fromdocname, info[0], anchor, contnode, title)
|
||||
|
||||
def get_objects(self):
|
||||
for method, routes in self.routes.items():
|
||||
|
@ -434,16 +463,16 @@ class HTTPDomain(Domain):
|
|||
class HTTPLexer(RegexLexer):
|
||||
"""Lexer for HTTP sessions."""
|
||||
|
||||
name = 'HTTP'
|
||||
aliases = ['http']
|
||||
name = "HTTP"
|
||||
aliases = ["http"]
|
||||
|
||||
flags = re.DOTALL
|
||||
|
||||
def header_callback(self, match):
|
||||
if match.group(1).lower() == 'content-type':
|
||||
if match.group(1).lower() == "content-type":
|
||||
content_type = match.group(5).strip()
|
||||
if ';' in content_type:
|
||||
content_type = content_type[:content_type.find(';')].strip()
|
||||
if ";" in content_type:
|
||||
content_type = content_type[: content_type.find(";")].strip()
|
||||
self.content_type = content_type
|
||||
yield match.start(1), Name.Attribute, match.group(1)
|
||||
yield match.start(2), Text, match.group(2)
|
||||
|
@ -458,11 +487,12 @@ class HTTPLexer(RegexLexer):
|
|||
yield match.start(3), Text, match.group(3)
|
||||
|
||||
def content_callback(self, match):
|
||||
content_type = getattr(self, 'content_type', None)
|
||||
content_type = getattr(self, "content_type", None)
|
||||
content = match.group()
|
||||
offset = match.start()
|
||||
if content_type:
|
||||
from pygments.lexers import get_lexer_for_mimetype
|
||||
|
||||
try:
|
||||
lexer = get_lexer_for_mimetype(content_type)
|
||||
except ClassNotFound:
|
||||
|
@ -474,33 +504,50 @@ class HTTPLexer(RegexLexer):
|
|||
yield offset, Text, content
|
||||
|
||||
tokens = {
|
||||
'root': [
|
||||
(r'(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)'
|
||||
r'(HTTPS?)(/)(1\.[01])(\r?\n|$)',
|
||||
bygroups(Name.Function, Text, Name.Namespace, Text,
|
||||
Keyword.Reserved, Operator, Number, Text),
|
||||
'headers'),
|
||||
(r'(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
|
||||
bygroups(Keyword.Reserved, Operator, Number, Text, Number,
|
||||
Text, Name.Exception, Text),
|
||||
'headers'),
|
||||
"root": [
|
||||
(
|
||||
r"(GET|POST|PUT|PATCH|DELETE|HEAD|OPTIONS|TRACE)( +)([^ ]+)( +)"
|
||||
r"(HTTPS?)(/)(1\.[01])(\r?\n|$)",
|
||||
bygroups(
|
||||
Name.Function,
|
||||
Text,
|
||||
Name.Namespace,
|
||||
Text,
|
||||
Keyword.Reserved,
|
||||
Operator,
|
||||
Number,
|
||||
Text,
|
||||
),
|
||||
"headers",
|
||||
),
|
||||
(
|
||||
r"(HTTPS?)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)",
|
||||
bygroups(
|
||||
Keyword.Reserved,
|
||||
Operator,
|
||||
Number,
|
||||
Text,
|
||||
Number,
|
||||
Text,
|
||||
Name.Exception,
|
||||
Text,
|
||||
),
|
||||
"headers",
|
||||
),
|
||||
],
|
||||
'headers': [
|
||||
(r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
|
||||
(r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
|
||||
(r'\r?\n', Text, 'content')
|
||||
"headers": [
|
||||
(r"([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)", header_callback),
|
||||
(r"([\t ]+)([^\r\n]+)(\r?\n|$)", continuous_header_callback),
|
||||
(r"\r?\n", Text, "content"),
|
||||
],
|
||||
'content': [
|
||||
(r'.+', content_callback)
|
||||
]
|
||||
"content": [(r".+", content_callback)],
|
||||
}
|
||||
|
||||
|
||||
def setup(app):
|
||||
app.add_domain(HTTPDomain)
|
||||
try:
|
||||
get_lexer_by_name('http')
|
||||
get_lexer_by_name("http")
|
||||
except ClassNotFound:
|
||||
app.add_lexer('http', HTTPLexer())
|
||||
app.add_config_value('http_index_ignore_prefixes', [], None)
|
||||
|
||||
app.add_lexer("http", HTTPLexer())
|
||||
app.add_config_value("http_index_ignore_prefixes", [], None)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
|
||||
|
||||
|
||||
|
@ -7,23 +7,23 @@
|
|||
~~~~~~~~~~~~~~
|
||||
|
||||
Properly handle ``__func_alias__``
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Sphinx libs
|
||||
from sphinx.ext.autodoc import FunctionDocumenter as FunctionDocumenter
|
||||
|
||||
|
||||
class SaltFunctionDocumenter(FunctionDocumenter):
|
||||
'''
|
||||
"""
|
||||
Simple override of sphinx.ext.autodoc.FunctionDocumenter to properly render
|
||||
salt's aliased function names.
|
||||
'''
|
||||
"""
|
||||
|
||||
def format_name(self):
|
||||
'''
|
||||
"""
|
||||
Format the function name
|
||||
'''
|
||||
if not hasattr(self.module, '__func_alias__'):
|
||||
"""
|
||||
if not hasattr(self.module, "__func_alias__"):
|
||||
# Resume normal sphinx.ext.autodoc operation
|
||||
return super(FunctionDocumenter, self).format_name()
|
||||
|
||||
|
@ -46,4 +46,4 @@ def setup(app):
|
|||
# add_autodocumenter() must be called after the initial setup and the
|
||||
# 'builder-inited' event, as sphinx.ext.autosummary will restore the
|
||||
# original documenter on 'builder-inited'
|
||||
app.connect('env-before-read-docs', add_documenter)
|
||||
app.connect("env-before-read-docs", add_documenter)
|
||||
|
|
|
@ -2,70 +2,66 @@ import itertools
|
|||
import os
|
||||
import re
|
||||
|
||||
import salt
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst import Directive
|
||||
|
||||
from docutils.statemachine import ViewList
|
||||
from sphinx import addnodes
|
||||
from sphinx.directives import ObjectDescription
|
||||
from sphinx.domains import Domain, ObjType
|
||||
from sphinx.domains import python as python_domain
|
||||
from sphinx.domains.python import PyObject
|
||||
from sphinx.locale import _
|
||||
from sphinx.roles import XRefRole
|
||||
from sphinx.util.nodes import make_refnode
|
||||
from sphinx.util.nodes import nested_parse_with_titles
|
||||
from sphinx.util.nodes import set_source_info
|
||||
|
||||
from sphinx.domains import python as python_domain
|
||||
|
||||
import salt
|
||||
from sphinx.util.nodes import make_refnode, nested_parse_with_titles, set_source_info
|
||||
|
||||
|
||||
class Event(PyObject):
|
||||
'''
|
||||
"""
|
||||
Document Salt events
|
||||
'''
|
||||
domain = 'salt'
|
||||
"""
|
||||
|
||||
domain = "salt"
|
||||
|
||||
|
||||
class LiterateCoding(Directive):
|
||||
'''
|
||||
"""
|
||||
Auto-doc SLS files using literate-style comment/code separation
|
||||
'''
|
||||
"""
|
||||
|
||||
has_content = False
|
||||
required_arguments = 1
|
||||
optional_arguments = 0
|
||||
final_argument_whitespace = False
|
||||
|
||||
def parse_file(self, fpath):
|
||||
'''
|
||||
"""
|
||||
Read a file on the file system (relative to salt's base project dir)
|
||||
|
||||
:returns: A file-like object.
|
||||
:raises IOError: If the file cannot be found or read.
|
||||
'''
|
||||
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__),
|
||||
os.pardir))
|
||||
with open(os.path.join(sdir, fpath), 'rb') as f:
|
||||
"""
|
||||
sdir = os.path.abspath(os.path.join(os.path.dirname(salt.__file__), os.pardir))
|
||||
with open(os.path.join(sdir, fpath), "rb") as f:
|
||||
return f.readlines()
|
||||
|
||||
def parse_lit(self, lines):
|
||||
'''
|
||||
"""
|
||||
Parse a string line-by-line delineating comments and code
|
||||
|
||||
:returns: An tuple of boolean/list-of-string pairs. True designates a
|
||||
comment; False designates code.
|
||||
'''
|
||||
comment_char = '#' # TODO: move this into a directive option
|
||||
comment = re.compile(r'^\s*{0}[ \n]'.format(comment_char))
|
||||
"""
|
||||
comment_char = "#" # TODO: move this into a directive option
|
||||
comment = re.compile(r"^\s*{0}[ \n]".format(comment_char))
|
||||
section_test = lambda val: bool(comment.match(val))
|
||||
|
||||
sections = []
|
||||
for is_doc, group in itertools.groupby(lines, section_test):
|
||||
if is_doc:
|
||||
text = [comment.sub('', i).rstrip('\r\n') for i in group]
|
||||
text = [comment.sub("", i).rstrip("\r\n") for i in group]
|
||||
else:
|
||||
text = [i.rstrip('\r\n') for i in group]
|
||||
text = [i.rstrip("\r\n") for i in group]
|
||||
|
||||
sections.append((is_doc, text))
|
||||
|
||||
|
@ -79,33 +75,33 @@ class LiterateCoding(Directive):
|
|||
return [document.reporter.warning(str(exc), line=self.lineno)]
|
||||
|
||||
node = nodes.container()
|
||||
node['classes'] = ['lit-container']
|
||||
node["classes"] = ["lit-container"]
|
||||
node.document = self.state.document
|
||||
|
||||
enum = nodes.enumerated_list()
|
||||
enum['classes'] = ['lit-docs']
|
||||
enum["classes"] = ["lit-docs"]
|
||||
node.append(enum)
|
||||
|
||||
# make first list item
|
||||
list_item = nodes.list_item()
|
||||
list_item['classes'] = ['lit-item']
|
||||
list_item["classes"] = ["lit-item"]
|
||||
|
||||
for is_doc, line in lines:
|
||||
if is_doc and line == ['']:
|
||||
if is_doc and line == [""]:
|
||||
continue
|
||||
|
||||
section = nodes.section()
|
||||
|
||||
if is_doc:
|
||||
section['classes'] = ['lit-annotation']
|
||||
section["classes"] = ["lit-annotation"]
|
||||
|
||||
nested_parse_with_titles(self.state, ViewList(line), section)
|
||||
else:
|
||||
section['classes'] = ['lit-content']
|
||||
section["classes"] = ["lit-content"]
|
||||
|
||||
code = '\n'.join(line)
|
||||
code = "\n".join(line)
|
||||
literal = nodes.literal_block(code, code)
|
||||
literal['language'] = 'yaml'
|
||||
literal["language"] = "yaml"
|
||||
set_source_info(self, literal)
|
||||
section.append(literal)
|
||||
|
||||
|
@ -116,42 +112,41 @@ class LiterateCoding(Directive):
|
|||
if len(list_item.children) == 2:
|
||||
enum.append(list_item)
|
||||
list_item = nodes.list_item()
|
||||
list_item['classes'] = ['lit-item']
|
||||
list_item["classes"] = ["lit-item"]
|
||||
|
||||
# Non-semantic div for styling
|
||||
bg = nodes.container()
|
||||
bg['classes'] = ['lit-background']
|
||||
bg["classes"] = ["lit-background"]
|
||||
node.append(bg)
|
||||
|
||||
return [node]
|
||||
|
||||
|
||||
class LiterateFormula(LiterateCoding):
|
||||
'''
|
||||
"""
|
||||
Customizations to handle finding and parsing SLS files
|
||||
'''
|
||||
"""
|
||||
|
||||
def parse_file(self, sls_path):
|
||||
'''
|
||||
"""
|
||||
Given a typical Salt SLS path (e.g.: apache.vhosts.standard), find the
|
||||
file on the file system and parse it
|
||||
'''
|
||||
"""
|
||||
config = self.state.document.settings.env.config
|
||||
formulas_dirs = config.formulas_dirs
|
||||
fpath = sls_path.replace('.', '/')
|
||||
fpath = sls_path.replace(".", "/")
|
||||
|
||||
name_options = (
|
||||
'{0}.sls'.format(fpath),
|
||||
os.path.join(fpath, 'init.sls')
|
||||
)
|
||||
name_options = ("{0}.sls".format(fpath), os.path.join(fpath, "init.sls"))
|
||||
|
||||
paths = [os.path.join(fdir, fname)
|
||||
for fname in name_options
|
||||
for fdir in formulas_dirs]
|
||||
paths = [
|
||||
os.path.join(fdir, fname)
|
||||
for fname in name_options
|
||||
for fdir in formulas_dirs
|
||||
]
|
||||
|
||||
for i in paths:
|
||||
try:
|
||||
with open(i, 'rb') as f:
|
||||
with open(i, "rb") as f:
|
||||
return f.readlines()
|
||||
except IOError:
|
||||
pass
|
||||
|
@ -160,7 +155,7 @@ class LiterateFormula(LiterateCoding):
|
|||
|
||||
|
||||
class CurrentFormula(Directive):
|
||||
domain = 'salt'
|
||||
domain = "salt"
|
||||
has_content = False
|
||||
required_arguments = 1
|
||||
optional_arguments = 0
|
||||
|
@ -170,15 +165,15 @@ class CurrentFormula(Directive):
|
|||
def run(self):
|
||||
env = self.state.document.settings.env
|
||||
modname = self.arguments[0].strip()
|
||||
if modname == 'None':
|
||||
env.temp_data['salt:formula'] = None
|
||||
if modname == "None":
|
||||
env.temp_data["salt:formula"] = None
|
||||
else:
|
||||
env.temp_data['salt:formula'] = modname
|
||||
env.temp_data["salt:formula"] = modname
|
||||
return []
|
||||
|
||||
|
||||
class Formula(Directive):
|
||||
domain = 'salt'
|
||||
domain = "salt"
|
||||
has_content = True
|
||||
required_arguments = 1
|
||||
|
||||
|
@ -186,30 +181,31 @@ class Formula(Directive):
|
|||
env = self.state.document.settings.env
|
||||
formname = self.arguments[0].strip()
|
||||
|
||||
env.temp_data['salt:formula'] = formname
|
||||
env.temp_data["salt:formula"] = formname
|
||||
|
||||
if 'noindex' in self.options:
|
||||
if "noindex" in self.options:
|
||||
return []
|
||||
|
||||
env.domaindata['salt']['formulas'][formname] = (
|
||||
env.docname,
|
||||
self.options.get('synopsis', ''),
|
||||
self.options.get('platform', ''),
|
||||
'deprecated' in self.options)
|
||||
env.domaindata["salt"]["formulas"][formname] = (
|
||||
env.docname,
|
||||
self.options.get("synopsis", ""),
|
||||
self.options.get("platform", ""),
|
||||
"deprecated" in self.options,
|
||||
)
|
||||
|
||||
targetnode = nodes.target('', '', ids=['module-' + formname],
|
||||
ismod=True)
|
||||
targetnode = nodes.target("", "", ids=["module-" + formname], ismod=True)
|
||||
self.state.document.note_explicit_target(targetnode)
|
||||
|
||||
indextext = u'{0}-formula)'.format(formname)
|
||||
inode = addnodes.index(entries=[('single', indextext,
|
||||
'module-' + formname, '')])
|
||||
indextext = u"{0}-formula)".format(formname)
|
||||
inode = addnodes.index(
|
||||
entries=[("single", indextext, "module-" + formname, "")]
|
||||
)
|
||||
|
||||
return [targetnode, inode]
|
||||
|
||||
|
||||
class State(Directive):
|
||||
domain = 'salt'
|
||||
domain = "salt"
|
||||
has_content = True
|
||||
required_arguments = 1
|
||||
|
||||
|
@ -217,19 +213,18 @@ class State(Directive):
|
|||
env = self.state.document.settings.env
|
||||
statename = self.arguments[0].strip()
|
||||
|
||||
if 'noindex' in self.options:
|
||||
if "noindex" in self.options:
|
||||
return []
|
||||
|
||||
targetnode = nodes.target('', '', ids=['module-' + statename],
|
||||
ismod=True)
|
||||
targetnode = nodes.target("", "", ids=["module-" + statename], ismod=True)
|
||||
self.state.document.note_explicit_target(targetnode)
|
||||
|
||||
formula = env.temp_data.get('salt:formula')
|
||||
formula = env.temp_data.get("salt:formula")
|
||||
|
||||
indextext = u'{1} ({0}-formula)'.format(formula, statename)
|
||||
inode = addnodes.index(entries=[
|
||||
('single', indextext, 'module-{0}'.format(statename), ''),
|
||||
])
|
||||
indextext = u"{1} ({0}-formula)".format(formula, statename)
|
||||
inode = addnodes.index(
|
||||
entries=[("single", indextext, "module-{0}".format(statename), ""),]
|
||||
)
|
||||
|
||||
return [targetnode, inode]
|
||||
|
||||
|
@ -239,55 +234,56 @@ class SLSXRefRole(XRefRole):
|
|||
|
||||
|
||||
class SaltModuleIndex(python_domain.PythonModuleIndex):
|
||||
name = 'modindex'
|
||||
localname = _('Salt Module Index')
|
||||
shortname = _('all salt modules')
|
||||
name = "modindex"
|
||||
localname = _("Salt Module Index")
|
||||
shortname = _("all salt modules")
|
||||
|
||||
|
||||
class SaltDomain(python_domain.PythonDomain):
|
||||
name = 'salt'
|
||||
label = 'Salt'
|
||||
name = "salt"
|
||||
label = "Salt"
|
||||
data_version = 2
|
||||
|
||||
object_types = python_domain.PythonDomain.object_types
|
||||
object_types.update({
|
||||
'state': ObjType(_('state'), 'state'),
|
||||
})
|
||||
object_types.update(
|
||||
{"state": ObjType(_("state"), "state"),}
|
||||
)
|
||||
|
||||
directives = python_domain.PythonDomain.directives
|
||||
directives.update({
|
||||
'event': Event,
|
||||
'state': State,
|
||||
'formula': LiterateFormula,
|
||||
'currentformula': CurrentFormula,
|
||||
'saltconfig': LiterateCoding,
|
||||
})
|
||||
|
||||
directives.update(
|
||||
{
|
||||
"event": Event,
|
||||
"state": State,
|
||||
"formula": LiterateFormula,
|
||||
"currentformula": CurrentFormula,
|
||||
"saltconfig": LiterateCoding,
|
||||
}
|
||||
)
|
||||
|
||||
roles = python_domain.PythonDomain.roles
|
||||
roles.update({
|
||||
'formula': SLSXRefRole(),
|
||||
})
|
||||
roles.update(
|
||||
{"formula": SLSXRefRole(),}
|
||||
)
|
||||
|
||||
initial_data = python_domain.PythonDomain.initial_data
|
||||
initial_data.update({
|
||||
'formulas': {},
|
||||
})
|
||||
initial_data.update(
|
||||
{"formulas": {},}
|
||||
)
|
||||
|
||||
indices = [
|
||||
SaltModuleIndex,
|
||||
]
|
||||
|
||||
def resolve_xref(self, env, fromdocname, builder, type, target, node,
|
||||
contnode):
|
||||
if type == 'formula' and target in self.data['formulas']:
|
||||
doc, _, _, _ = self.data['formulas'].get(target, (None, None))
|
||||
def resolve_xref(self, env, fromdocname, builder, type, target, node, contnode):
|
||||
if type == "formula" and target in self.data["formulas"]:
|
||||
doc, _, _, _ = self.data["formulas"].get(target, (None, None))
|
||||
if doc:
|
||||
return make_refnode(builder, fromdocname, doc, target,
|
||||
contnode, target)
|
||||
return make_refnode(builder, fromdocname, doc, target, contnode, target)
|
||||
else:
|
||||
super(SaltDomain, self).resolve_xref(env, fromdocname, builder,
|
||||
type, target, node, contnode)
|
||||
super(SaltDomain, self).resolve_xref(
|
||||
env, fromdocname, builder, type, target, node, contnode
|
||||
)
|
||||
|
||||
|
||||
# Monkey-patch the Python domain remove the python module index
|
||||
python_domain.PythonDomain.indices = [SaltModuleIndex]
|
||||
|
@ -296,18 +292,34 @@ python_domain.PythonDomain.indices = [SaltModuleIndex]
|
|||
def setup(app):
|
||||
app.add_domain(SaltDomain)
|
||||
|
||||
formulas_path = 'templates/formulas'
|
||||
formulas_dir = os.path.join(os.path.abspath(os.path.dirname(salt.__file__)),
|
||||
formulas_path)
|
||||
app.add_config_value('formulas_dirs', [formulas_dir], 'env')
|
||||
formulas_path = "templates/formulas"
|
||||
formulas_dir = os.path.join(
|
||||
os.path.abspath(os.path.dirname(salt.__file__)), formulas_path
|
||||
)
|
||||
app.add_config_value("formulas_dirs", [formulas_dir], "env")
|
||||
|
||||
app.add_crossref_type(directivename="conf_master", rolename="conf_master",
|
||||
indextemplate="pair: %s; conf/master")
|
||||
app.add_crossref_type(directivename="conf_minion", rolename="conf_minion",
|
||||
indextemplate="pair: %s; conf/minion")
|
||||
app.add_crossref_type(directivename="conf_proxy", rolename="conf_proxy",
|
||||
indextemplate="pair: %s; conf/proxy")
|
||||
app.add_crossref_type(directivename="conf_log", rolename="conf_log",
|
||||
indextemplate="pair: %s; conf/logging")
|
||||
app.add_crossref_type(directivename="jinja_ref", rolename="jinja_ref",
|
||||
indextemplate="pair: %s; jinja filters")
|
||||
app.add_crossref_type(
|
||||
directivename="conf_master",
|
||||
rolename="conf_master",
|
||||
indextemplate="pair: %s; conf/master",
|
||||
)
|
||||
app.add_crossref_type(
|
||||
directivename="conf_minion",
|
||||
rolename="conf_minion",
|
||||
indextemplate="pair: %s; conf/minion",
|
||||
)
|
||||
app.add_crossref_type(
|
||||
directivename="conf_proxy",
|
||||
rolename="conf_proxy",
|
||||
indextemplate="pair: %s; conf/proxy",
|
||||
)
|
||||
app.add_crossref_type(
|
||||
directivename="conf_log",
|
||||
rolename="conf_log",
|
||||
indextemplate="pair: %s; conf/logging",
|
||||
)
|
||||
app.add_crossref_type(
|
||||
directivename="jinja_ref",
|
||||
rolename="jinja_ref",
|
||||
indextemplate="pair: %s; jinja filters",
|
||||
)
|
||||
|
|
|
@ -1,24 +1,24 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
saltrepo
|
||||
~~~~~~~~
|
||||
|
||||
SaltStack Repository Sphinx directives
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
def source_read_handler(app, docname, source):
|
||||
if '|repo_primary_branch|' in source[0]:
|
||||
if "|repo_primary_branch|" in source[0]:
|
||||
source[0] = source[0].replace(
|
||||
'|repo_primary_branch|',
|
||||
app.config.html_context['repo_primary_branch']
|
||||
"|repo_primary_branch|", app.config.html_context["repo_primary_branch"]
|
||||
)
|
||||
|
||||
|
||||
def setup(app):
|
||||
app.connect('source-read', source_read_handler)
|
||||
app.connect("source-read", source_read_handler)
|
||||
|
||||
return {
|
||||
'version': 'builtin',
|
||||
'parallel_read_safe': True,
|
||||
'parallel_write_safe': True,
|
||||
"version": "builtin",
|
||||
"parallel_read_safe": True,
|
||||
"parallel_write_safe": True,
|
||||
}
|
||||
|
|
|
@ -1,22 +1,24 @@
|
|||
'''
|
||||
"""
|
||||
Short-URL redirects
|
||||
'''
|
||||
"""
|
||||
import json
|
||||
import os
|
||||
|
||||
import sphinx.ext.intersphinx
|
||||
|
||||
DOCS_URL = 'http://docs.saltstack.com/en/latest/'
|
||||
DOCS_URL = "http://docs.saltstack.com/en/latest/"
|
||||
|
||||
|
||||
def write_urls_index(app, exc):
|
||||
'''
|
||||
"""
|
||||
Generate a JSON file to serve as an index for short-URL lookups
|
||||
'''
|
||||
inventory = os.path.join(app.builder.outdir, 'objects.inv')
|
||||
"""
|
||||
inventory = os.path.join(app.builder.outdir, "objects.inv")
|
||||
objects = sphinx.ext.intersphinx.fetch_inventory(app, DOCS_URL, inventory)
|
||||
|
||||
with open(os.path.join(app.builder.outdir, 'shorturls.json'), 'w') as f:
|
||||
with open(os.path.join(app.builder.outdir, "shorturls.json"), "w") as f:
|
||||
json.dump(objects, f)
|
||||
|
||||
|
||||
def setup(app):
|
||||
app.connect('build-finished', write_urls_index)
|
||||
app.connect("build-finished", write_urls_index)
|
||||
|
|
|
@ -36,8 +36,10 @@
|
|||
from __future__ import division
|
||||
|
||||
import re
|
||||
|
||||
from docutils import nodes
|
||||
from docutils.parsers.rst import directives
|
||||
|
||||
try:
|
||||
from sphinx.util.compat import Directive
|
||||
except ImportError:
|
||||
|
@ -140,7 +142,9 @@ class YouTube(Directive):
|
|||
aspect = None
|
||||
width = get_size(self.options, "width")
|
||||
height = get_size(self.options, "height")
|
||||
return [youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)]
|
||||
return [
|
||||
youtube(id=self.arguments[0], aspect=aspect, width=width, height=height)
|
||||
]
|
||||
|
||||
|
||||
def setup(app):
|
||||
|
|
618
doc/conf.py
618
doc/conf.py
|
@ -1,19 +1,19 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# pylint: disable=C0103,W0622
|
||||
'''
|
||||
"""
|
||||
Sphinx documentation for Salt
|
||||
'''
|
||||
import sys
|
||||
"""
|
||||
import os
|
||||
import re
|
||||
import types
|
||||
import sys
|
||||
import time
|
||||
import types
|
||||
|
||||
from sphinx.directives import TocTree
|
||||
|
||||
|
||||
class Mock(object):
|
||||
'''
|
||||
"""
|
||||
Mock out specified imports.
|
||||
|
||||
This allows autodoc to do its thing without having oodles of req'd
|
||||
|
@ -22,8 +22,11 @@ class Mock(object):
|
|||
This Mock class can be configured to return a specific values at specific names, if required.
|
||||
|
||||
http://read-the-docs.readthedocs.org/en/latest/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules
|
||||
'''
|
||||
def __init__(self, mapping=None, *args, **kwargs): # pylint: disable=unused-argument
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, mapping=None, *args, **kwargs
|
||||
): # pylint: disable=unused-argument
|
||||
"""
|
||||
Mapping allows autodoc to bypass the Mock object, but actually assign
|
||||
a specific value, expected by a specific attribute returned.
|
||||
|
@ -41,9 +44,9 @@ class Mock(object):
|
|||
def __getattr__(self, name):
|
||||
if name in self.__mapping:
|
||||
data = self.__mapping.get(name)
|
||||
elif name in ('__file__', '__path__'):
|
||||
data = '/dev/null'
|
||||
elif name in ('__mro_entries__', '__qualname__'):
|
||||
elif name in ("__file__", "__path__"):
|
||||
data = "/dev/null"
|
||||
elif name in ("__mro_entries__", "__qualname__"):
|
||||
raise AttributeError("'Mock' object has no attribute '%s'" % (name))
|
||||
else:
|
||||
data = Mock(mapping=self.__mapping)
|
||||
|
@ -61,7 +64,7 @@ class Mock(object):
|
|||
|
||||
|
||||
def mock_decorator_with_params(*oargs, **okwargs): # pylint: disable=unused-argument
|
||||
'''
|
||||
"""
|
||||
Optionally mock a decorator that takes parameters
|
||||
|
||||
E.g.:
|
||||
|
@ -69,153 +72,146 @@ def mock_decorator_with_params(*oargs, **okwargs): # pylint: disable=unused-arg
|
|||
@blah(stuff=True)
|
||||
def things():
|
||||
pass
|
||||
'''
|
||||
"""
|
||||
|
||||
def inner(fn, *iargs, **ikwargs): # pylint: disable=unused-argument
|
||||
if hasattr(fn, '__call__'):
|
||||
if hasattr(fn, "__call__"):
|
||||
return fn
|
||||
return Mock()
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
MOCK_MODULES = [
|
||||
# Python stdlib
|
||||
'user',
|
||||
|
||||
"user",
|
||||
# salt core
|
||||
'concurrent',
|
||||
'Crypto',
|
||||
'Crypto.Signature',
|
||||
'Crypto.Cipher',
|
||||
'Crypto.Hash',
|
||||
'Crypto.PublicKey',
|
||||
'Crypto.Random',
|
||||
'Crypto.Signature',
|
||||
'Crypto.Signature.PKCS1_v1_5',
|
||||
'M2Crypto',
|
||||
'msgpack',
|
||||
'yaml',
|
||||
'yaml.constructor',
|
||||
'yaml.nodes',
|
||||
'yaml.parser',
|
||||
'yaml.scanner',
|
||||
'zmq',
|
||||
'zmq.eventloop',
|
||||
'zmq.eventloop.ioloop',
|
||||
|
||||
"concurrent",
|
||||
"Crypto",
|
||||
"Crypto.Signature",
|
||||
"Crypto.Cipher",
|
||||
"Crypto.Hash",
|
||||
"Crypto.PublicKey",
|
||||
"Crypto.Random",
|
||||
"Crypto.Signature",
|
||||
"Crypto.Signature.PKCS1_v1_5",
|
||||
"M2Crypto",
|
||||
"msgpack",
|
||||
"yaml",
|
||||
"yaml.constructor",
|
||||
"yaml.nodes",
|
||||
"yaml.parser",
|
||||
"yaml.scanner",
|
||||
"zmq",
|
||||
"zmq.eventloop",
|
||||
"zmq.eventloop.ioloop",
|
||||
# third-party libs for cloud modules
|
||||
'libcloud',
|
||||
'libcloud.compute',
|
||||
'libcloud.compute.base',
|
||||
'libcloud.compute.deployment',
|
||||
'libcloud.compute.providers',
|
||||
'libcloud.compute.types',
|
||||
'libcloud.loadbalancer',
|
||||
'libcloud.loadbalancer.types',
|
||||
'libcloud.loadbalancer.providers',
|
||||
'libcloud.common',
|
||||
'libcloud.common.google',
|
||||
|
||||
"libcloud",
|
||||
"libcloud.compute",
|
||||
"libcloud.compute.base",
|
||||
"libcloud.compute.deployment",
|
||||
"libcloud.compute.providers",
|
||||
"libcloud.compute.types",
|
||||
"libcloud.loadbalancer",
|
||||
"libcloud.loadbalancer.types",
|
||||
"libcloud.loadbalancer.providers",
|
||||
"libcloud.common",
|
||||
"libcloud.common.google",
|
||||
# third-party libs for netapi modules
|
||||
'cherrypy',
|
||||
'cherrypy.lib',
|
||||
'cherrypy.process',
|
||||
'cherrypy.wsgiserver',
|
||||
'cherrypy.wsgiserver.ssl_builtin',
|
||||
|
||||
'tornado',
|
||||
'tornado.concurrent',
|
||||
'tornado.escape',
|
||||
'tornado.gen',
|
||||
'tornado.httpclient',
|
||||
'tornado.httpserver',
|
||||
'tornado.httputil',
|
||||
'tornado.ioloop',
|
||||
'tornado.iostream',
|
||||
'tornado.netutil',
|
||||
'tornado.simple_httpclient',
|
||||
'tornado.stack_context',
|
||||
'tornado.web',
|
||||
'tornado.websocket',
|
||||
'tornado.locks',
|
||||
|
||||
'ws4py',
|
||||
'ws4py.server',
|
||||
'ws4py.server.cherrypyserver',
|
||||
'ws4py.websocket',
|
||||
|
||||
"cherrypy",
|
||||
"cherrypy.lib",
|
||||
"cherrypy.process",
|
||||
"cherrypy.wsgiserver",
|
||||
"cherrypy.wsgiserver.ssl_builtin",
|
||||
"tornado",
|
||||
"tornado.concurrent",
|
||||
"tornado.escape",
|
||||
"tornado.gen",
|
||||
"tornado.httpclient",
|
||||
"tornado.httpserver",
|
||||
"tornado.httputil",
|
||||
"tornado.ioloop",
|
||||
"tornado.iostream",
|
||||
"tornado.netutil",
|
||||
"tornado.simple_httpclient",
|
||||
"tornado.stack_context",
|
||||
"tornado.web",
|
||||
"tornado.websocket",
|
||||
"tornado.locks",
|
||||
"ws4py",
|
||||
"ws4py.server",
|
||||
"ws4py.server.cherrypyserver",
|
||||
"ws4py.websocket",
|
||||
# modules, renderers, states, returners, et al
|
||||
'ClusterShell',
|
||||
'ClusterShell.NodeSet',
|
||||
'MySQLdb',
|
||||
'MySQLdb.cursors',
|
||||
'OpenSSL',
|
||||
'avahi',
|
||||
'boto.regioninfo',
|
||||
'concurrent',
|
||||
'dbus',
|
||||
'django',
|
||||
'dns',
|
||||
'dns.resolver',
|
||||
'dson',
|
||||
'hjson',
|
||||
'jnpr',
|
||||
'jnpr.junos',
|
||||
'jnpr.junos.utils',
|
||||
'jnpr.junos.utils.config',
|
||||
'jnpr.junos.utils.sw',
|
||||
'keyring',
|
||||
'libvirt',
|
||||
'lxml',
|
||||
'lxml.etree',
|
||||
'msgpack',
|
||||
'nagios_json',
|
||||
'napalm',
|
||||
'netaddr',
|
||||
'netaddr.IPAddress',
|
||||
'netaddr.core',
|
||||
'netaddr.core.AddrFormatError',
|
||||
'ntsecuritycon',
|
||||
'psutil',
|
||||
'pycassa',
|
||||
'pyconnman',
|
||||
'pyiface',
|
||||
'pymongo',
|
||||
'pyroute2',
|
||||
'pyroute2.ipdb',
|
||||
'rabbitmq_server',
|
||||
'redis',
|
||||
'rpm',
|
||||
'rpmUtils',
|
||||
'rpmUtils.arch',
|
||||
'salt.ext.six.moves.winreg',
|
||||
'twisted',
|
||||
'twisted.internet',
|
||||
'twisted.internet.protocol',
|
||||
'twisted.internet.protocol.DatagramProtocol',
|
||||
'win32security',
|
||||
'yum',
|
||||
'zfs',
|
||||
"ClusterShell",
|
||||
"ClusterShell.NodeSet",
|
||||
"MySQLdb",
|
||||
"MySQLdb.cursors",
|
||||
"OpenSSL",
|
||||
"avahi",
|
||||
"boto.regioninfo",
|
||||
"concurrent",
|
||||
"dbus",
|
||||
"django",
|
||||
"dns",
|
||||
"dns.resolver",
|
||||
"dson",
|
||||
"hjson",
|
||||
"jnpr",
|
||||
"jnpr.junos",
|
||||
"jnpr.junos.utils",
|
||||
"jnpr.junos.utils.config",
|
||||
"jnpr.junos.utils.sw",
|
||||
"keyring",
|
||||
"libvirt",
|
||||
"lxml",
|
||||
"lxml.etree",
|
||||
"msgpack",
|
||||
"nagios_json",
|
||||
"napalm",
|
||||
"netaddr",
|
||||
"netaddr.IPAddress",
|
||||
"netaddr.core",
|
||||
"netaddr.core.AddrFormatError",
|
||||
"ntsecuritycon",
|
||||
"psutil",
|
||||
"pycassa",
|
||||
"pyconnman",
|
||||
"pyiface",
|
||||
"pymongo",
|
||||
"pyroute2",
|
||||
"pyroute2.ipdb",
|
||||
"rabbitmq_server",
|
||||
"redis",
|
||||
"rpm",
|
||||
"rpmUtils",
|
||||
"rpmUtils.arch",
|
||||
"salt.ext.six.moves.winreg",
|
||||
"twisted",
|
||||
"twisted.internet",
|
||||
"twisted.internet.protocol",
|
||||
"twisted.internet.protocol.DatagramProtocol",
|
||||
"win32security",
|
||||
"yum",
|
||||
"zfs",
|
||||
]
|
||||
|
||||
MOCK_MODULES_MAPPING = {
|
||||
'cherrypy': {'config': mock_decorator_with_params},
|
||||
'ntsecuritycon': {
|
||||
'STANDARD_RIGHTS_REQUIRED': 0,
|
||||
'SYNCHRONIZE': 0,
|
||||
},
|
||||
'psutil': {'total': 0}, # Otherwise it will crash Sphinx
|
||||
"cherrypy": {"config": mock_decorator_with_params},
|
||||
"ntsecuritycon": {"STANDARD_RIGHTS_REQUIRED": 0, "SYNCHRONIZE": 0,},
|
||||
"psutil": {"total": 0}, # Otherwise it will crash Sphinx
|
||||
}
|
||||
|
||||
for mod_name in MOCK_MODULES:
|
||||
sys.modules[mod_name] = Mock(mapping=MOCK_MODULES_MAPPING.get(mod_name))
|
||||
|
||||
# Define a fake version attribute for the following libs.
|
||||
sys.modules['libcloud'].__version__ = '0.0.0'
|
||||
sys.modules['msgpack'].version = (1, 0, 0)
|
||||
sys.modules['psutil'].version_info = (3, 0, 0)
|
||||
sys.modules['pymongo'].version = '0.0.0'
|
||||
sys.modules['tornado'].version_info = (0, 0, 0)
|
||||
sys.modules['boto.regioninfo']._load_json_file = {'endpoints': None}
|
||||
sys.modules["libcloud"].__version__ = "0.0.0"
|
||||
sys.modules["msgpack"].version = (1, 0, 0)
|
||||
sys.modules["psutil"].version_info = (3, 0, 0)
|
||||
sys.modules["pymongo"].version = "0.0.0"
|
||||
sys.modules["tornado"].version_info = (0, 0, 0)
|
||||
sys.modules["boto.regioninfo"]._load_json_file = {"endpoints": None}
|
||||
|
||||
|
||||
# -- Add paths to PYTHONPATH ---------------------------------------------------
|
||||
|
@ -224,102 +220,114 @@ try:
|
|||
except NameError:
|
||||
# sphinx-intl and six execute some code which will raise this NameError
|
||||
# assume we're in the doc/ directory
|
||||
docs_basepath = os.path.abspath(os.path.dirname('.'))
|
||||
docs_basepath = os.path.abspath(os.path.dirname("."))
|
||||
|
||||
addtl_paths = (
|
||||
os.pardir, # salt itself (for autodoc)
|
||||
'_ext', # custom Sphinx extensions
|
||||
"_ext", # custom Sphinx extensions
|
||||
)
|
||||
|
||||
for addtl_path in addtl_paths:
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(docs_basepath, addtl_path)))
|
||||
|
||||
|
||||
# We're now able to import salt
|
||||
import salt.version
|
||||
import salt.version # isort:skip
|
||||
|
||||
|
||||
formulas_dir = os.path.join(os.pardir, docs_basepath, 'formulas')
|
||||
formulas_dir = os.path.join(os.pardir, docs_basepath, "formulas")
|
||||
|
||||
# ----- Intersphinx Settings ------------------------------------------------>
|
||||
intersphinx_mapping = {
|
||||
'python': ('https://docs.python.org/3', None)
|
||||
}
|
||||
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
|
||||
# <---- Intersphinx Settings -------------------------------------------------
|
||||
|
||||
# -- General Configuration -----------------------------------------------------
|
||||
|
||||
# Set a var if we're building docs for the live site or not
|
||||
on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
|
||||
on_saltstack = "SALT_ON_SALTSTACK" in os.environ
|
||||
|
||||
project = 'Salt'
|
||||
repo_primary_branch = 'master' # This is the default branch on GitHub for the Salt project
|
||||
project = "Salt"
|
||||
repo_primary_branch = (
|
||||
"master" # This is the default branch on GitHub for the Salt project
|
||||
)
|
||||
version = salt.version.__version__
|
||||
latest_release = os.environ.get('LATEST_RELEASE', 'latest_release') # latest release (2019.2.3)
|
||||
previous_release = os.environ.get('PREVIOUS_RELEASE', 'previous_release') # latest release from previous branch (2018.3.5)
|
||||
previous_release_dir = os.environ.get('PREVIOUS_RELEASE_DIR', 'previous_release_dir') # path on web server for previous branch (2018.3)
|
||||
next_release = '' # next release
|
||||
next_release_dir = '' # path on web server for next release branch
|
||||
latest_release = os.environ.get(
|
||||
"LATEST_RELEASE", "latest_release"
|
||||
) # latest release (2019.2.3)
|
||||
previous_release = os.environ.get(
|
||||
"PREVIOUS_RELEASE", "previous_release"
|
||||
) # latest release from previous branch (2018.3.5)
|
||||
previous_release_dir = os.environ.get(
|
||||
"PREVIOUS_RELEASE_DIR", "previous_release_dir"
|
||||
) # path on web server for previous branch (2018.3)
|
||||
next_release = "" # next release
|
||||
next_release_dir = "" # path on web server for next release branch
|
||||
|
||||
today = ''
|
||||
copyright = ''
|
||||
today = ""
|
||||
copyright = ""
|
||||
if on_saltstack:
|
||||
today = "Generated on " + time.strftime("%B %d, %Y") + " at " + time.strftime("%X %Z") + "."
|
||||
today = (
|
||||
"Generated on "
|
||||
+ time.strftime("%B %d, %Y")
|
||||
+ " at "
|
||||
+ time.strftime("%X %Z")
|
||||
+ "."
|
||||
)
|
||||
copyright = time.strftime("%Y")
|
||||
|
||||
# < --- START do not merge these settings to other branches START ---> #
|
||||
build_type = os.environ.get('BUILD_TYPE', repo_primary_branch) # latest, previous, master, next
|
||||
build_type = os.environ.get(
|
||||
"BUILD_TYPE", repo_primary_branch
|
||||
) # latest, previous, master, next
|
||||
# < --- END do not merge these settings to other branches END ---> #
|
||||
|
||||
# Set google custom search engine
|
||||
|
||||
if build_type == repo_primary_branch:
|
||||
release = latest_release
|
||||
search_cx = '011515552685726825874:v1had6i279q' # master
|
||||
#search_cx = '011515552685726825874:x17j5zl74g8' # develop
|
||||
elif build_type == 'next':
|
||||
search_cx = "011515552685726825874:v1had6i279q" # master
|
||||
# search_cx = '011515552685726825874:x17j5zl74g8' # develop
|
||||
elif build_type == "next":
|
||||
release = next_release
|
||||
search_cx = '011515552685726825874:ht0p8miksrm' # latest
|
||||
elif build_type == 'previous':
|
||||
search_cx = "011515552685726825874:ht0p8miksrm" # latest
|
||||
elif build_type == "previous":
|
||||
release = previous_release
|
||||
if release.startswith('3000'):
|
||||
search_cx = '011515552685726825874:3skhaozjtyn' # 3000
|
||||
elif release.startswith('2019.2'):
|
||||
search_cx = '011515552685726825874:huvjhlpptnm' # 2019.2
|
||||
elif release.startswith('2018.3'):
|
||||
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
|
||||
elif release.startswith('2017.7'):
|
||||
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
|
||||
elif release.startswith('2016.11'):
|
||||
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
|
||||
if release.startswith("3000"):
|
||||
search_cx = "011515552685726825874:3skhaozjtyn" # 3000
|
||||
elif release.startswith("2019.2"):
|
||||
search_cx = "011515552685726825874:huvjhlpptnm" # 2019.2
|
||||
elif release.startswith("2018.3"):
|
||||
search_cx = "011515552685726825874:vadptdpvyyu" # 2018.3
|
||||
elif release.startswith("2017.7"):
|
||||
search_cx = "011515552685726825874:w-hxmnbcpou" # 2017.7
|
||||
elif release.startswith("2016.11"):
|
||||
search_cx = "011515552685726825874:dlsj745pvhq" # 2016.11
|
||||
else:
|
||||
search_cx = '011515552685726825874:ht0p8miksrm' # latest
|
||||
else: # latest or something else
|
||||
search_cx = "011515552685726825874:ht0p8miksrm" # latest
|
||||
else: # latest or something else
|
||||
release = latest_release
|
||||
search_cx = '011515552685726825874:ht0p8miksrm' # latest
|
||||
search_cx = "011515552685726825874:ht0p8miksrm" # latest
|
||||
|
||||
needs_sphinx = '1.3'
|
||||
needs_sphinx = "1.3"
|
||||
|
||||
spelling_lang = 'en_US'
|
||||
language = 'en'
|
||||
spelling_lang = "en_US"
|
||||
language = "en"
|
||||
locale_dirs = [
|
||||
'_locale',
|
||||
"_locale",
|
||||
]
|
||||
|
||||
master_doc = 'contents'
|
||||
templates_path = ['_templates']
|
||||
exclude_patterns = ['_build', '_incl/*', 'ref/cli/_includes/*.rst']
|
||||
master_doc = "contents"
|
||||
templates_path = ["_templates"]
|
||||
exclude_patterns = ["_build", "_incl/*", "ref/cli/_includes/*.rst"]
|
||||
|
||||
extensions = [
|
||||
'saltdomain', # Must come early
|
||||
'sphinx.ext.autodoc',
|
||||
'sphinx.ext.napoleon',
|
||||
'sphinx.ext.autosummary',
|
||||
'sphinx.ext.extlinks',
|
||||
'sphinx.ext.intersphinx',
|
||||
'httpdomain',
|
||||
'youtube',
|
||||
'saltrepo'
|
||||
"saltdomain", # Must come early
|
||||
"sphinx.ext.autodoc",
|
||||
"sphinx.ext.napoleon",
|
||||
"sphinx.ext.autosummary",
|
||||
"sphinx.ext.extlinks",
|
||||
"sphinx.ext.intersphinx",
|
||||
"httpdomain",
|
||||
"youtube",
|
||||
"saltrepo"
|
||||
#'saltautodoc', # Must be AFTER autodoc
|
||||
#'shorturls',
|
||||
]
|
||||
|
@ -329,14 +337,14 @@ try:
|
|||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
extensions += ['sphinxcontrib.spelling']
|
||||
extensions += ["sphinxcontrib.spelling"]
|
||||
|
||||
modindex_common_prefix = ['salt.']
|
||||
modindex_common_prefix = ["salt."]
|
||||
|
||||
autosummary_generate = True
|
||||
|
||||
# strip git rev as there won't necessarily be a release based on it
|
||||
stripped_release = re.sub(r'-\d+-g[0-9a-f]+$', '', release)
|
||||
stripped_release = re.sub(r"-\d+-g[0-9a-f]+$", "", release)
|
||||
|
||||
# Define a substitution for linking to the latest release tarball
|
||||
rst_prolog = """\
|
||||
|
@ -374,87 +382,91 @@ rst_prolog = """\
|
|||
<p>x86_64: <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
|
||||
| <a href="https://repo.saltstack.com/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
|
||||
|
||||
""".format(release=stripped_release)
|
||||
""".format(
|
||||
release=stripped_release
|
||||
)
|
||||
|
||||
# A shortcut for linking to tickets on the GitHub issue tracker
|
||||
extlinks = {
|
||||
'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % repo_primary_branch, None),
|
||||
'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'),
|
||||
'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'),
|
||||
'formula_url': ('https://github.com/saltstack-formulas/%s', ''),
|
||||
"blob": (
|
||||
"https://github.com/saltstack/salt/blob/%s/%%s" % repo_primary_branch,
|
||||
None,
|
||||
),
|
||||
"issue": ("https://github.com/saltstack/salt/issues/%s", "issue #"),
|
||||
"pull": ("https://github.com/saltstack/salt/pull/%s", "PR #"),
|
||||
"formula_url": ("https://github.com/saltstack-formulas/%s", ""),
|
||||
}
|
||||
|
||||
|
||||
# ----- Localization -------------------------------------------------------->
|
||||
locale_dirs = ['locale/']
|
||||
locale_dirs = ["locale/"]
|
||||
gettext_compact = False
|
||||
# <---- Localization ---------------------------------------------------------
|
||||
|
||||
|
||||
### HTML options
|
||||
# set 'HTML_THEME=saltstack' to use previous theme
|
||||
html_theme = os.environ.get('HTML_THEME', 'saltstack2')
|
||||
html_theme_path = ['_themes']
|
||||
html_title = u''
|
||||
html_short_title = 'Salt'
|
||||
html_theme = os.environ.get("HTML_THEME", "saltstack2")
|
||||
html_theme_path = ["_themes"]
|
||||
html_title = u""
|
||||
html_short_title = "Salt"
|
||||
|
||||
html_static_path = ['_static']
|
||||
html_logo = None # specified in the theme layout.html
|
||||
html_favicon = 'favicon.ico'
|
||||
html_static_path = ["_static"]
|
||||
html_logo = None # specified in the theme layout.html
|
||||
html_favicon = "favicon.ico"
|
||||
smartquotes = False
|
||||
|
||||
# Use Google customized search or use Sphinx built-in JavaScript search
|
||||
if on_saltstack:
|
||||
html_search_template = 'googlesearch.html'
|
||||
html_search_template = "googlesearch.html"
|
||||
else:
|
||||
html_search_template = 'searchbox.html'
|
||||
html_search_template = "searchbox.html"
|
||||
|
||||
html_additional_pages = {
|
||||
'404': '404.html',
|
||||
"404": "404.html",
|
||||
}
|
||||
|
||||
html_default_sidebars = [
|
||||
html_search_template,
|
||||
'version.html',
|
||||
'localtoc.html',
|
||||
'relations.html',
|
||||
'sourcelink.html',
|
||||
'saltstack.html',
|
||||
"version.html",
|
||||
"localtoc.html",
|
||||
"relations.html",
|
||||
"sourcelink.html",
|
||||
"saltstack.html",
|
||||
]
|
||||
html_sidebars = {
|
||||
'ref/**/all/salt.*': [
|
||||
"ref/**/all/salt.*": [
|
||||
html_search_template,
|
||||
'version.html',
|
||||
'modules-sidebar.html',
|
||||
'localtoc.html',
|
||||
'relations.html',
|
||||
'sourcelink.html',
|
||||
'saltstack.html',
|
||||
],
|
||||
'ref/formula/all/*': [
|
||||
"version.html",
|
||||
"modules-sidebar.html",
|
||||
"localtoc.html",
|
||||
"relations.html",
|
||||
"sourcelink.html",
|
||||
"saltstack.html",
|
||||
],
|
||||
"ref/formula/all/*": [],
|
||||
}
|
||||
|
||||
html_context = {
|
||||
'on_saltstack': on_saltstack,
|
||||
'html_default_sidebars': html_default_sidebars,
|
||||
'github_base': 'https://github.com/saltstack/salt',
|
||||
'github_issues': 'https://github.com/saltstack/salt/issues',
|
||||
'github_downloads': 'https://github.com/saltstack/salt/downloads',
|
||||
'latest_release': latest_release,
|
||||
'previous_release': previous_release,
|
||||
'previous_release_dir': previous_release_dir,
|
||||
'next_release': next_release,
|
||||
'next_release_dir': next_release_dir,
|
||||
'search_cx': search_cx,
|
||||
'build_type': build_type,
|
||||
'today': today,
|
||||
'copyright': copyright,
|
||||
'repo_primary_branch': repo_primary_branch
|
||||
"on_saltstack": on_saltstack,
|
||||
"html_default_sidebars": html_default_sidebars,
|
||||
"github_base": "https://github.com/saltstack/salt",
|
||||
"github_issues": "https://github.com/saltstack/salt/issues",
|
||||
"github_downloads": "https://github.com/saltstack/salt/downloads",
|
||||
"latest_release": latest_release,
|
||||
"previous_release": previous_release,
|
||||
"previous_release_dir": previous_release_dir,
|
||||
"next_release": next_release,
|
||||
"next_release_dir": next_release_dir,
|
||||
"search_cx": search_cx,
|
||||
"build_type": build_type,
|
||||
"today": today,
|
||||
"copyright": copyright,
|
||||
"repo_primary_branch": repo_primary_branch,
|
||||
}
|
||||
|
||||
html_use_index = True
|
||||
html_last_updated_fmt = '%b %d, %Y'
|
||||
html_last_updated_fmt = "%b %d, %Y"
|
||||
html_show_sourcelink = False
|
||||
html_show_sphinx = True
|
||||
html_show_copyright = True
|
||||
|
@ -462,20 +474,20 @@ html_show_copyright = True
|
|||
### Latex options
|
||||
|
||||
latex_documents = [
|
||||
('contents', 'Salt.tex', 'Salt Documentation', 'SaltStack, Inc.', 'manual'),
|
||||
("contents", "Salt.tex", "Salt Documentation", "SaltStack, Inc.", "manual"),
|
||||
]
|
||||
|
||||
latex_logo = '_static/salt-logo.png'
|
||||
latex_logo = "_static/salt-logo.png"
|
||||
|
||||
latex_elements = {
|
||||
'inputenc': '', # use XeTeX instead of the inputenc LaTeX package.
|
||||
'utf8extra': '',
|
||||
'preamble': r'''
|
||||
"inputenc": "", # use XeTeX instead of the inputenc LaTeX package.
|
||||
"utf8extra": "",
|
||||
"preamble": r"""
|
||||
\usepackage{fontspec}
|
||||
\setsansfont{Linux Biolinum O}
|
||||
\setromanfont{Linux Libertine O}
|
||||
\setmonofont{Source Code Pro}
|
||||
''',
|
||||
""",
|
||||
}
|
||||
### Linux Biolinum, Linux Libertine: http://www.linuxlibertine.org/
|
||||
### Source Code Pro: https://github.com/adobe-fonts/source-code-pro/releases
|
||||
|
@ -483,34 +495,34 @@ latex_elements = {
|
|||
|
||||
### Linkcheck options
|
||||
linkcheck_ignore = [
|
||||
r'http://127.0.0.1',
|
||||
r'http://salt:\d+',
|
||||
r'http://local:\d+',
|
||||
r'https://console.aws.amazon.com',
|
||||
r'http://192.168.33.10',
|
||||
r'http://domain:\d+',
|
||||
r'http://123.456.789.012:\d+',
|
||||
r'http://localhost',
|
||||
r'https://groups.google.com/forum/#!forum/salt-users',
|
||||
r'http://logstash.net/docs/latest/inputs/udp',
|
||||
r'http://logstash.net/docs/latest/inputs/zeromq',
|
||||
r'http://www.youtube.com/saltstack',
|
||||
r'https://raven.readthedocs.io',
|
||||
r'https://getsentry.com',
|
||||
r'https://salt-cloud.readthedocs.io',
|
||||
r'https://salt.readthedocs.io',
|
||||
r'http://www.pip-installer.org/',
|
||||
r'http://www.windowsazure.com/',
|
||||
r'https://github.com/watching',
|
||||
r'dash-feed://',
|
||||
r'https://github.com/saltstack/salt/',
|
||||
r'http://bootstrap.saltstack.org',
|
||||
r'https://bootstrap.saltstack.com',
|
||||
r'https://raw.githubusercontent.com/saltstack/salt-bootstrap/stable/bootstrap-salt.sh',
|
||||
r'media.readthedocs.org/dash/salt/latest/salt.xml',
|
||||
r'https://portal.aws.amazon.com/gp/aws/securityCredentials',
|
||||
r'https://help.github.com/articles/fork-a-repo',
|
||||
r'dash-feed://https%3A//media.readthedocs.org/dash/salt/latest/salt.xml',
|
||||
r"http://127.0.0.1",
|
||||
r"http://salt:\d+",
|
||||
r"http://local:\d+",
|
||||
r"https://console.aws.amazon.com",
|
||||
r"http://192.168.33.10",
|
||||
r"http://domain:\d+",
|
||||
r"http://123.456.789.012:\d+",
|
||||
r"http://localhost",
|
||||
r"https://groups.google.com/forum/#!forum/salt-users",
|
||||
r"http://logstash.net/docs/latest/inputs/udp",
|
||||
r"http://logstash.net/docs/latest/inputs/zeromq",
|
||||
r"http://www.youtube.com/saltstack",
|
||||
r"https://raven.readthedocs.io",
|
||||
r"https://getsentry.com",
|
||||
r"https://salt-cloud.readthedocs.io",
|
||||
r"https://salt.readthedocs.io",
|
||||
r"http://www.pip-installer.org/",
|
||||
r"http://www.windowsazure.com/",
|
||||
r"https://github.com/watching",
|
||||
r"dash-feed://",
|
||||
r"https://github.com/saltstack/salt/",
|
||||
r"http://bootstrap.saltstack.org",
|
||||
r"https://bootstrap.saltstack.com",
|
||||
r"https://raw.githubusercontent.com/saltstack/salt-bootstrap/stable/bootstrap-salt.sh",
|
||||
r"media.readthedocs.org/dash/salt/latest/salt.xml",
|
||||
r"https://portal.aws.amazon.com/gp/aws/securityCredentials",
|
||||
r"https://help.github.com/articles/fork-a-repo",
|
||||
r"dash-feed://https%3A//media.readthedocs.org/dash/salt/latest/salt.xml",
|
||||
]
|
||||
|
||||
linkcheck_anchors = False
|
||||
|
@ -519,53 +531,53 @@ linkcheck_anchors = False
|
|||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
authors = [
|
||||
'Thomas S. Hatch <thatch45@gmail.com> and many others, please see the Authors file',
|
||||
"Thomas S. Hatch <thatch45@gmail.com> and many others, please see the Authors file",
|
||||
]
|
||||
|
||||
man_pages = [
|
||||
('contents', 'salt', 'Salt Documentation', authors, 7),
|
||||
('ref/cli/salt', 'salt', 'salt', authors, 1),
|
||||
('ref/cli/salt-master', 'salt-master', 'salt-master Documentation', authors, 1),
|
||||
('ref/cli/salt-minion', 'salt-minion', 'salt-minion Documentation', authors, 1),
|
||||
('ref/cli/salt-key', 'salt-key', 'salt-key Documentation', authors, 1),
|
||||
('ref/cli/salt-cp', 'salt-cp', 'salt-cp Documentation', authors, 1),
|
||||
('ref/cli/salt-call', 'salt-call', 'salt-call Documentation', authors, 1),
|
||||
('ref/cli/salt-proxy', 'salt-proxy', 'salt-proxy Documentation', authors, 1),
|
||||
('ref/cli/salt-syndic', 'salt-syndic', 'salt-syndic Documentation', authors, 1),
|
||||
('ref/cli/salt-run', 'salt-run', 'salt-run Documentation', authors, 1),
|
||||
('ref/cli/salt-ssh', 'salt-ssh', 'salt-ssh Documentation', authors, 1),
|
||||
('ref/cli/salt-cloud', 'salt-cloud', 'Salt Cloud Command', authors, 1),
|
||||
('ref/cli/salt-api', 'salt-api', 'salt-api Command', authors, 1),
|
||||
('ref/cli/salt-unity', 'salt-unity', 'salt-unity Command', authors, 1),
|
||||
('ref/cli/spm', 'spm', 'Salt Package Manager Command', authors, 1),
|
||||
("contents", "salt", "Salt Documentation", authors, 7),
|
||||
("ref/cli/salt", "salt", "salt", authors, 1),
|
||||
("ref/cli/salt-master", "salt-master", "salt-master Documentation", authors, 1),
|
||||
("ref/cli/salt-minion", "salt-minion", "salt-minion Documentation", authors, 1),
|
||||
("ref/cli/salt-key", "salt-key", "salt-key Documentation", authors, 1),
|
||||
("ref/cli/salt-cp", "salt-cp", "salt-cp Documentation", authors, 1),
|
||||
("ref/cli/salt-call", "salt-call", "salt-call Documentation", authors, 1),
|
||||
("ref/cli/salt-proxy", "salt-proxy", "salt-proxy Documentation", authors, 1),
|
||||
("ref/cli/salt-syndic", "salt-syndic", "salt-syndic Documentation", authors, 1),
|
||||
("ref/cli/salt-run", "salt-run", "salt-run Documentation", authors, 1),
|
||||
("ref/cli/salt-ssh", "salt-ssh", "salt-ssh Documentation", authors, 1),
|
||||
("ref/cli/salt-cloud", "salt-cloud", "Salt Cloud Command", authors, 1),
|
||||
("ref/cli/salt-api", "salt-api", "salt-api Command", authors, 1),
|
||||
("ref/cli/salt-unity", "salt-unity", "salt-unity Command", authors, 1),
|
||||
("ref/cli/spm", "spm", "Salt Package Manager Command", authors, 1),
|
||||
]
|
||||
|
||||
|
||||
### epub options
|
||||
epub_title = 'Salt Documentation'
|
||||
epub_author = 'SaltStack, Inc.'
|
||||
epub_title = "Salt Documentation"
|
||||
epub_author = "SaltStack, Inc."
|
||||
epub_publisher = epub_author
|
||||
epub_copyright = copyright
|
||||
|
||||
epub_scheme = 'URL'
|
||||
epub_identifier = 'http://saltstack.com/'
|
||||
epub_scheme = "URL"
|
||||
epub_identifier = "http://saltstack.com/"
|
||||
|
||||
epub_tocdup = False
|
||||
#epub_tocdepth = 3
|
||||
# epub_tocdepth = 3
|
||||
|
||||
|
||||
def skip_mod_init_member(app, what, name, obj, skip, options):
|
||||
# pylint: disable=too-many-arguments,unused-argument
|
||||
if name.startswith('_'):
|
||||
if name.startswith("_"):
|
||||
return True
|
||||
if isinstance(obj, types.FunctionType) and obj.__name__ == 'mod_init':
|
||||
if isinstance(obj, types.FunctionType) and obj.__name__ == "mod_init":
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _normalize_version(args):
|
||||
_, path = args
|
||||
return '.'.join([x.zfill(4) for x in (path.split('/')[-1].split('.'))])
|
||||
return ".".join([x.zfill(4) for x in (path.split("/")[-1].split("."))])
|
||||
|
||||
|
||||
class ReleasesTree(TocTree):
|
||||
|
@ -573,12 +585,12 @@ class ReleasesTree(TocTree):
|
|||
|
||||
def run(self):
|
||||
rst = super(ReleasesTree, self).run()
|
||||
entries = rst[0][0]['entries'][:]
|
||||
entries = rst[0][0]["entries"][:]
|
||||
entries.sort(key=_normalize_version, reverse=True)
|
||||
rst[0][0]['entries'][:] = entries
|
||||
rst[0][0]["entries"][:] = entries
|
||||
return rst
|
||||
|
||||
|
||||
def setup(app):
|
||||
app.add_directive('releasestree', ReleasesTree)
|
||||
app.connect('autodoc-skip-member', skip_mod_init_member)
|
||||
app.add_directive("releasestree", ReleasesTree)
|
||||
app.connect("autodoc-skip-member", skip_mod_init_member)
|
||||
|
|
956
noxfile.py
956
noxfile.py
File diff suppressed because it is too large
Load diff
|
@ -2,11 +2,11 @@
|
|||
|
||||
from bbfreeze import Freezer
|
||||
|
||||
includes = ['zmq', 'zmq.utils.strtypes', 'zmq.utils.jsonapi']
|
||||
excludes = ['Tkinter', 'tcl', 'Tkconstants']
|
||||
includes = ["zmq", "zmq.utils.strtypes", "zmq.utils.jsonapi"]
|
||||
excludes = ["Tkinter", "tcl", "Tkconstants"]
|
||||
|
||||
fre = Freezer(distdir="bb_salt", includes=includes, excludes=excludes)
|
||||
fre.addScript('/usr/bin/salt-minion')
|
||||
fre.addScript("/usr/bin/salt-minion")
|
||||
fre.use_compression = 0
|
||||
fre.include_py = True
|
||||
fre()
|
||||
|
|
|
@ -1,50 +1,55 @@
|
|||
#! /bin/env python
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
import tarfile
|
||||
|
||||
import argparse
|
||||
from os.path import dirname, join, abspath
|
||||
import os
|
||||
import sys
|
||||
import tarfile
|
||||
from os.path import abspath, dirname, join
|
||||
from shutil import copy
|
||||
from subprocess import check_call
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Build salt rpms',
|
||||
)
|
||||
parser.add_argument('buildid',
|
||||
help='The build id to use i.e. the bit after the salt version in the package name',
|
||||
)
|
||||
parser = argparse.ArgumentParser(description="Build salt rpms",)
|
||||
parser.add_argument(
|
||||
"buildid",
|
||||
help="The build id to use i.e. the bit after the salt version in the package name",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
src = abspath(join(dirname(__file__), '../..'))
|
||||
src = abspath(join(dirname(__file__), "../.."))
|
||||
|
||||
sys.path.append(src)
|
||||
|
||||
import salt.version
|
||||
import salt.version # isort:skip
|
||||
|
||||
salt_version = salt.version.__saltstack_version__.string
|
||||
|
||||
rpmbuild = join(os.environ['HOME'], 'rpmbuild')
|
||||
copy(join(src, 'pkg/rpm/salt.spec'), join(rpmbuild, 'SPECS'))
|
||||
for f in os.listdir(join(src, 'pkg/rpm')):
|
||||
if f in ['salt.spec', 'build.py']:
|
||||
rpmbuild = join(os.environ["HOME"], "rpmbuild")
|
||||
copy(join(src, "pkg/rpm/salt.spec"), join(rpmbuild, "SPECS"))
|
||||
for f in os.listdir(join(src, "pkg/rpm")):
|
||||
if f in ["salt.spec", "build.py"]:
|
||||
continue
|
||||
copy(join(src, 'pkg/rpm', f), join(rpmbuild, 'SOURCES'))
|
||||
copy(join(src, "pkg/rpm", f), join(rpmbuild, "SOURCES"))
|
||||
|
||||
|
||||
def srcfilter(ti):
|
||||
if '/.git' in ti.name:
|
||||
if "/.git" in ti.name:
|
||||
return None
|
||||
return ti
|
||||
|
||||
with tarfile.open(join(rpmbuild, 'SOURCES/salt-%s.tar.gz' % salt_version), 'w|gz') as tf:
|
||||
tf.add(src, arcname='salt-%s' % salt_version,
|
||||
filter=srcfilter)
|
||||
|
||||
with tarfile.open(
|
||||
join(rpmbuild, "SOURCES/salt-%s.tar.gz" % salt_version), "w|gz"
|
||||
) as tf:
|
||||
tf.add(src, arcname="salt-%s" % salt_version, filter=srcfilter)
|
||||
|
||||
|
||||
cmd = ['rpmbuild', '-bb',
|
||||
'--define=salt_version %s' % salt_version,
|
||||
'--define=buildid %s' % args.buildid,
|
||||
'salt.spec']
|
||||
print('Executing: %s' % ' '.join('"%s"' % c for c in cmd))
|
||||
check_call(cmd, cwd=join(rpmbuild, 'SPECS'))
|
||||
cmd = [
|
||||
"rpmbuild",
|
||||
"-bb",
|
||||
"--define=salt_version %s" % salt_version,
|
||||
"--define=buildid %s" % args.buildid,
|
||||
"salt.spec",
|
||||
]
|
||||
print("Executing: %s" % " ".join('"%s"' % c for c in cmd))
|
||||
check_call(cmd, cwd=join(rpmbuild, "SPECS"))
|
||||
|
|
|
@ -3,26 +3,26 @@ import sys
|
|||
|
||||
# http://stackoverflow.com/a/404750
|
||||
# determine if application is a script file or frozen exe
|
||||
if getattr(sys, 'frozen', False):
|
||||
if getattr(sys, "frozen", False):
|
||||
application_path = os.path.dirname(sys.executable)
|
||||
elif __file__:
|
||||
application_path = os.path.dirname(__file__)
|
||||
|
||||
ROOT_DIR=application_path.split("bin/appdata")[0]
|
||||
ROOT_DIR = application_path.split("bin/appdata")[0]
|
||||
|
||||
# Copied from syspaths.py
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, 'usr', 'share', 'salt')
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, 'etc')
|
||||
CACHE_DIR = os.path.join(ROOT_DIR, 'var', 'cache', 'salt')
|
||||
SOCK_DIR = os.path.join(ROOT_DIR, 'var', 'run', 'salt')
|
||||
SRV_ROOT_DIR = os.path.join(ROOT_DIR, 'srv')
|
||||
BASE_FILE_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'salt')
|
||||
BASE_PILLAR_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'pillar')
|
||||
BASE_THORIUM_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'thorium')
|
||||
BASE_MASTER_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, 'salt-master')
|
||||
LOGS_DIR = os.path.join(ROOT_DIR, 'var', 'log', 'salt')
|
||||
PIDFILE_DIR = os.path.join(ROOT_DIR, 'var', 'run')
|
||||
SPM_PARENT_PATH = os.path.join(ROOT_DIR, 'spm')
|
||||
SPM_FORMULA_PATH = os.path.join(SPM_PARENT_PATH, 'salt')
|
||||
SPM_PILLAR_PATH = os.path.join(SPM_PARENT_PATH, 'pillar')
|
||||
SPM_REACTOR_PATH = os.path.join(SPM_PARENT_PATH, 'reactor')
|
||||
SHARE_DIR = os.path.join(ROOT_DIR, "usr", "share", "salt")
|
||||
CONFIG_DIR = os.path.join(ROOT_DIR, "etc")
|
||||
CACHE_DIR = os.path.join(ROOT_DIR, "var", "cache", "salt")
|
||||
SOCK_DIR = os.path.join(ROOT_DIR, "var", "run", "salt")
|
||||
SRV_ROOT_DIR = os.path.join(ROOT_DIR, "srv")
|
||||
BASE_FILE_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, "salt")
|
||||
BASE_PILLAR_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, "pillar")
|
||||
BASE_THORIUM_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, "thorium")
|
||||
BASE_MASTER_ROOTS_DIR = os.path.join(SRV_ROOT_DIR, "salt-master")
|
||||
LOGS_DIR = os.path.join(ROOT_DIR, "var", "log", "salt")
|
||||
PIDFILE_DIR = os.path.join(ROOT_DIR, "var", "run")
|
||||
SPM_PARENT_PATH = os.path.join(ROOT_DIR, "spm")
|
||||
SPM_FORMULA_PATH = os.path.join(SPM_PARENT_PATH, "salt")
|
||||
SPM_PILLAR_PATH = os.path.join(SPM_PARENT_PATH, "pillar")
|
||||
SPM_REACTOR_PATH = os.path.join(SPM_PARENT_PATH, "reactor")
|
||||
|
|
|
@ -1,23 +1,24 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The setup script for sodium_grabber
|
||||
'''
|
||||
"""
|
||||
|
||||
# pylint: disable=C0111,E1101,E1103,F0401,W0611
|
||||
|
||||
from distutils.core import setup, Extension
|
||||
from distutils.core import Extension, setup
|
||||
from os import path
|
||||
|
||||
HERE = path.dirname(__file__)
|
||||
|
||||
SETUP_KWARGS = {}
|
||||
sodium_grabber = Extension('sodium_grabber',
|
||||
sources=[path.join(HERE, 'sodium_grabber.c')],
|
||||
libraries=['sodium'],
|
||||
sodium_grabber = Extension(
|
||||
"sodium_grabber",
|
||||
sources=[path.join(HERE, "sodium_grabber.c")],
|
||||
libraries=["sodium"],
|
||||
)
|
||||
SETUP_KWARGS['ext_modules'] = [sodium_grabber]
|
||||
SETUP_KWARGS['name'] = "sodium_grabber"
|
||||
SETUP_KWARGS["ext_modules"] = [sodium_grabber]
|
||||
SETUP_KWARGS["name"] = "sodium_grabber"
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
setup(**SETUP_KWARGS)
|
||||
|
|
|
@ -1,46 +1,44 @@
|
|||
#!/usr/bin/python
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import getopt
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def display_help():
|
||||
print('####################################################################')
|
||||
print('# #')
|
||||
print('# File: portable.py #')
|
||||
print('# Description: #')
|
||||
print('# - search and replace within a binary file #')
|
||||
print('# #')
|
||||
print('# Parameters: #')
|
||||
print('# -f, --file : target file #')
|
||||
print('# -s, --search : term to search for #')
|
||||
print('# Default is the base path for the python #')
|
||||
print('# executable that is running this script. #')
|
||||
print('# In Py2 that would be C:\\Python27 #')
|
||||
print('# -r, --replace : replace with this #')
|
||||
print("####################################################################")
|
||||
print("# #")
|
||||
print("# File: portable.py #")
|
||||
print("# Description: #")
|
||||
print("# - search and replace within a binary file #")
|
||||
print("# #")
|
||||
print("# Parameters: #")
|
||||
print("# -f, --file : target file #")
|
||||
print("# -s, --search : term to search for #")
|
||||
print("# Default is the base path for the python #")
|
||||
print("# executable that is running this script. #")
|
||||
print("# In Py2 that would be C:\\Python27 #")
|
||||
print("# -r, --replace : replace with this #")
|
||||
print('# default is ".." #')
|
||||
print('# #')
|
||||
print('# example: #')
|
||||
print('# portable.py -f <target_file> -s <search_term> -r <replace_term> #')
|
||||
print('# #')
|
||||
print('####################################################################')
|
||||
print("# #")
|
||||
print("# example: #")
|
||||
print("# portable.py -f <target_file> -s <search_term> -r <replace_term> #")
|
||||
print("# #")
|
||||
print("####################################################################")
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
def main(argv):
|
||||
target = ''
|
||||
target = ""
|
||||
search = os.path.dirname(sys.executable)
|
||||
replace = '..'
|
||||
replace = ".."
|
||||
try:
|
||||
opts, args = getopt.getopt(argv,
|
||||
"hf:s:r:",
|
||||
["file=", "search=", "replace="])
|
||||
opts, args = getopt.getopt(argv, "hf:s:r:", ["file=", "search=", "replace="])
|
||||
except getopt.GetoptError:
|
||||
display_help()
|
||||
for opt, arg in opts:
|
||||
if opt == '-h':
|
||||
if opt == "-h":
|
||||
display_help()
|
||||
elif opt in ("-f", "--file"):
|
||||
target = arg
|
||||
|
@ -48,16 +46,16 @@ def main(argv):
|
|||
search = arg
|
||||
elif opt in ("-r", "--replace"):
|
||||
replace = arg
|
||||
if target == '':
|
||||
if target == "":
|
||||
display_help()
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
search = search.encode('utf-8')
|
||||
replace = replace.encode('utf-8')
|
||||
f = open(target, 'rb').read()
|
||||
search = search.encode("utf-8")
|
||||
replace = replace.encode("utf-8")
|
||||
f = open(target, "rb").read()
|
||||
f = f.replace(search, replace)
|
||||
f = f.replace(search.lower(), replace)
|
||||
open(target, 'wb').write(f)
|
||||
open(target, "wb").write(f)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -3,4 +3,5 @@ exclude= '''
|
|||
\(
|
||||
salt/ext
|
||||
| tests/kitchen
|
||||
| templates
|
||||
)/
|
||||
|
|
|
@ -1,24 +1,24 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Salt package
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import warnings
|
||||
import sys
|
||||
|
||||
import importlib
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
class TornadoImporter(object):
|
||||
|
||||
def find_module(self, module_name, package_path=None):
|
||||
if module_name.startswith('tornado'):
|
||||
if module_name.startswith("tornado"):
|
||||
return self
|
||||
return None
|
||||
|
||||
def load_module(self, name):
|
||||
mod = importlib.import_module('salt.ext.{}'.format(name))
|
||||
mod = importlib.import_module("salt.ext.{}".format(name))
|
||||
sys.modules[name] = mod
|
||||
return mod
|
||||
|
||||
|
@ -29,35 +29,36 @@ sys.meta_path = [TornadoImporter()] + sys.meta_path
|
|||
|
||||
# All salt related deprecation warnings should be shown once each!
|
||||
warnings.filterwarnings(
|
||||
'once', # Show once
|
||||
'', # No deprecation message match
|
||||
"once", # Show once
|
||||
"", # No deprecation message match
|
||||
DeprecationWarning, # This filter is for DeprecationWarnings
|
||||
r'^(salt|salt\.(.*))$' # Match module(s) 'salt' and 'salt.<whatever>'
|
||||
r"^(salt|salt\.(.*))$", # Match module(s) 'salt' and 'salt.<whatever>'
|
||||
)
|
||||
|
||||
# While we are supporting Python2.6, hide nested with-statements warnings
|
||||
warnings.filterwarnings(
|
||||
'ignore',
|
||||
'With-statements now directly support multiple context managers',
|
||||
DeprecationWarning
|
||||
"ignore",
|
||||
"With-statements now directly support multiple context managers",
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
# Filter the backports package UserWarning about being re-imported
|
||||
warnings.filterwarnings(
|
||||
'ignore',
|
||||
'^Module backports was already imported from (.*), but (.*) is being added to sys.path$',
|
||||
UserWarning
|
||||
"ignore",
|
||||
"^Module backports was already imported from (.*), but (.*) is being added to sys.path$",
|
||||
UserWarning,
|
||||
)
|
||||
|
||||
|
||||
def __define_global_system_encoding_variable__():
|
||||
import sys
|
||||
|
||||
# This is the most trustworthy source of the system encoding, though, if
|
||||
# salt is being imported after being daemonized, this information is lost
|
||||
# and reset to None
|
||||
encoding = None
|
||||
|
||||
if not sys.platform.startswith('win') and sys.stdin is not None:
|
||||
if not sys.platform.startswith("win") and sys.stdin is not None:
|
||||
# On linux we can rely on sys.stdin for the encoding since it
|
||||
# most commonly matches the filesystem encoding. This however
|
||||
# does not apply to windows
|
||||
|
@ -68,6 +69,7 @@ def __define_global_system_encoding_variable__():
|
|||
# encoding. MS Windows has problems with this and reports the wrong
|
||||
# encoding
|
||||
import locale
|
||||
|
||||
try:
|
||||
encoding = locale.getdefaultlocale()[-1]
|
||||
except ValueError:
|
||||
|
@ -83,16 +85,16 @@ def __define_global_system_encoding_variable__():
|
|||
# the way back to ascii
|
||||
encoding = sys.getdefaultencoding()
|
||||
if not encoding:
|
||||
if sys.platform.startswith('darwin'):
|
||||
if sys.platform.startswith("darwin"):
|
||||
# Mac OS X uses UTF-8
|
||||
encoding = 'utf-8'
|
||||
elif sys.platform.startswith('win'):
|
||||
encoding = "utf-8"
|
||||
elif sys.platform.startswith("win"):
|
||||
# Windows uses a configurable encoding; on Windows, Python uses the name “mbcs”
|
||||
# to refer to whatever the currently configured encoding is.
|
||||
encoding = 'mbcs'
|
||||
encoding = "mbcs"
|
||||
else:
|
||||
# On linux default to ascii as a last resort
|
||||
encoding = 'ascii'
|
||||
encoding = "ascii"
|
||||
|
||||
# We can't use six.moves.builtins because these builtins get deleted sooner
|
||||
# than expected. See:
|
||||
|
@ -103,7 +105,7 @@ def __define_global_system_encoding_variable__():
|
|||
import builtins # pylint: disable=import-error
|
||||
|
||||
# Define the detected encoding as a built-in variable for ease of use
|
||||
setattr(builtins, '__salt_system_encoding__', encoding)
|
||||
setattr(builtins, "__salt_system_encoding__", encoding)
|
||||
|
||||
# This is now garbage collectable
|
||||
del sys
|
||||
|
|
103
salt/_compat.py
103
salt/_compat.py
|
@ -1,19 +1,20 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Salt compatibility code
|
||||
'''
|
||||
"""
|
||||
# pylint: disable=import-error,unused-import,invalid-name,W0231,W0233
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
import sys
|
||||
import logging
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import binascii
|
||||
import logging
|
||||
import sys
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.exceptions import SaltException
|
||||
from salt.ext.six import binary_type, string_types, text_type, integer_types
|
||||
from salt.ext.six.moves import cStringIO, StringIO
|
||||
from salt.ext.six import binary_type, integer_types, string_types, text_type
|
||||
from salt.ext.six.moves import StringIO, cStringIO
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -42,59 +43,61 @@ PY3 = sys.version_info.major == 3
|
|||
|
||||
if PY3:
|
||||
import builtins
|
||||
|
||||
exceptions = builtins
|
||||
else:
|
||||
import exceptions
|
||||
|
||||
|
||||
if ElementTree is not None:
|
||||
if not hasattr(ElementTree, 'ParseError'):
|
||||
if not hasattr(ElementTree, "ParseError"):
|
||||
|
||||
class ParseError(Exception):
|
||||
'''
|
||||
"""
|
||||
older versions of ElementTree do not have ParseError
|
||||
'''
|
||||
"""
|
||||
|
||||
ElementTree.ParseError = ParseError
|
||||
|
||||
|
||||
def text_(s, encoding='latin-1', errors='strict'):
|
||||
'''
|
||||
def text_(s, encoding="latin-1", errors="strict"):
|
||||
"""
|
||||
If ``s`` is an instance of ``binary_type``, return
|
||||
``s.decode(encoding, errors)``, otherwise return ``s``
|
||||
'''
|
||||
"""
|
||||
return s.decode(encoding, errors) if isinstance(s, binary_type) else s
|
||||
|
||||
|
||||
def bytes_(s, encoding='latin-1', errors='strict'):
|
||||
'''
|
||||
def bytes_(s, encoding="latin-1", errors="strict"):
|
||||
"""
|
||||
If ``s`` is an instance of ``text_type``, return
|
||||
``s.encode(encoding, errors)``, otherwise return ``s``
|
||||
'''
|
||||
"""
|
||||
return s.encode(encoding, errors) if isinstance(s, text_type) else s
|
||||
|
||||
|
||||
def ascii_native_(s):
|
||||
'''
|
||||
"""
|
||||
Python 3: If ``s`` is an instance of ``text_type``, return
|
||||
``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
|
||||
|
||||
Python 2: If ``s`` is an instance of ``text_type``, return
|
||||
``s.encode('ascii')``, otherwise return ``str(s)``
|
||||
'''
|
||||
"""
|
||||
if isinstance(s, text_type):
|
||||
s = s.encode('ascii')
|
||||
s = s.encode("ascii")
|
||||
|
||||
return str(s, 'ascii', 'strict') if PY3 else s
|
||||
return str(s, "ascii", "strict") if PY3 else s
|
||||
|
||||
|
||||
def native_(s, encoding='latin-1', errors='strict'):
|
||||
'''
|
||||
def native_(s, encoding="latin-1", errors="strict"):
|
||||
"""
|
||||
Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise
|
||||
return ``str(s, encoding, errors)``
|
||||
|
||||
Python 2: If ``s`` is an instance of ``text_type``, return
|
||||
``s.encode(encoding, errors)``, otherwise return ``str(s)``
|
||||
'''
|
||||
"""
|
||||
if PY3:
|
||||
out = s if isinstance(s, text_type) else str(s, encoding, errors)
|
||||
else:
|
||||
|
@ -104,9 +107,9 @@ def native_(s, encoding='latin-1', errors='strict'):
|
|||
|
||||
|
||||
def string_io(data=None): # cStringIO can't handle unicode
|
||||
'''
|
||||
"""
|
||||
Pass data through to stringIO module and return result
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
return cStringIO(bytes(data))
|
||||
except (UnicodeEncodeError, TypeError):
|
||||
|
@ -123,12 +126,13 @@ except ImportError:
|
|||
|
||||
|
||||
class IPv6AddressScoped(ipaddress.IPv6Address):
|
||||
'''
|
||||
"""
|
||||
Represent and manipulate single IPv6 Addresses.
|
||||
Scope-aware version
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, address):
|
||||
'''
|
||||
"""
|
||||
Instantiate a new IPv6 address object. Scope is moved to an attribute 'scope'.
|
||||
|
||||
Args:
|
||||
|
@ -143,15 +147,15 @@ class IPv6AddressScoped(ipaddress.IPv6Address):
|
|||
AddressValueError: If address isn't a valid IPv6 address.
|
||||
|
||||
:param address:
|
||||
'''
|
||||
"""
|
||||
# pylint: disable-all
|
||||
if not hasattr(self, '_is_packed_binary'):
|
||||
if not hasattr(self, "_is_packed_binary"):
|
||||
# This method (below) won't be around for some Python 3 versions
|
||||
# and we need check this differently anyway
|
||||
self._is_packed_binary = lambda p: isinstance(p, bytes)
|
||||
# pylint: enable-all
|
||||
if isinstance(address, string_types) and '%' in address:
|
||||
buff = address.split('%')
|
||||
if isinstance(address, string_types) and "%" in address:
|
||||
buff = address.split("%")
|
||||
if len(buff) != 2:
|
||||
raise SaltException('Invalid IPv6 address: "{}"'.format(address))
|
||||
address, self.__scope = buff
|
||||
|
@ -176,19 +180,21 @@ class IPv6AddressScoped(ipaddress.IPv6Address):
|
|||
self._ip = int(binascii.hexlify(address), 16)
|
||||
else:
|
||||
address = str(address)
|
||||
if '/' in address:
|
||||
raise ipaddress.AddressValueError("Unexpected '/' in {}".format(address))
|
||||
if "/" in address:
|
||||
raise ipaddress.AddressValueError(
|
||||
"Unexpected '/' in {}".format(address)
|
||||
)
|
||||
self._ip = self._ip_int_from_string(address)
|
||||
|
||||
def _is_packed_binary(self, data):
|
||||
'''
|
||||
"""
|
||||
Check if data is hexadecimal packed
|
||||
|
||||
:param data:
|
||||
:return:
|
||||
'''
|
||||
"""
|
||||
packed = False
|
||||
if isinstance(data, bytes) and len(data) == 16 and b':' not in data:
|
||||
if isinstance(data, bytes) and len(data) == 16 and b":" not in data:
|
||||
try:
|
||||
packed = bool(int(binascii.hexlify(data), 16))
|
||||
except ValueError:
|
||||
|
@ -198,25 +204,32 @@ class IPv6AddressScoped(ipaddress.IPv6Address):
|
|||
|
||||
@property
|
||||
def scope(self):
|
||||
'''
|
||||
"""
|
||||
Return scope of IPv6 address.
|
||||
|
||||
:return:
|
||||
'''
|
||||
"""
|
||||
return self.__scope
|
||||
|
||||
def __str__(self):
|
||||
return text_type(self._string_from_ip_int(self._ip) +
|
||||
('%' + self.scope if self.scope is not None else ''))
|
||||
return text_type(
|
||||
self._string_from_ip_int(self._ip)
|
||||
+ ("%" + self.scope if self.scope is not None else "")
|
||||
)
|
||||
|
||||
|
||||
class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped):
|
||||
'''
|
||||
"""
|
||||
Update
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, address):
|
||||
if PY3 and isinstance(address, (bytes, int)) or \
|
||||
not PY3 and isinstance(address, int):
|
||||
if (
|
||||
PY3
|
||||
and isinstance(address, (bytes, int))
|
||||
or not PY3
|
||||
and isinstance(address, int)
|
||||
):
|
||||
IPv6AddressScoped.__init__(self, address)
|
||||
self.network = ipaddress.IPv6Network(self._ip)
|
||||
self._prefixlen = self._max_prefixlen
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
salt._logging
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
|
@ -9,10 +9,9 @@
|
|||
|
||||
The ``salt._logging`` package should be imported as soon as possible since salt tweaks
|
||||
the python's logging system.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import salt libs
|
||||
from salt._logging.impl import * # pylint: disable=wildcard-import
|
||||
|
|
|
@ -1,29 +1,31 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
salt._logging.handlers
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Salt's logging handlers
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import sys
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import logging.handlers
|
||||
import sys
|
||||
from collections import deque
|
||||
|
||||
# Import salt libs
|
||||
from salt._logging.mixins import NewStyleClassMixin, ExcInfoOnLogLevelFormatMixin
|
||||
from salt._logging.mixins import ExcInfoOnLogLevelFormatMixin, NewStyleClassMixin
|
||||
from salt.ext.six.moves import queue # pylint: disable=import-error,no-name-in-module
|
||||
#from salt.utils.versions import warn_until_date
|
||||
|
||||
# from salt.utils.versions import warn_until_date
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TemporaryLoggingHandler(logging.NullHandler):
|
||||
'''
|
||||
"""
|
||||
This logging handler will store all the log records up to its maximum
|
||||
queue size at which stage the first messages stored will be dropped.
|
||||
|
||||
|
@ -36,15 +38,15 @@ class TemporaryLoggingHandler(logging.NullHandler):
|
|||
records will be dispatched to the provided handlers.
|
||||
|
||||
.. versionadded:: 0.17.0
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, level=logging.NOTSET, max_queue_size=10000):
|
||||
#warn_until_date(
|
||||
# warn_until_date(
|
||||
# '20220101',
|
||||
# 'Please stop using \'{name}.TemporaryLoggingHandler\'. '
|
||||
# '\'{name}.TemporaryLoggingHandler\' will go away after '
|
||||
# '{{date}}.'.format(name=__name__)
|
||||
#)
|
||||
# )
|
||||
self.__max_queue_size = max_queue_size
|
||||
super(TemporaryLoggingHandler, self).__init__(level=level)
|
||||
self.__messages = deque(maxlen=max_queue_size)
|
||||
|
@ -55,9 +57,9 @@ class TemporaryLoggingHandler(logging.NullHandler):
|
|||
self.release()
|
||||
|
||||
def sync_with_handlers(self, handlers=()):
|
||||
'''
|
||||
"""
|
||||
Sync the stored log records to the provided log handlers.
|
||||
'''
|
||||
"""
|
||||
if not handlers:
|
||||
return
|
||||
|
||||
|
@ -71,41 +73,42 @@ class TemporaryLoggingHandler(logging.NullHandler):
|
|||
handler.handle(record)
|
||||
|
||||
|
||||
class StreamHandler(ExcInfoOnLogLevelFormatMixin,
|
||||
logging.StreamHandler,
|
||||
NewStyleClassMixin):
|
||||
'''
|
||||
class StreamHandler(
|
||||
ExcInfoOnLogLevelFormatMixin, logging.StreamHandler, NewStyleClassMixin
|
||||
):
|
||||
"""
|
||||
Stream handler which properly handles exc_info on a per handler basis
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
class FileHandler(ExcInfoOnLogLevelFormatMixin,
|
||||
logging.FileHandler,
|
||||
NewStyleClassMixin):
|
||||
'''
|
||||
class FileHandler(
|
||||
ExcInfoOnLogLevelFormatMixin, logging.FileHandler, NewStyleClassMixin
|
||||
):
|
||||
"""
|
||||
File handler which properly handles exc_info on a per handler basis
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
class SysLogHandler(ExcInfoOnLogLevelFormatMixin,
|
||||
logging.handlers.SysLogHandler,
|
||||
NewStyleClassMixin):
|
||||
'''
|
||||
class SysLogHandler(
|
||||
ExcInfoOnLogLevelFormatMixin, logging.handlers.SysLogHandler, NewStyleClassMixin
|
||||
):
|
||||
"""
|
||||
Syslog handler which properly handles exc_info on a per handler basis
|
||||
'''
|
||||
"""
|
||||
|
||||
def handleError(self, record):
|
||||
'''
|
||||
"""
|
||||
Override the default error handling mechanism for py3
|
||||
Deal with syslog os errors when the log file does not exist
|
||||
'''
|
||||
"""
|
||||
handled = False
|
||||
if sys.stderr and sys.version_info >= (3, 5, 4):
|
||||
exc_type, exc, exc_traceback = sys.exc_info()
|
||||
try:
|
||||
if exc_type.__name__ in 'FileNotFoundError':
|
||||
if exc_type.__name__ in "FileNotFoundError":
|
||||
sys.stderr.write(
|
||||
'[WARNING ] The log_file does not exist. Logging not '
|
||||
'setup correctly or syslog service not started.\n'
|
||||
"[WARNING ] The log_file does not exist. Logging not "
|
||||
"setup correctly or syslog service not started.\n"
|
||||
)
|
||||
handled = True
|
||||
finally:
|
||||
|
@ -117,34 +120,40 @@ class SysLogHandler(ExcInfoOnLogLevelFormatMixin,
|
|||
super(SysLogHandler, self).handleError(record)
|
||||
|
||||
|
||||
class RotatingFileHandler(ExcInfoOnLogLevelFormatMixin,
|
||||
logging.handlers.RotatingFileHandler,
|
||||
NewStyleClassMixin):
|
||||
'''
|
||||
class RotatingFileHandler(
|
||||
ExcInfoOnLogLevelFormatMixin,
|
||||
logging.handlers.RotatingFileHandler,
|
||||
NewStyleClassMixin,
|
||||
):
|
||||
"""
|
||||
Rotating file handler which properly handles exc_info on a per handler basis
|
||||
'''
|
||||
"""
|
||||
|
||||
def handleError(self, record):
|
||||
'''
|
||||
"""
|
||||
Override the default error handling mechanism
|
||||
|
||||
Deal with log file rotation errors due to log file in use
|
||||
more softly.
|
||||
'''
|
||||
"""
|
||||
handled = False
|
||||
|
||||
# Can't use "salt.utils.platform.is_windows()" in this file
|
||||
if (sys.platform.startswith('win') and
|
||||
logging.raiseExceptions and
|
||||
sys.stderr): # see Python issue 13807
|
||||
if (
|
||||
sys.platform.startswith("win") and logging.raiseExceptions and sys.stderr
|
||||
): # see Python issue 13807
|
||||
exc_type, exc, exc_traceback = sys.exc_info()
|
||||
try:
|
||||
# PermissionError is used since Python 3.3.
|
||||
# OSError is used for previous versions of Python.
|
||||
if exc_type.__name__ in ('PermissionError', 'OSError') and exc.winerror == 32:
|
||||
if (
|
||||
exc_type.__name__ in ("PermissionError", "OSError")
|
||||
and exc.winerror == 32
|
||||
):
|
||||
if self.level <= logging.WARNING:
|
||||
sys.stderr.write(
|
||||
'[WARNING ] Unable to rotate the log file "{0}" '
|
||||
'because it is in use\n'.format(self.baseFilename)
|
||||
"because it is in use\n".format(self.baseFilename)
|
||||
)
|
||||
handled = True
|
||||
finally:
|
||||
|
@ -156,17 +165,22 @@ class RotatingFileHandler(ExcInfoOnLogLevelFormatMixin,
|
|||
super(RotatingFileHandler, self).handleError(record)
|
||||
|
||||
|
||||
class WatchedFileHandler(ExcInfoOnLogLevelFormatMixin,
|
||||
logging.handlers.WatchedFileHandler,
|
||||
NewStyleClassMixin):
|
||||
'''
|
||||
class WatchedFileHandler(
|
||||
ExcInfoOnLogLevelFormatMixin,
|
||||
logging.handlers.WatchedFileHandler,
|
||||
NewStyleClassMixin,
|
||||
):
|
||||
"""
|
||||
Watched file handler which properly handles exc_info on a per handler basis
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
if sys.version_info < (3, 2):
|
||||
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.Handler, NewStyleClassMixin):
|
||||
'''
|
||||
|
||||
class QueueHandler(
|
||||
ExcInfoOnLogLevelFormatMixin, logging.Handler, NewStyleClassMixin
|
||||
):
|
||||
"""
|
||||
This handler sends events to a queue. Typically, it would be used together
|
||||
with a multiprocessing Queue to centralise logging to file in one process
|
||||
(in a multi-process application), so as to avoid file write contention
|
||||
|
@ -174,38 +188,40 @@ if sys.version_info < (3, 2):
|
|||
|
||||
This code is new in Python 3.2, but this class can be copy pasted into
|
||||
user code for use with earlier Python versions.
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, queue):
|
||||
'''
|
||||
"""
|
||||
Initialise an instance, using the passed queue.
|
||||
'''
|
||||
#warn_until_date(
|
||||
"""
|
||||
# warn_until_date(
|
||||
# '20220101',
|
||||
# 'Please stop using \'{name}.QueueHandler\' and instead '
|
||||
# 'use \'logging.handlers.QueueHandler\'. '
|
||||
# '\'{name}.QueueHandler\' will go away after '
|
||||
# '{{date}}.'.format(name=__name__)
|
||||
#)
|
||||
# )
|
||||
logging.Handler.__init__(self)
|
||||
self.queue = queue
|
||||
|
||||
def enqueue(self, record):
|
||||
'''
|
||||
"""
|
||||
Enqueue a record.
|
||||
|
||||
The base implementation uses put_nowait. You may want to override
|
||||
this method if you want to use blocking, timeouts or custom queue
|
||||
implementations.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
self.queue.put_nowait(record)
|
||||
except queue.Full:
|
||||
sys.stderr.write('[WARNING ] Message queue is full, '
|
||||
'unable to write "{0}" to log'.format(record))
|
||||
sys.stderr.write(
|
||||
"[WARNING ] Message queue is full, "
|
||||
'unable to write "{0}" to log'.format(record)
|
||||
)
|
||||
|
||||
def prepare(self, record):
|
||||
'''
|
||||
"""
|
||||
Prepares a record for queuing. The object returned by this method is
|
||||
enqueued.
|
||||
The base implementation formats the record to merge the message
|
||||
|
@ -214,7 +230,7 @@ if sys.version_info < (3, 2):
|
|||
You might want to override this method if you want to convert
|
||||
the record to a dict or JSON string, or send a modified copy
|
||||
of the record while leaving the original intact.
|
||||
'''
|
||||
"""
|
||||
# The format operation gets traceback text into record.exc_text
|
||||
# (if there's exception data), and also returns the formatted
|
||||
# message. We can then use this to replace the original
|
||||
|
@ -232,46 +248,51 @@ if sys.version_info < (3, 2):
|
|||
return record
|
||||
|
||||
def emit(self, record):
|
||||
'''
|
||||
"""
|
||||
Emit a record.
|
||||
|
||||
Writes the LogRecord to the queue, preparing it for pickling first.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
self.enqueue(self.prepare(record))
|
||||
except Exception: # pylint: disable=broad-except
|
||||
self.handleError(record)
|
||||
|
||||
|
||||
elif sys.version_info < (3, 7):
|
||||
# On python versions lower than 3.7, we sill subclass and overwrite prepare to include the fix for:
|
||||
# https://bugs.python.org/issue35726
|
||||
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler): # pylint: disable=no-member,inconsistent-mro
|
||||
|
||||
class QueueHandler(
|
||||
ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler
|
||||
): # pylint: disable=no-member,inconsistent-mro
|
||||
def __init__(self, queue): # pylint: disable=useless-super-delegation
|
||||
super(QueueHandler, self).__init__(queue)
|
||||
#warn_until_date(
|
||||
# warn_until_date(
|
||||
# '20220101',
|
||||
# 'Please stop using \'{name}.QueueHandler\' and instead '
|
||||
# 'use \'logging.handlers.QueueHandler\'. '
|
||||
# '\'{name}.QueueHandler\' will go away after '
|
||||
# '{{date}}.'.format(name=__name__)
|
||||
#)
|
||||
# )
|
||||
|
||||
def enqueue(self, record):
|
||||
'''
|
||||
"""
|
||||
Enqueue a record.
|
||||
|
||||
The base implementation uses put_nowait. You may want to override
|
||||
this method if you want to use blocking, timeouts or custom queue
|
||||
implementations.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
self.queue.put_nowait(record)
|
||||
except queue.Full:
|
||||
sys.stderr.write('[WARNING ] Message queue is full, '
|
||||
'unable to write "{}" to log.\n'.format(record))
|
||||
sys.stderr.write(
|
||||
"[WARNING ] Message queue is full, "
|
||||
'unable to write "{}" to log.\n'.format(record)
|
||||
)
|
||||
|
||||
def prepare(self, record):
|
||||
'''
|
||||
"""
|
||||
Prepares a record for queuing. The object returned by this method is
|
||||
enqueued.
|
||||
The base implementation formats the record to merge the message
|
||||
|
@ -280,7 +301,7 @@ elif sys.version_info < (3, 7):
|
|||
You might want to override this method if you want to convert
|
||||
the record to a dict or JSON string, or send a modified copy
|
||||
of the record while leaving the original intact.
|
||||
'''
|
||||
"""
|
||||
# The format operation gets traceback text into record.exc_text
|
||||
# (if there's exception data), and also returns the formatted
|
||||
# message. We can then use this to replace the original
|
||||
|
@ -296,29 +317,35 @@ elif sys.version_info < (3, 7):
|
|||
record.exc_info = None
|
||||
record.exc_text = None
|
||||
return record
|
||||
else:
|
||||
class QueueHandler(ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler): # pylint: disable=no-member,inconsistent-mro
|
||||
|
||||
|
||||
else:
|
||||
|
||||
class QueueHandler(
|
||||
ExcInfoOnLogLevelFormatMixin, logging.handlers.QueueHandler
|
||||
): # pylint: disable=no-member,inconsistent-mro
|
||||
def __init__(self, queue): # pylint: disable=useless-super-delegation
|
||||
super(QueueHandler, self).__init__(queue)
|
||||
#warn_until_date(
|
||||
# warn_until_date(
|
||||
# '20220101',
|
||||
# 'Please stop using \'{name}.QueueHandler\' and instead '
|
||||
# 'use \'logging.handlers.QueueHandler\'. '
|
||||
# '\'{name}.QueueHandler\' will go away after '
|
||||
# '{{date}}.'.format(name=__name__)
|
||||
#)
|
||||
# )
|
||||
|
||||
def enqueue(self, record):
|
||||
'''
|
||||
"""
|
||||
Enqueue a record.
|
||||
|
||||
The base implementation uses put_nowait. You may want to override
|
||||
this method if you want to use blocking, timeouts or custom queue
|
||||
implementations.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
self.queue.put_nowait(record)
|
||||
except queue.Full:
|
||||
sys.stderr.write('[WARNING ] Message queue is full, '
|
||||
'unable to write "{0}" to log.\n'.format(record))
|
||||
sys.stderr.write(
|
||||
"[WARNING ] Message queue is full, "
|
||||
'unable to write "{0}" to log.\n'.format(record)
|
||||
)
|
||||
|
|
|
@ -1,17 +1,21 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
salt._logging.impl
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Salt's logging implementation classes/functionality
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
import types
|
||||
import logging
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six as six
|
||||
|
||||
# Let's define these custom logging levels before importing the salt._logging.mixins
|
||||
# since they will be used there
|
||||
|
@ -21,67 +25,74 @@ GARBAGE = logging.GARBAGE = 1
|
|||
QUIET = logging.QUIET = 1000
|
||||
|
||||
# Import Salt libs
|
||||
from salt._logging.handlers import StreamHandler
|
||||
#from salt._logging.handlers import SysLogHandler
|
||||
#from salt._logging.handlers import RotatingFileHandler
|
||||
#from salt._logging.handlers import WatchedFileHandler
|
||||
from salt._logging.handlers import TemporaryLoggingHandler
|
||||
from salt._logging.mixins import LoggingMixinMeta
|
||||
from salt._logging.mixins import NewStyleClassMixin
|
||||
from salt.exceptions import LoggingRuntimeError
|
||||
from salt.utils.ctx import RequestContext
|
||||
from salt.utils.textformat import TextFormat
|
||||
from salt._logging.handlers import StreamHandler # isort:skip
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six as six
|
||||
#from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=import-error,no-name-in-module
|
||||
# from salt._logging.handlers import SysLogHandler # isort:skip
|
||||
# from salt._logging.handlers import RotatingFileHandler # isort:skip
|
||||
# from salt._logging.handlers import WatchedFileHandler # isort:skip
|
||||
from salt._logging.handlers import TemporaryLoggingHandler # isort:skip
|
||||
from salt._logging.mixins import LoggingMixinMeta # isort:skip
|
||||
from salt._logging.mixins import NewStyleClassMixin # isort:skip
|
||||
from salt.exceptions import LoggingRuntimeError # isort:skip
|
||||
from salt.utils.ctx import RequestContext # isort:skip
|
||||
from salt.utils.textformat import TextFormat # isort:skip
|
||||
|
||||
# from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=import-error,no-name-in-module
|
||||
|
||||
LOG_LEVELS = {
|
||||
'all': logging.NOTSET,
|
||||
'debug': logging.DEBUG,
|
||||
'error': logging.ERROR,
|
||||
'critical': logging.CRITICAL,
|
||||
'garbage': GARBAGE,
|
||||
'info': logging.INFO,
|
||||
'profile': PROFILE,
|
||||
'quiet': QUIET,
|
||||
'trace': TRACE,
|
||||
'warning': logging.WARNING,
|
||||
"all": logging.NOTSET,
|
||||
"debug": logging.DEBUG,
|
||||
"error": logging.ERROR,
|
||||
"critical": logging.CRITICAL,
|
||||
"garbage": GARBAGE,
|
||||
"info": logging.INFO,
|
||||
"profile": PROFILE,
|
||||
"quiet": QUIET,
|
||||
"trace": TRACE,
|
||||
"warning": logging.WARNING,
|
||||
}
|
||||
|
||||
LOG_VALUES_TO_LEVELS = dict((v, k) for (k, v) in LOG_LEVELS.items())
|
||||
|
||||
LOG_COLORS = {
|
||||
'levels': {
|
||||
'QUIET': TextFormat('reset'),
|
||||
'CRITICAL': TextFormat('bold', 'red'),
|
||||
'ERROR': TextFormat('bold', 'red'),
|
||||
'WARNING': TextFormat('bold', 'yellow'),
|
||||
'INFO': TextFormat('bold', 'green'),
|
||||
'PROFILE': TextFormat('bold', 'cyan'),
|
||||
'DEBUG': TextFormat('bold', 'cyan'),
|
||||
'TRACE': TextFormat('bold', 'magenta'),
|
||||
'GARBAGE': TextFormat('bold', 'blue'),
|
||||
'NOTSET': TextFormat('reset'),
|
||||
'SUBDEBUG': TextFormat('bold', 'cyan'), # used by multiprocessing.log_to_stderr()
|
||||
'SUBWARNING': TextFormat('bold', 'yellow'), # used by multiprocessing.log_to_stderr()
|
||||
"levels": {
|
||||
"QUIET": TextFormat("reset"),
|
||||
"CRITICAL": TextFormat("bold", "red"),
|
||||
"ERROR": TextFormat("bold", "red"),
|
||||
"WARNING": TextFormat("bold", "yellow"),
|
||||
"INFO": TextFormat("bold", "green"),
|
||||
"PROFILE": TextFormat("bold", "cyan"),
|
||||
"DEBUG": TextFormat("bold", "cyan"),
|
||||
"TRACE": TextFormat("bold", "magenta"),
|
||||
"GARBAGE": TextFormat("bold", "blue"),
|
||||
"NOTSET": TextFormat("reset"),
|
||||
"SUBDEBUG": TextFormat(
|
||||
"bold", "cyan"
|
||||
), # used by multiprocessing.log_to_stderr()
|
||||
"SUBWARNING": TextFormat(
|
||||
"bold", "yellow"
|
||||
), # used by multiprocessing.log_to_stderr()
|
||||
},
|
||||
'msgs': {
|
||||
'QUIET': TextFormat('reset'),
|
||||
'CRITICAL': TextFormat('bold', 'red'),
|
||||
'ERROR': TextFormat('red'),
|
||||
'WARNING': TextFormat('yellow'),
|
||||
'INFO': TextFormat('green'),
|
||||
'PROFILE': TextFormat('bold', 'cyan'),
|
||||
'DEBUG': TextFormat('cyan'),
|
||||
'TRACE': TextFormat('magenta'),
|
||||
'GARBAGE': TextFormat('blue'),
|
||||
'NOTSET': TextFormat('reset'),
|
||||
'SUBDEBUG': TextFormat('bold', 'cyan'), # used by multiprocessing.log_to_stderr()
|
||||
'SUBWARNING': TextFormat('bold', 'yellow'), # used by multiprocessing.log_to_stderr()
|
||||
"msgs": {
|
||||
"QUIET": TextFormat("reset"),
|
||||
"CRITICAL": TextFormat("bold", "red"),
|
||||
"ERROR": TextFormat("red"),
|
||||
"WARNING": TextFormat("yellow"),
|
||||
"INFO": TextFormat("green"),
|
||||
"PROFILE": TextFormat("bold", "cyan"),
|
||||
"DEBUG": TextFormat("cyan"),
|
||||
"TRACE": TextFormat("magenta"),
|
||||
"GARBAGE": TextFormat("blue"),
|
||||
"NOTSET": TextFormat("reset"),
|
||||
"SUBDEBUG": TextFormat(
|
||||
"bold", "cyan"
|
||||
), # used by multiprocessing.log_to_stderr()
|
||||
"SUBWARNING": TextFormat(
|
||||
"bold", "yellow"
|
||||
), # used by multiprocessing.log_to_stderr()
|
||||
},
|
||||
'name': TextFormat('bold', 'green'),
|
||||
'process': TextFormat('bold', 'blue'),
|
||||
"name": TextFormat("bold", "green"),
|
||||
"process": TextFormat("bold", "blue"),
|
||||
}
|
||||
|
||||
# Make a list of log level names sorted by log level
|
||||
|
@ -89,22 +100,22 @@ SORTED_LEVEL_NAMES = [
|
|||
l[0] for l in sorted(six.iteritems(LOG_LEVELS), key=lambda x: x[1])
|
||||
]
|
||||
|
||||
MODNAME_PATTERN = re.compile(r'(?P<name>%%\(name\)(?:\-(?P<digits>[\d]+))?s)')
|
||||
MODNAME_PATTERN = re.compile(r"(?P<name>%%\(name\)(?:\-(?P<digits>[\d]+))?s)")
|
||||
|
||||
|
||||
# ----- REMOVE ME ON REFACTOR COMPLETE ------------------------------------------------------------------------------>
|
||||
class __NullLoggingHandler(TemporaryLoggingHandler):
|
||||
'''
|
||||
"""
|
||||
This class exists just to better identify which temporary logging
|
||||
handler is being used for what.
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
class __StoreLoggingHandler(TemporaryLoggingHandler):
|
||||
'''
|
||||
"""
|
||||
This class exists just to better identify which temporary logging
|
||||
handler is being used for what.
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
# Store a reference to the temporary queue logging handler
|
||||
|
@ -121,35 +132,31 @@ LOGGING_STORE_HANDLER = __StoreLoggingHandler()
|
|||
class SaltLogRecord(logging.LogRecord):
|
||||
def __init__(self, *args, **kwargs):
|
||||
logging.LogRecord.__init__(self, *args, **kwargs)
|
||||
self.bracketname = '[{:<17}]'.format(str(self.name))
|
||||
self.bracketlevel = '[{:<8}]'.format(str(self.levelname))
|
||||
self.bracketprocess = '[{:>5}]'.format(str(self.process))
|
||||
self.bracketname = "[{:<17}]".format(str(self.name))
|
||||
self.bracketlevel = "[{:<8}]".format(str(self.levelname))
|
||||
self.bracketprocess = "[{:>5}]".format(str(self.process))
|
||||
|
||||
|
||||
class SaltColorLogRecord(SaltLogRecord):
|
||||
def __init__(self, *args, **kwargs):
|
||||
SaltLogRecord.__init__(self, *args, **kwargs)
|
||||
|
||||
reset = TextFormat('reset')
|
||||
clevel = LOG_COLORS['levels'].get(self.levelname, reset)
|
||||
cmsg = LOG_COLORS['msgs'].get(self.levelname, reset)
|
||||
reset = TextFormat("reset")
|
||||
clevel = LOG_COLORS["levels"].get(self.levelname, reset)
|
||||
cmsg = LOG_COLORS["msgs"].get(self.levelname, reset)
|
||||
|
||||
self.colorname = '{}[{:<17}]{}'.format(LOG_COLORS['name'],
|
||||
self.name,
|
||||
reset)
|
||||
self.colorlevel = '{}[{:<8}]{}'.format(clevel,
|
||||
self.levelname,
|
||||
reset)
|
||||
self.colorprocess = '{}[{:>5}]{}'.format(LOG_COLORS['process'],
|
||||
self.process,
|
||||
reset)
|
||||
self.colormsg = '{}{}{}'.format(cmsg, self.getMessage(), reset)
|
||||
self.colorname = "{}[{:<17}]{}".format(LOG_COLORS["name"], self.name, reset)
|
||||
self.colorlevel = "{}[{:<8}]{}".format(clevel, self.levelname, reset)
|
||||
self.colorprocess = "{}[{:>5}]{}".format(
|
||||
LOG_COLORS["process"], self.process, reset
|
||||
)
|
||||
self.colormsg = "{}{}{}".format(cmsg, self.getMessage(), reset)
|
||||
|
||||
|
||||
def get_log_record_factory():
|
||||
'''
|
||||
"""
|
||||
Get the logging log record factory
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
return get_log_record_factory.__factory__
|
||||
except AttributeError:
|
||||
|
@ -157,9 +164,9 @@ def get_log_record_factory():
|
|||
|
||||
|
||||
def set_log_record_factory(factory):
|
||||
'''
|
||||
"""
|
||||
Set the logging log record factory
|
||||
'''
|
||||
"""
|
||||
get_log_record_factory.__factory__ = factory
|
||||
if not six.PY2:
|
||||
logging.setLogRecordFactory(factory)
|
||||
|
@ -172,9 +179,11 @@ set_log_record_factory(SaltLogRecord)
|
|||
LOGGING_LOGGER_CLASS = logging.getLoggerClass()
|
||||
|
||||
|
||||
class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS, NewStyleClassMixin)):
|
||||
class SaltLoggingClass(
|
||||
six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS, NewStyleClassMixin)
|
||||
):
|
||||
def __new__(cls, *args):
|
||||
'''
|
||||
"""
|
||||
We override `__new__` in our logging logger class in order to provide
|
||||
some additional features like expand the module name padding if length
|
||||
is being used, and also some Unicode fixes.
|
||||
|
@ -184,17 +193,19 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
|
||||
logging.getLogger(__name__)
|
||||
|
||||
'''
|
||||
"""
|
||||
instance = super(SaltLoggingClass, cls).__new__(cls)
|
||||
|
||||
try:
|
||||
max_logger_length = len(max(
|
||||
list(logging.Logger.manager.loggerDict), key=len
|
||||
))
|
||||
max_logger_length = len(
|
||||
max(list(logging.Logger.manager.loggerDict), key=len)
|
||||
)
|
||||
for handler in logging.root.handlers:
|
||||
if handler in (LOGGING_NULL_HANDLER,
|
||||
LOGGING_STORE_HANDLER,
|
||||
LOGGING_TEMP_HANDLER):
|
||||
if handler in (
|
||||
LOGGING_NULL_HANDLER,
|
||||
LOGGING_STORE_HANDLER,
|
||||
LOGGING_TEMP_HANDLER,
|
||||
):
|
||||
continue
|
||||
|
||||
formatter = handler.formatter
|
||||
|
@ -205,7 +216,7 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
handler.createLock()
|
||||
handler.acquire()
|
||||
|
||||
fmt = formatter._fmt.replace('%', '%%')
|
||||
fmt = formatter._fmt.replace("%", "%%")
|
||||
|
||||
match = MODNAME_PATTERN.search(fmt)
|
||||
if not match:
|
||||
|
@ -213,12 +224,12 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
handler.release()
|
||||
return instance
|
||||
|
||||
if 'digits' not in match.groupdict():
|
||||
if "digits" not in match.groupdict():
|
||||
# No digits group. Release handler and return.
|
||||
handler.release()
|
||||
return instance
|
||||
|
||||
digits = match.group('digits')
|
||||
digits = match.group("digits")
|
||||
if not digits or not (digits and digits.isdigit()):
|
||||
# No valid digits. Release handler and return.
|
||||
handler.release()
|
||||
|
@ -226,10 +237,9 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
|
||||
if int(digits) < max_logger_length:
|
||||
# Formatter digits value is lower than current max, update.
|
||||
fmt = fmt.replace(match.group('name'), '%%(name)-%ds')
|
||||
fmt = fmt.replace(match.group("name"), "%%(name)-%ds")
|
||||
formatter = logging.Formatter(
|
||||
fmt % max_logger_length,
|
||||
datefmt=formatter.datefmt
|
||||
fmt % max_logger_length, datefmt=formatter.datefmt
|
||||
)
|
||||
handler.setFormatter(formatter)
|
||||
handler.release()
|
||||
|
@ -238,45 +248,52 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
pass
|
||||
return instance
|
||||
|
||||
def _log(self, level, msg, args, exc_info=None,
|
||||
extra=None, # pylint: disable=arguments-differ
|
||||
stack_info=False,
|
||||
stacklevel=1,
|
||||
exc_info_on_loglevel=None):
|
||||
def _log(
|
||||
self,
|
||||
level,
|
||||
msg,
|
||||
args,
|
||||
exc_info=None,
|
||||
extra=None, # pylint: disable=arguments-differ
|
||||
stack_info=False,
|
||||
stacklevel=1,
|
||||
exc_info_on_loglevel=None,
|
||||
):
|
||||
if extra is None:
|
||||
extra = {}
|
||||
|
||||
# pylint: disable=no-member
|
||||
current_jid = RequestContext.current.get('data', {}).get('jid', None)
|
||||
log_fmt_jid = RequestContext.current.get('opts', {}).get('log_fmt_jid', None)
|
||||
current_jid = RequestContext.current.get("data", {}).get("jid", None)
|
||||
log_fmt_jid = RequestContext.current.get("opts", {}).get("log_fmt_jid", None)
|
||||
# pylint: enable=no-member
|
||||
|
||||
if current_jid is not None:
|
||||
extra['jid'] = current_jid
|
||||
extra["jid"] = current_jid
|
||||
|
||||
if log_fmt_jid is not None:
|
||||
extra['log_fmt_jid'] = log_fmt_jid
|
||||
extra["log_fmt_jid"] = log_fmt_jid
|
||||
|
||||
# If both exc_info and exc_info_on_loglevel are both passed, let's fail
|
||||
if exc_info and exc_info_on_loglevel:
|
||||
raise LoggingRuntimeError(
|
||||
'Only one of \'exc_info\' and \'exc_info_on_loglevel\' is '
|
||||
'permitted'
|
||||
"Only one of 'exc_info' and 'exc_info_on_loglevel' is " "permitted"
|
||||
)
|
||||
if exc_info_on_loglevel is not None:
|
||||
if isinstance(exc_info_on_loglevel, six.string_types):
|
||||
exc_info_on_loglevel = LOG_LEVELS.get(exc_info_on_loglevel,
|
||||
logging.ERROR)
|
||||
exc_info_on_loglevel = LOG_LEVELS.get(
|
||||
exc_info_on_loglevel, logging.ERROR
|
||||
)
|
||||
elif not isinstance(exc_info_on_loglevel, int):
|
||||
raise RuntimeError(
|
||||
'The value of \'exc_info_on_loglevel\' needs to be a '
|
||||
'logging level or a logging level name, not \'{}\''
|
||||
.format(exc_info_on_loglevel)
|
||||
"The value of 'exc_info_on_loglevel' needs to be a "
|
||||
"logging level or a logging level name, not '{}'".format(
|
||||
exc_info_on_loglevel
|
||||
)
|
||||
)
|
||||
if extra is None:
|
||||
extra = {'exc_info_on_loglevel': exc_info_on_loglevel}
|
||||
extra = {"exc_info_on_loglevel": exc_info_on_loglevel}
|
||||
else:
|
||||
extra['exc_info_on_loglevel'] = exc_info_on_loglevel
|
||||
extra["exc_info_on_loglevel"] = exc_info_on_loglevel
|
||||
|
||||
if sys.version_info < (3,):
|
||||
LOGGING_LOGGER_CLASS._log(
|
||||
|
@ -284,24 +301,46 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
)
|
||||
elif sys.version_info < (3, 8):
|
||||
LOGGING_LOGGER_CLASS._log(
|
||||
self, level, msg, args, exc_info=exc_info, extra=extra,
|
||||
stack_info=stack_info
|
||||
self,
|
||||
level,
|
||||
msg,
|
||||
args,
|
||||
exc_info=exc_info,
|
||||
extra=extra,
|
||||
stack_info=stack_info,
|
||||
)
|
||||
else:
|
||||
LOGGING_LOGGER_CLASS._log(
|
||||
self, level, msg, args, exc_info=exc_info, extra=extra,
|
||||
stack_info=stack_info, stacklevel=stacklevel
|
||||
self,
|
||||
level,
|
||||
msg,
|
||||
args,
|
||||
exc_info=exc_info,
|
||||
extra=extra,
|
||||
stack_info=stack_info,
|
||||
stacklevel=stacklevel,
|
||||
)
|
||||
|
||||
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
|
||||
func=None, extra=None, sinfo=None):
|
||||
def makeRecord(
|
||||
self,
|
||||
name,
|
||||
level,
|
||||
fn,
|
||||
lno,
|
||||
msg,
|
||||
args,
|
||||
exc_info,
|
||||
func=None,
|
||||
extra=None,
|
||||
sinfo=None,
|
||||
):
|
||||
# Let's remove exc_info_on_loglevel from extra
|
||||
exc_info_on_loglevel = extra.pop('exc_info_on_loglevel')
|
||||
exc_info_on_loglevel = extra.pop("exc_info_on_loglevel")
|
||||
|
||||
jid = extra.pop('jid', '')
|
||||
jid = extra.pop("jid", "")
|
||||
if jid:
|
||||
log_fmt_jid = extra.pop('log_fmt_jid')
|
||||
jid = log_fmt_jid % {'jid': jid}
|
||||
log_fmt_jid = extra.pop("log_fmt_jid")
|
||||
jid = log_fmt_jid % {"jid": jid}
|
||||
|
||||
if not extra:
|
||||
# If nothing else is in extra, make it None
|
||||
|
@ -310,30 +349,31 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
# Let's try to make every logging message unicode
|
||||
try:
|
||||
salt_system_encoding = __salt_system_encoding__
|
||||
if salt_system_encoding == 'ascii':
|
||||
if salt_system_encoding == "ascii":
|
||||
# Encoding detection most likely failed, let's use the utf-8
|
||||
# value which we defaulted before __salt_system_encoding__ was
|
||||
# implemented
|
||||
salt_system_encoding = 'utf-8'
|
||||
salt_system_encoding = "utf-8"
|
||||
except NameError:
|
||||
salt_system_encoding = 'utf-8'
|
||||
salt_system_encoding = "utf-8"
|
||||
|
||||
if isinstance(msg, six.string_types) and not isinstance(msg, six.text_type):
|
||||
try:
|
||||
_msg = msg.decode(salt_system_encoding, 'replace')
|
||||
_msg = msg.decode(salt_system_encoding, "replace")
|
||||
except UnicodeDecodeError:
|
||||
_msg = msg.decode(salt_system_encoding, 'ignore')
|
||||
_msg = msg.decode(salt_system_encoding, "ignore")
|
||||
else:
|
||||
_msg = msg
|
||||
|
||||
_args = []
|
||||
for item in args:
|
||||
if isinstance(item, six.string_types) \
|
||||
and not isinstance(item, six.text_type):
|
||||
if isinstance(item, six.string_types) and not isinstance(
|
||||
item, six.text_type
|
||||
):
|
||||
try:
|
||||
_args.append(item.decode(salt_system_encoding, 'replace'))
|
||||
_args.append(item.decode(salt_system_encoding, "replace"))
|
||||
except UnicodeDecodeError:
|
||||
_args.append(item.decode(salt_system_encoding, 'ignore'))
|
||||
_args.append(item.decode(salt_system_encoding, "ignore"))
|
||||
else:
|
||||
_args.append(item)
|
||||
_args = tuple(_args)
|
||||
|
@ -342,34 +382,20 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
# Recreate what's done for Py >= 3.5
|
||||
_log_record_factory = get_log_record_factory()
|
||||
logrecord = _log_record_factory(
|
||||
name,
|
||||
level,
|
||||
fn,
|
||||
lno,
|
||||
_msg,
|
||||
_args,
|
||||
exc_info,
|
||||
func)
|
||||
name, level, fn, lno, _msg, _args, exc_info, func
|
||||
)
|
||||
|
||||
if extra is not None:
|
||||
for key in extra:
|
||||
if (key in ['message', 'asctime']) or (key in logrecord.__dict__):
|
||||
if (key in ["message", "asctime"]) or (key in logrecord.__dict__):
|
||||
raise KeyError(
|
||||
'Attempt to overwrite \'{}\' in LogRecord'.format(key)
|
||||
"Attempt to overwrite '{}' in LogRecord".format(key)
|
||||
)
|
||||
logrecord.__dict__[key] = extra[key]
|
||||
else:
|
||||
logrecord = LOGGING_LOGGER_CLASS.makeRecord(
|
||||
self,
|
||||
name,
|
||||
level,
|
||||
fn,
|
||||
lno,
|
||||
_msg,
|
||||
_args,
|
||||
exc_info,
|
||||
func,
|
||||
sinfo)
|
||||
self, name, level, fn, lno, _msg, _args, exc_info, func, sinfo
|
||||
)
|
||||
|
||||
if exc_info_on_loglevel is not None:
|
||||
# Let's add some custom attributes to the LogRecord class in order
|
||||
|
@ -389,10 +415,10 @@ class SaltLoggingClass(six.with_metaclass(LoggingMixinMeta, LOGGING_LOGGER_CLASS
|
|||
if logging.getLoggerClass() is not SaltLoggingClass:
|
||||
|
||||
logging.setLoggerClass(SaltLoggingClass)
|
||||
logging.addLevelName(QUIET, 'QUIET')
|
||||
logging.addLevelName(PROFILE, 'PROFILE')
|
||||
logging.addLevelName(TRACE, 'TRACE')
|
||||
logging.addLevelName(GARBAGE, 'GARBAGE')
|
||||
logging.addLevelName(QUIET, "QUIET")
|
||||
logging.addLevelName(PROFILE, "PROFILE")
|
||||
logging.addLevelName(TRACE, "TRACE")
|
||||
logging.addLevelName(GARBAGE, "GARBAGE")
|
||||
|
||||
# ----- REMOVE ON REFACTORING COMPLETE -------------------------------------------------------------------------->
|
||||
if not logging.root.handlers:
|
||||
|
@ -417,18 +443,18 @@ log = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def __get_exposed_module_attributes():
|
||||
'''
|
||||
"""
|
||||
This function just ``dir()``'s this module and filters out any functions
|
||||
or variables which should not be available when wildcard importing it
|
||||
'''
|
||||
"""
|
||||
exposed = []
|
||||
module = sys.modules[__name__]
|
||||
for name in dir(module):
|
||||
if name.startswith('_'):
|
||||
if name.startswith("_"):
|
||||
continue
|
||||
obj = getattr(module, name)
|
||||
if not isinstance(obj, types.FunctionType):
|
||||
if name.startswith(('LOG_', 'SORTED_')):
|
||||
if name.startswith(("LOG_", "SORTED_")):
|
||||
exposed.append(name)
|
||||
continue
|
||||
if obj.__module__ != __name__:
|
||||
|
|
|
@ -1,70 +1,72 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
salt._logging.mixins
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Logging related mix-ins
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import sys
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
|
||||
class NewStyleClassMixin(object):
|
||||
'''
|
||||
"""
|
||||
Simple new style class to make pylint shut up!
|
||||
This is required because SaltLoggingClass can't subclass object directly:
|
||||
|
||||
'Cannot create a consistent method resolution order (MRO) for bases'
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
class LoggingProfileMixin(object):
|
||||
'''
|
||||
"""
|
||||
Simple mix-in class to add a trace method to python's logging.
|
||||
'''
|
||||
"""
|
||||
|
||||
def profile(self, msg, *args, **kwargs):
|
||||
self.log(getattr(logging, 'PROFILE', 15), msg, *args, **kwargs)
|
||||
self.log(getattr(logging, "PROFILE", 15), msg, *args, **kwargs)
|
||||
|
||||
|
||||
class LoggingTraceMixin(object):
|
||||
'''
|
||||
"""
|
||||
Simple mix-in class to add a trace method to python's logging.
|
||||
'''
|
||||
"""
|
||||
|
||||
def trace(self, msg, *args, **kwargs):
|
||||
self.log(getattr(logging, 'TRACE', 5), msg, *args, **kwargs)
|
||||
self.log(getattr(logging, "TRACE", 5), msg, *args, **kwargs)
|
||||
|
||||
|
||||
class LoggingGarbageMixin(object):
|
||||
'''
|
||||
"""
|
||||
Simple mix-in class to add a garbage method to python's logging.
|
||||
'''
|
||||
"""
|
||||
|
||||
def garbage(self, msg, *args, **kwargs):
|
||||
self.log(getattr(logging, 'GARBAGE', 5), msg, *args, **kwargs)
|
||||
self.log(getattr(logging, "GARBAGE", 5), msg, *args, **kwargs)
|
||||
|
||||
|
||||
class LoggingMixinMeta(type):
|
||||
'''
|
||||
"""
|
||||
This class is called whenever a new instance of ``SaltLoggingClass`` is
|
||||
created.
|
||||
|
||||
What this class does is check if any of the bases have a `trace()` or a
|
||||
`garbage()` method defined, if they don't we add the respective mix-ins to
|
||||
the bases.
|
||||
'''
|
||||
"""
|
||||
|
||||
def __new__(mcs, name, bases, attrs):
|
||||
include_profile = include_trace = include_garbage = True
|
||||
bases = list(bases)
|
||||
if name == 'SaltLoggingClass':
|
||||
if name == "SaltLoggingClass":
|
||||
for base in bases:
|
||||
if hasattr(base, 'trace'):
|
||||
if hasattr(base, "trace"):
|
||||
include_trace = False
|
||||
if hasattr(base, 'garbage'):
|
||||
if hasattr(base, "garbage"):
|
||||
include_garbage = False
|
||||
if include_profile:
|
||||
bases.append(LoggingProfileMixin)
|
||||
|
@ -76,17 +78,19 @@ class LoggingMixinMeta(type):
|
|||
|
||||
|
||||
class ExcInfoOnLogLevelFormatMixin(object):
|
||||
'''
|
||||
"""
|
||||
Logging handler class mixin to properly handle including exc_info on a per logging handler basis
|
||||
'''
|
||||
"""
|
||||
|
||||
def format(self, record):
|
||||
'''
|
||||
"""
|
||||
Format the log record to include exc_info if the handler is enabled for a specific log level
|
||||
'''
|
||||
"""
|
||||
formatted_record = super(ExcInfoOnLogLevelFormatMixin, self).format(record)
|
||||
exc_info_on_loglevel = getattr(record, 'exc_info_on_loglevel', None)
|
||||
exc_info_on_loglevel_formatted = getattr(record, 'exc_info_on_loglevel_formatted', None)
|
||||
exc_info_on_loglevel = getattr(record, "exc_info_on_loglevel", None)
|
||||
exc_info_on_loglevel_formatted = getattr(
|
||||
record, "exc_info_on_loglevel_formatted", None
|
||||
)
|
||||
if exc_info_on_loglevel is None and exc_info_on_loglevel_formatted is None:
|
||||
return formatted_record
|
||||
|
||||
|
@ -98,21 +102,26 @@ class ExcInfoOnLogLevelFormatMixin(object):
|
|||
return formatted_record
|
||||
|
||||
# If we reached this far it means we should include exc_info
|
||||
if not record.exc_info_on_loglevel_instance and not exc_info_on_loglevel_formatted:
|
||||
if (
|
||||
not record.exc_info_on_loglevel_instance
|
||||
and not exc_info_on_loglevel_formatted
|
||||
):
|
||||
# This should actually never occur
|
||||
return formatted_record
|
||||
|
||||
if record.exc_info_on_loglevel_formatted is None:
|
||||
# Let's cache the formatted exception to avoid recurring conversions and formatting calls
|
||||
if self.formatter is None: # pylint: disable=access-member-before-definition
|
||||
if (
|
||||
self.formatter is None
|
||||
): # pylint: disable=access-member-before-definition
|
||||
self.formatter = logging._defaultFormatter
|
||||
record.exc_info_on_loglevel_formatted = self.formatter.formatException(
|
||||
record.exc_info_on_loglevel_instance
|
||||
)
|
||||
|
||||
# Let's format the record to include exc_info just like python's logging formatted does
|
||||
if formatted_record[-1:] != '\n':
|
||||
formatted_record += '\n'
|
||||
if formatted_record[-1:] != "\n":
|
||||
formatted_record += "\n"
|
||||
|
||||
try:
|
||||
formatted_record += record.exc_info_on_loglevel_formatted
|
||||
|
@ -126,8 +135,7 @@ class ExcInfoOnLogLevelFormatMixin(object):
|
|||
# encodings, e.g. UTF-8 for the filesystem and latin-1
|
||||
# for a script. See issue 13232.
|
||||
formatted_record += record.exc_info_on_loglevel_formatted.decode(
|
||||
sys.getfilesystemencoding(),
|
||||
'replace'
|
||||
sys.getfilesystemencoding(), "replace"
|
||||
)
|
||||
# Reset the record.exc_info_on_loglevel_instance because it might need
|
||||
# to "travel" through a multiprocessing process and it might contain
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The acl module handles publisher_acl operations
|
||||
|
||||
Additional information on publisher_acl can be
|
||||
found by reading the salt documentation:
|
||||
|
||||
http://docs.saltstack.com/en/latest/ref/publisheracl.html
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libraries
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
@ -19,37 +19,46 @@ from salt.ext import six
|
|||
|
||||
|
||||
class PublisherACL(object):
|
||||
'''
|
||||
"""
|
||||
Represents the publisher ACL and provides methods
|
||||
to query the ACL for given operations
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, blacklist):
|
||||
self.blacklist = blacklist
|
||||
|
||||
def user_is_blacklisted(self, user):
|
||||
'''
|
||||
"""
|
||||
Takes a username as a string and returns a boolean. True indicates that
|
||||
the provided user has been blacklisted
|
||||
'''
|
||||
return not salt.utils.stringutils.check_whitelist_blacklist(user, blacklist=self.blacklist.get('users', []))
|
||||
"""
|
||||
return not salt.utils.stringutils.check_whitelist_blacklist(
|
||||
user, blacklist=self.blacklist.get("users", [])
|
||||
)
|
||||
|
||||
def cmd_is_blacklisted(self, cmd):
|
||||
# If this is a regular command, it is a single function
|
||||
if isinstance(cmd, six.string_types):
|
||||
cmd = [cmd]
|
||||
for fun in cmd:
|
||||
if not salt.utils.stringutils.check_whitelist_blacklist(fun, blacklist=self.blacklist.get('modules', [])):
|
||||
if not salt.utils.stringutils.check_whitelist_blacklist(
|
||||
fun, blacklist=self.blacklist.get("modules", [])
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
def user_is_whitelisted(self, user):
|
||||
return salt.utils.stringutils.check_whitelist_blacklist(user, whitelist=self.blacklist.get('users', []))
|
||||
return salt.utils.stringutils.check_whitelist_blacklist(
|
||||
user, whitelist=self.blacklist.get("users", [])
|
||||
)
|
||||
|
||||
def cmd_is_whitelisted(self, cmd):
|
||||
# If this is a regular command, it is a single function
|
||||
if isinstance(cmd, str):
|
||||
cmd = [cmd]
|
||||
for fun in cmd:
|
||||
if salt.utils.stringutils.check_whitelist_blacklist(fun, whitelist=self.blacklist.get('modules', [])):
|
||||
if salt.utils.stringutils.check_whitelist_blacklist(
|
||||
fun, whitelist=self.blacklist.get("modules", [])
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Salt's pluggable authentication system
|
||||
|
||||
This system allows for authentication to be managed in a module pluggable way
|
||||
so that any external authentication system can be used inside of Salt
|
||||
'''
|
||||
"""
|
||||
|
||||
# 1. Create auth loader instance
|
||||
# 2. Accept arguments as a dict
|
||||
|
@ -15,13 +15,12 @@ so that any external authentication system can be used inside of Salt
|
|||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import collections
|
||||
import time
|
||||
import getpass
|
||||
import logging
|
||||
import random
|
||||
import getpass
|
||||
from salt.ext.six.moves import input
|
||||
from salt.ext import six
|
||||
import time
|
||||
|
||||
# Import salt libs
|
||||
import salt.config
|
||||
|
@ -36,28 +35,33 @@ import salt.utils.minions
|
|||
import salt.utils.user
|
||||
import salt.utils.versions
|
||||
import salt.utils.zeromq
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import input
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
AUTH_INTERNAL_KEYWORDS = frozenset([
|
||||
'client',
|
||||
'cmd',
|
||||
'eauth',
|
||||
'fun',
|
||||
'gather_job_timeout',
|
||||
'kwarg',
|
||||
'match',
|
||||
'metadata',
|
||||
'print_event',
|
||||
'raw',
|
||||
'yield_pub_data'
|
||||
])
|
||||
AUTH_INTERNAL_KEYWORDS = frozenset(
|
||||
[
|
||||
"client",
|
||||
"cmd",
|
||||
"eauth",
|
||||
"fun",
|
||||
"gather_job_timeout",
|
||||
"kwarg",
|
||||
"match",
|
||||
"metadata",
|
||||
"print_event",
|
||||
"raw",
|
||||
"yield_pub_data",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class LoadAuth(object):
|
||||
'''
|
||||
"""
|
||||
Wrap the authentication system to handle peripheral components
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts, ckminions=None):
|
||||
self.opts = opts
|
||||
self.max_fail = 1.0
|
||||
|
@ -67,55 +71,54 @@ class LoadAuth(object):
|
|||
self.ckminions = ckminions or salt.utils.minions.CkMinions(opts)
|
||||
|
||||
def load_name(self, load):
|
||||
'''
|
||||
"""
|
||||
Return the primary name associate with the load, if an empty string
|
||||
is returned then the load does not match the function
|
||||
'''
|
||||
if 'eauth' not in load:
|
||||
return ''
|
||||
fstr = '{0}.auth'.format(load['eauth'])
|
||||
"""
|
||||
if "eauth" not in load:
|
||||
return ""
|
||||
fstr = "{0}.auth".format(load["eauth"])
|
||||
if fstr not in self.auth:
|
||||
return ''
|
||||
return ""
|
||||
try:
|
||||
pname_arg = salt.utils.args.arg_lookup(self.auth[fstr])['args'][0]
|
||||
pname_arg = salt.utils.args.arg_lookup(self.auth[fstr])["args"][0]
|
||||
return load[pname_arg]
|
||||
except IndexError:
|
||||
return ''
|
||||
return ""
|
||||
|
||||
def __auth_call(self, load):
|
||||
'''
|
||||
"""
|
||||
Return the token and set the cache data for use
|
||||
|
||||
Do not call this directly! Use the time_auth method to overcome timing
|
||||
attacks
|
||||
'''
|
||||
if 'eauth' not in load:
|
||||
"""
|
||||
if "eauth" not in load:
|
||||
return False
|
||||
fstr = '{0}.auth'.format(load['eauth'])
|
||||
fstr = "{0}.auth".format(load["eauth"])
|
||||
if fstr not in self.auth:
|
||||
return False
|
||||
# When making auth calls, only username, password, auth, and token
|
||||
# are valid, so we strip anything else out.
|
||||
_valid = ['username', 'password', 'eauth', 'token']
|
||||
_valid = ["username", "password", "eauth", "token"]
|
||||
_load = {key: value for (key, value) in load.items() if key in _valid}
|
||||
|
||||
fcall = salt.utils.args.format_call(
|
||||
self.auth[fstr],
|
||||
_load,
|
||||
expected_extra_kws=AUTH_INTERNAL_KEYWORDS)
|
||||
self.auth[fstr], _load, expected_extra_kws=AUTH_INTERNAL_KEYWORDS
|
||||
)
|
||||
try:
|
||||
if 'kwargs' in fcall:
|
||||
return self.auth[fstr](*fcall['args'], **fcall['kwargs'])
|
||||
if "kwargs" in fcall:
|
||||
return self.auth[fstr](*fcall["args"], **fcall["kwargs"])
|
||||
else:
|
||||
return self.auth[fstr](*fcall['args'])
|
||||
return self.auth[fstr](*fcall["args"])
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
log.debug('Authentication module threw %s', e)
|
||||
log.debug("Authentication module threw %s", e)
|
||||
return False
|
||||
|
||||
def time_auth(self, load):
|
||||
'''
|
||||
"""
|
||||
Make sure that all failures happen in the same amount of time
|
||||
'''
|
||||
"""
|
||||
start = time.time()
|
||||
ret = self.__auth_call(load)
|
||||
if ret:
|
||||
|
@ -125,127 +128,130 @@ class LoadAuth(object):
|
|||
self.max_fail = f_time
|
||||
deviation = self.max_fail / 4
|
||||
r_time = random.SystemRandom().uniform(
|
||||
self.max_fail - deviation,
|
||||
self.max_fail + deviation
|
||||
)
|
||||
self.max_fail - deviation, self.max_fail + deviation
|
||||
)
|
||||
while start + r_time > time.time():
|
||||
time.sleep(0.001)
|
||||
return False
|
||||
|
||||
def __get_acl(self, load):
|
||||
'''
|
||||
"""
|
||||
Returns ACL for a specific user.
|
||||
Returns None if eauth doesn't provide any for the user. I. e. None means: use acl declared
|
||||
in master config.
|
||||
'''
|
||||
if 'eauth' not in load:
|
||||
"""
|
||||
if "eauth" not in load:
|
||||
return None
|
||||
mod = self.opts['eauth_acl_module']
|
||||
mod = self.opts["eauth_acl_module"]
|
||||
if not mod:
|
||||
mod = load['eauth']
|
||||
fstr = '{0}.acl'.format(mod)
|
||||
mod = load["eauth"]
|
||||
fstr = "{0}.acl".format(mod)
|
||||
if fstr not in self.auth:
|
||||
return None
|
||||
fcall = salt.utils.args.format_call(
|
||||
self.auth[fstr],
|
||||
load,
|
||||
expected_extra_kws=AUTH_INTERNAL_KEYWORDS)
|
||||
self.auth[fstr], load, expected_extra_kws=AUTH_INTERNAL_KEYWORDS
|
||||
)
|
||||
try:
|
||||
return self.auth[fstr](*fcall['args'], **fcall['kwargs'])
|
||||
return self.auth[fstr](*fcall["args"], **fcall["kwargs"])
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
log.debug('Authentication module threw %s', e)
|
||||
log.debug("Authentication module threw %s", e)
|
||||
return None
|
||||
|
||||
def __process_acl(self, load, auth_list):
|
||||
'''
|
||||
"""
|
||||
Allows eauth module to modify the access list right before it'll be applied to the request.
|
||||
For example ldap auth module expands entries
|
||||
'''
|
||||
if 'eauth' not in load:
|
||||
"""
|
||||
if "eauth" not in load:
|
||||
return auth_list
|
||||
fstr = '{0}.process_acl'.format(load['eauth'])
|
||||
fstr = "{0}.process_acl".format(load["eauth"])
|
||||
if fstr not in self.auth:
|
||||
return auth_list
|
||||
try:
|
||||
return self.auth[fstr](auth_list, self.opts)
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
log.debug('Authentication module threw %s', e)
|
||||
log.debug("Authentication module threw %s", e)
|
||||
return auth_list
|
||||
|
||||
def get_groups(self, load):
|
||||
'''
|
||||
"""
|
||||
Read in a load and return the groups a user is a member of
|
||||
by asking the appropriate provider
|
||||
'''
|
||||
if 'eauth' not in load:
|
||||
"""
|
||||
if "eauth" not in load:
|
||||
return False
|
||||
fstr = '{0}.groups'.format(load['eauth'])
|
||||
fstr = "{0}.groups".format(load["eauth"])
|
||||
if fstr not in self.auth:
|
||||
return False
|
||||
fcall = salt.utils.args.format_call(
|
||||
self.auth[fstr],
|
||||
load,
|
||||
expected_extra_kws=AUTH_INTERNAL_KEYWORDS)
|
||||
self.auth[fstr], load, expected_extra_kws=AUTH_INTERNAL_KEYWORDS
|
||||
)
|
||||
try:
|
||||
return self.auth[fstr](*fcall['args'], **fcall['kwargs'])
|
||||
return self.auth[fstr](*fcall["args"], **fcall["kwargs"])
|
||||
except IndexError:
|
||||
return False
|
||||
except Exception: # pylint: disable=broad-except
|
||||
return None
|
||||
|
||||
def _allow_custom_expire(self, load):
|
||||
'''
|
||||
"""
|
||||
Return bool if requesting user is allowed to set custom expire
|
||||
'''
|
||||
expire_override = self.opts.get('token_expire_user_override', False)
|
||||
"""
|
||||
expire_override = self.opts.get("token_expire_user_override", False)
|
||||
|
||||
if expire_override is True:
|
||||
return True
|
||||
|
||||
if isinstance(expire_override, collections.Mapping):
|
||||
expire_whitelist = expire_override.get(load['eauth'], [])
|
||||
expire_whitelist = expire_override.get(load["eauth"], [])
|
||||
if isinstance(expire_whitelist, collections.Iterable):
|
||||
if load.get('username') in expire_whitelist:
|
||||
if load.get("username") in expire_whitelist:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def mk_token(self, load):
|
||||
'''
|
||||
"""
|
||||
Run time_auth and create a token. Return False or the token
|
||||
'''
|
||||
"""
|
||||
if not self.authenticate_eauth(load):
|
||||
return {}
|
||||
|
||||
if self._allow_custom_expire(load):
|
||||
token_expire = load.pop('token_expire', self.opts['token_expire'])
|
||||
token_expire = load.pop("token_expire", self.opts["token_expire"])
|
||||
else:
|
||||
_ = load.pop('token_expire', None)
|
||||
token_expire = self.opts['token_expire']
|
||||
_ = load.pop("token_expire", None)
|
||||
token_expire = self.opts["token_expire"]
|
||||
|
||||
tdata = {'start': time.time(),
|
||||
'expire': time.time() + token_expire,
|
||||
'name': self.load_name(load),
|
||||
'eauth': load['eauth']}
|
||||
tdata = {
|
||||
"start": time.time(),
|
||||
"expire": time.time() + token_expire,
|
||||
"name": self.load_name(load),
|
||||
"eauth": load["eauth"],
|
||||
}
|
||||
|
||||
if self.opts['keep_acl_in_token']:
|
||||
if self.opts["keep_acl_in_token"]:
|
||||
acl_ret = self.__get_acl(load)
|
||||
tdata['auth_list'] = acl_ret
|
||||
tdata["auth_list"] = acl_ret
|
||||
|
||||
groups = self.get_groups(load)
|
||||
if groups:
|
||||
tdata['groups'] = groups
|
||||
tdata["groups"] = groups
|
||||
|
||||
return self.tokens["{0}.mk_token".format(self.opts['eauth_tokens'])](self.opts, tdata)
|
||||
return self.tokens["{0}.mk_token".format(self.opts["eauth_tokens"])](
|
||||
self.opts, tdata
|
||||
)
|
||||
|
||||
def get_tok(self, tok):
|
||||
'''
|
||||
"""
|
||||
Return the name associated with the token, or False if the token is
|
||||
not valid
|
||||
'''
|
||||
"""
|
||||
tdata = {}
|
||||
try:
|
||||
tdata = self.tokens["{0}.get_token".format(self.opts['eauth_tokens'])](self.opts, tok)
|
||||
tdata = self.tokens["{0}.get_token".format(self.opts["eauth_tokens"])](
|
||||
self.opts, tok
|
||||
)
|
||||
except salt.exceptions.SaltDeserializationError:
|
||||
log.warning("Failed to load token %r - removing broken/empty file.", tok)
|
||||
rm_tok = True
|
||||
|
@ -254,7 +260,7 @@ class LoadAuth(object):
|
|||
return {}
|
||||
rm_tok = False
|
||||
|
||||
if tdata.get('expire', 0) < time.time():
|
||||
if tdata.get("expire", 0) < time.time():
|
||||
# If expire isn't present in the token it's invalid and needs
|
||||
# to be removed. Also, if it's present and has expired - in
|
||||
# other words, the expiration is before right now, it should
|
||||
|
@ -267,42 +273,44 @@ class LoadAuth(object):
|
|||
return tdata
|
||||
|
||||
def list_tokens(self):
|
||||
'''
|
||||
"""
|
||||
List all tokens in eauth_tokn storage.
|
||||
'''
|
||||
return self.tokens["{0}.list_tokens".format(self.opts['eauth_tokens'])](self.opts)
|
||||
"""
|
||||
return self.tokens["{0}.list_tokens".format(self.opts["eauth_tokens"])](
|
||||
self.opts
|
||||
)
|
||||
|
||||
def rm_token(self, tok):
|
||||
'''
|
||||
"""
|
||||
Remove the given token from token storage.
|
||||
'''
|
||||
self.tokens["{0}.rm_token".format(self.opts['eauth_tokens'])](self.opts, tok)
|
||||
"""
|
||||
self.tokens["{0}.rm_token".format(self.opts["eauth_tokens"])](self.opts, tok)
|
||||
|
||||
def authenticate_token(self, load):
|
||||
'''
|
||||
"""
|
||||
Authenticate a user by the token specified in load.
|
||||
Return the token object or False if auth failed.
|
||||
'''
|
||||
token = self.get_tok(load['token'])
|
||||
"""
|
||||
token = self.get_tok(load["token"])
|
||||
|
||||
# Bail if the token is empty or if the eauth type specified is not allowed
|
||||
if not token or token['eauth'] not in self.opts['external_auth']:
|
||||
if not token or token["eauth"] not in self.opts["external_auth"]:
|
||||
log.warning('Authentication failure of type "token" occurred.')
|
||||
return False
|
||||
|
||||
return token
|
||||
|
||||
def authenticate_eauth(self, load):
|
||||
'''
|
||||
"""
|
||||
Authenticate a user by the external auth module specified in load.
|
||||
Return True on success or False on failure.
|
||||
'''
|
||||
if 'eauth' not in load:
|
||||
"""
|
||||
if "eauth" not in load:
|
||||
log.warning('Authentication failure of type "eauth" occurred.')
|
||||
return False
|
||||
|
||||
if load['eauth'] not in self.opts['external_auth']:
|
||||
log.debug('The eauth system "%s" is not enabled', load['eauth'])
|
||||
if load["eauth"] not in self.opts["external_auth"]:
|
||||
log.debug('The eauth system "%s" is not enabled', load["eauth"])
|
||||
log.warning('Authentication failure of type "eauth" occurred.')
|
||||
return False
|
||||
|
||||
|
@ -315,43 +323,45 @@ class LoadAuth(object):
|
|||
return True
|
||||
|
||||
def authenticate_key(self, load, key):
|
||||
'''
|
||||
"""
|
||||
Authenticate a user by the key passed in load.
|
||||
Return the effective user id (name) if it's different from the specified one (for sudo).
|
||||
If the effective user id is the same as the passed one, return True on success or False on
|
||||
failure.
|
||||
'''
|
||||
"""
|
||||
error_msg = 'Authentication failure of type "user" occurred.'
|
||||
auth_key = load.pop('key', None)
|
||||
auth_key = load.pop("key", None)
|
||||
if auth_key is None:
|
||||
log.warning(error_msg)
|
||||
return False
|
||||
|
||||
if 'user' in load:
|
||||
auth_user = AuthUser(load['user'])
|
||||
if "user" in load:
|
||||
auth_user = AuthUser(load["user"])
|
||||
if auth_user.is_sudo():
|
||||
# If someone sudos check to make sure there is no ACL's around their username
|
||||
if auth_key != key[self.opts.get('user', 'root')]:
|
||||
if auth_key != key[self.opts.get("user", "root")]:
|
||||
log.warning(error_msg)
|
||||
return False
|
||||
return auth_user.sudo_name()
|
||||
elif load['user'] == self.opts.get('user', 'root') or load['user'] == 'root':
|
||||
if auth_key != key[self.opts.get('user', 'root')]:
|
||||
elif (
|
||||
load["user"] == self.opts.get("user", "root") or load["user"] == "root"
|
||||
):
|
||||
if auth_key != key[self.opts.get("user", "root")]:
|
||||
log.warning(error_msg)
|
||||
return False
|
||||
elif auth_user.is_running_user():
|
||||
if auth_key != key.get(load['user']):
|
||||
if auth_key != key.get(load["user"]):
|
||||
log.warning(error_msg)
|
||||
return False
|
||||
elif auth_key == key.get('root'):
|
||||
elif auth_key == key.get("root"):
|
||||
pass
|
||||
else:
|
||||
if load['user'] in key:
|
||||
if load["user"] in key:
|
||||
# User is authorised, check key and check perms
|
||||
if auth_key != key[load['user']]:
|
||||
if auth_key != key[load["user"]]:
|
||||
log.warning(error_msg)
|
||||
return False
|
||||
return load['user']
|
||||
return load["user"]
|
||||
else:
|
||||
log.warning(error_msg)
|
||||
return False
|
||||
|
@ -362,35 +372,35 @@ class LoadAuth(object):
|
|||
return True
|
||||
|
||||
def get_auth_list(self, load, token=None):
|
||||
'''
|
||||
"""
|
||||
Retrieve access list for the user specified in load.
|
||||
The list is built by eauth module or from master eauth configuration.
|
||||
Return None if current configuration doesn't provide any ACL for the user. Return an empty
|
||||
list if the user has no rights to execute anything on this master and returns non-empty list
|
||||
if user is allowed to execute particular functions.
|
||||
'''
|
||||
"""
|
||||
# Get auth list from token
|
||||
if token and self.opts['keep_acl_in_token'] and 'auth_list' in token:
|
||||
return token['auth_list']
|
||||
if token and self.opts["keep_acl_in_token"] and "auth_list" in token:
|
||||
return token["auth_list"]
|
||||
# Get acl from eauth module.
|
||||
auth_list = self.__get_acl(load)
|
||||
if auth_list is not None:
|
||||
return auth_list
|
||||
|
||||
eauth = token['eauth'] if token else load['eauth']
|
||||
if eauth not in self.opts['external_auth']:
|
||||
eauth = token["eauth"] if token else load["eauth"]
|
||||
if eauth not in self.opts["external_auth"]:
|
||||
# No matching module is allowed in config
|
||||
log.debug('The eauth system "%s" is not enabled', eauth)
|
||||
log.warning('Authorization failure occurred.')
|
||||
log.warning("Authorization failure occurred.")
|
||||
return None
|
||||
|
||||
if token:
|
||||
name = token['name']
|
||||
groups = token.get('groups')
|
||||
name = token["name"]
|
||||
groups = token.get("groups")
|
||||
else:
|
||||
name = self.load_name(load) # The username we are attempting to auth with
|
||||
groups = self.get_groups(load) # The groups this user belongs to
|
||||
eauth_config = self.opts['external_auth'][eauth]
|
||||
eauth_config = self.opts["external_auth"][eauth]
|
||||
if not eauth_config:
|
||||
log.debug('eauth "%s" configuration is empty', eauth)
|
||||
|
||||
|
@ -399,19 +409,16 @@ class LoadAuth(object):
|
|||
|
||||
# We now have an authenticated session and it is time to determine
|
||||
# what the user has access to.
|
||||
auth_list = self.ckminions.fill_auth_list(
|
||||
eauth_config,
|
||||
name,
|
||||
groups)
|
||||
auth_list = self.ckminions.fill_auth_list(eauth_config, name, groups)
|
||||
|
||||
auth_list = self.__process_acl(load, auth_list)
|
||||
|
||||
log.trace('Compiled auth_list: %s', auth_list)
|
||||
log.trace("Compiled auth_list: %s", auth_list)
|
||||
|
||||
return auth_list
|
||||
|
||||
def check_authentication(self, load, auth_type, key=None, show_username=False):
|
||||
'''
|
||||
"""
|
||||
.. versionadded:: 2018.3.0
|
||||
|
||||
Go through various checks to see if the token/eauth/user can be authenticated.
|
||||
|
@ -424,193 +431,208 @@ class LoadAuth(object):
|
|||
|
||||
If an error is encountered, return immediately with the relevant error dictionary
|
||||
as authentication has failed. Otherwise, return the username and valid auth_list.
|
||||
'''
|
||||
"""
|
||||
auth_list = []
|
||||
username = load.get('username', 'UNKNOWN')
|
||||
ret = {'auth_list': auth_list,
|
||||
'username': username,
|
||||
'error': {}}
|
||||
username = load.get("username", "UNKNOWN")
|
||||
ret = {"auth_list": auth_list, "username": username, "error": {}}
|
||||
|
||||
# Authenticate
|
||||
if auth_type == 'token':
|
||||
if auth_type == "token":
|
||||
token = self.authenticate_token(load)
|
||||
if not token:
|
||||
ret['error'] = {'name': 'TokenAuthenticationError',
|
||||
'message': 'Authentication failure of type "token" occurred.'}
|
||||
ret["error"] = {
|
||||
"name": "TokenAuthenticationError",
|
||||
"message": 'Authentication failure of type "token" occurred.',
|
||||
}
|
||||
return ret
|
||||
|
||||
# Update username for token
|
||||
username = token['name']
|
||||
ret['username'] = username
|
||||
username = token["name"]
|
||||
ret["username"] = username
|
||||
auth_list = self.get_auth_list(load, token=token)
|
||||
elif auth_type == 'eauth':
|
||||
elif auth_type == "eauth":
|
||||
if not self.authenticate_eauth(load):
|
||||
ret['error'] = {'name': 'EauthAuthenticationError',
|
||||
'message': 'Authentication failure of type "eauth" occurred for '
|
||||
'user {0}.'.format(username)}
|
||||
ret["error"] = {
|
||||
"name": "EauthAuthenticationError",
|
||||
"message": 'Authentication failure of type "eauth" occurred for '
|
||||
"user {0}.".format(username),
|
||||
}
|
||||
return ret
|
||||
|
||||
auth_list = self.get_auth_list(load)
|
||||
elif auth_type == 'user':
|
||||
elif auth_type == "user":
|
||||
auth_ret = self.authenticate_key(load, key)
|
||||
msg = 'Authentication failure of type "user" occurred'
|
||||
if not auth_ret: # auth_ret can be a boolean or the effective user id
|
||||
if show_username:
|
||||
msg = '{0} for user {1}.'.format(msg, username)
|
||||
ret['error'] = {'name': 'UserAuthenticationError', 'message': msg}
|
||||
msg = "{0} for user {1}.".format(msg, username)
|
||||
ret["error"] = {"name": "UserAuthenticationError", "message": msg}
|
||||
return ret
|
||||
|
||||
# Verify that the caller has root on master
|
||||
if auth_ret is not True:
|
||||
if AuthUser(load['user']).is_sudo():
|
||||
if not self.opts['sudo_acl'] or not self.opts['publisher_acl']:
|
||||
if AuthUser(load["user"]).is_sudo():
|
||||
if not self.opts["sudo_acl"] or not self.opts["publisher_acl"]:
|
||||
auth_ret = True
|
||||
|
||||
if auth_ret is not True:
|
||||
# Avoid a circular import
|
||||
import salt.utils.master
|
||||
|
||||
auth_list = salt.utils.master.get_values_of_matching_keys(
|
||||
self.opts['publisher_acl'], auth_ret)
|
||||
self.opts["publisher_acl"], auth_ret
|
||||
)
|
||||
if not auth_list:
|
||||
ret['error'] = {'name': 'UserAuthenticationError', 'message': msg}
|
||||
ret["error"] = {"name": "UserAuthenticationError", "message": msg}
|
||||
return ret
|
||||
else:
|
||||
ret['error'] = {'name': 'SaltInvocationError',
|
||||
'message': 'Authentication type not supported.'}
|
||||
ret["error"] = {
|
||||
"name": "SaltInvocationError",
|
||||
"message": "Authentication type not supported.",
|
||||
}
|
||||
return ret
|
||||
|
||||
# Authentication checks passed
|
||||
ret['auth_list'] = auth_list
|
||||
ret["auth_list"] = auth_list
|
||||
return ret
|
||||
|
||||
|
||||
class Resolver(object):
|
||||
'''
|
||||
"""
|
||||
The class used to resolve options for the command line and for generic
|
||||
interactive interfaces
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts):
|
||||
self.opts = opts
|
||||
self.auth = salt.loader.auth(opts)
|
||||
|
||||
def _send_token_request(self, load):
|
||||
master_uri = 'tcp://{}:{}'.format(
|
||||
salt.utils.zeromq.ip_bracket(self.opts['interface']),
|
||||
six.text_type(self.opts['ret_port'])
|
||||
master_uri = "tcp://{}:{}".format(
|
||||
salt.utils.zeromq.ip_bracket(self.opts["interface"]),
|
||||
six.text_type(self.opts["ret_port"]),
|
||||
)
|
||||
with salt.transport.client.ReqChannel.factory(self.opts,
|
||||
crypt='clear',
|
||||
master_uri=master_uri) as channel:
|
||||
with salt.transport.client.ReqChannel.factory(
|
||||
self.opts, crypt="clear", master_uri=master_uri
|
||||
) as channel:
|
||||
return channel.send(load)
|
||||
|
||||
def cli(self, eauth):
|
||||
'''
|
||||
"""
|
||||
Execute the CLI options to fill in the extra data needed for the
|
||||
defined eauth system
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
if not eauth:
|
||||
print('External authentication system has not been specified')
|
||||
print("External authentication system has not been specified")
|
||||
return ret
|
||||
fstr = '{0}.auth'.format(eauth)
|
||||
fstr = "{0}.auth".format(eauth)
|
||||
if fstr not in self.auth:
|
||||
print(('The specified external authentication system "{0}" is '
|
||||
'not available').format(eauth))
|
||||
print("Available eauth types: {0}".format(", ".join(self.auth.file_mapping.keys())))
|
||||
print(
|
||||
(
|
||||
'The specified external authentication system "{0}" is '
|
||||
"not available"
|
||||
).format(eauth)
|
||||
)
|
||||
print(
|
||||
"Available eauth types: {0}".format(
|
||||
", ".join(self.auth.file_mapping.keys())
|
||||
)
|
||||
)
|
||||
return ret
|
||||
|
||||
args = salt.utils.args.arg_lookup(self.auth[fstr])
|
||||
for arg in args['args']:
|
||||
for arg in args["args"]:
|
||||
if arg in self.opts:
|
||||
ret[arg] = self.opts[arg]
|
||||
elif arg.startswith('pass'):
|
||||
ret[arg] = getpass.getpass('{0}: '.format(arg))
|
||||
elif arg.startswith("pass"):
|
||||
ret[arg] = getpass.getpass("{0}: ".format(arg))
|
||||
else:
|
||||
ret[arg] = input('{0}: '.format(arg))
|
||||
for kwarg, default in list(args['kwargs'].items()):
|
||||
ret[arg] = input("{0}: ".format(arg))
|
||||
for kwarg, default in list(args["kwargs"].items()):
|
||||
if kwarg in self.opts:
|
||||
ret['kwarg'] = self.opts[kwarg]
|
||||
ret["kwarg"] = self.opts[kwarg]
|
||||
else:
|
||||
ret[kwarg] = input('{0} [{1}]: '.format(kwarg, default))
|
||||
ret[kwarg] = input("{0} [{1}]: ".format(kwarg, default))
|
||||
|
||||
# Use current user if empty
|
||||
if 'username' in ret and not ret['username']:
|
||||
ret['username'] = salt.utils.user.get_user()
|
||||
if "username" in ret and not ret["username"]:
|
||||
ret["username"] = salt.utils.user.get_user()
|
||||
|
||||
return ret
|
||||
|
||||
def token_cli(self, eauth, load):
|
||||
'''
|
||||
"""
|
||||
Create the token from the CLI and request the correct data to
|
||||
authenticate via the passed authentication mechanism
|
||||
'''
|
||||
load['cmd'] = 'mk_token'
|
||||
load['eauth'] = eauth
|
||||
"""
|
||||
load["cmd"] = "mk_token"
|
||||
load["eauth"] = eauth
|
||||
tdata = self._send_token_request(load)
|
||||
if 'token' not in tdata:
|
||||
if "token" not in tdata:
|
||||
return tdata
|
||||
try:
|
||||
with salt.utils.files.set_umask(0o177):
|
||||
with salt.utils.files.fopen(self.opts['token_file'], 'w+') as fp_:
|
||||
fp_.write(tdata['token'])
|
||||
with salt.utils.files.fopen(self.opts["token_file"], "w+") as fp_:
|
||||
fp_.write(tdata["token"])
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
return tdata
|
||||
|
||||
def mk_token(self, load):
|
||||
'''
|
||||
"""
|
||||
Request a token from the master
|
||||
'''
|
||||
load['cmd'] = 'mk_token'
|
||||
"""
|
||||
load["cmd"] = "mk_token"
|
||||
tdata = self._send_token_request(load)
|
||||
return tdata
|
||||
|
||||
def get_token(self, token):
|
||||
'''
|
||||
"""
|
||||
Request a token from the master
|
||||
'''
|
||||
"""
|
||||
load = {}
|
||||
load['token'] = token
|
||||
load['cmd'] = 'get_token'
|
||||
load["token"] = token
|
||||
load["cmd"] = "get_token"
|
||||
tdata = self._send_token_request(load)
|
||||
return tdata
|
||||
|
||||
|
||||
class AuthUser(object):
|
||||
'''
|
||||
"""
|
||||
Represents a user requesting authentication to the salt master
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, user):
|
||||
'''
|
||||
"""
|
||||
Instantiate an AuthUser object.
|
||||
|
||||
Takes a user to reprsent, as a string.
|
||||
'''
|
||||
"""
|
||||
self.user = user
|
||||
|
||||
def is_sudo(self):
|
||||
'''
|
||||
"""
|
||||
Determines if the user is running with sudo
|
||||
|
||||
Returns True if the user is running with sudo and False if the
|
||||
user is not running with sudo
|
||||
'''
|
||||
return self.user.startswith('sudo_')
|
||||
"""
|
||||
return self.user.startswith("sudo_")
|
||||
|
||||
def is_running_user(self):
|
||||
'''
|
||||
"""
|
||||
Determines if the user is the same user as the one running
|
||||
this process
|
||||
|
||||
Returns True if the user is the same user as the one running
|
||||
this process and False if not.
|
||||
'''
|
||||
"""
|
||||
return self.user == salt.utils.user.get_user()
|
||||
|
||||
def sudo_name(self):
|
||||
'''
|
||||
"""
|
||||
Returns the username of the sudoer, i.e. self.user without the
|
||||
'sudo_' prefix.
|
||||
'''
|
||||
return self.user.split('_', 1)[-1]
|
||||
"""
|
||||
return self.user.split("_", 1)[-1]
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
An "Always Approved" eauth interface to test against, not intended for
|
||||
production use
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
def auth(username, password): # pylint: disable=unused-argument
|
||||
'''
|
||||
"""
|
||||
Authenticate!
|
||||
'''
|
||||
"""
|
||||
return True
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Provide authentication using Django Web Framework
|
||||
|
||||
:depends: - Django Web Framework
|
||||
|
@ -45,26 +45,28 @@ When a user attempts to authenticate via Django, Salt will import the package
|
|||
indicated via the keyword ``^model``. That model must have the fields
|
||||
indicated above, though the model DOES NOT have to be named
|
||||
'SaltExternalAuthModel'.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
# pylint: disable=import-error
|
||||
try:
|
||||
import django
|
||||
from django.db import connection # pylint: disable=no-name-in-module
|
||||
|
||||
HAS_DJANGO = True
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
# If Django is installed and is not detected, uncomment
|
||||
# the following line to display additional information
|
||||
#log.warning('Could not load Django auth module. Found exception: %s', exc)
|
||||
# log.warning('Could not load Django auth module. Found exception: %s', exc)
|
||||
HAS_DJANGO = False
|
||||
# pylint: enable=import-error
|
||||
|
||||
|
@ -72,7 +74,7 @@ DJANGO_AUTH_CLASS = None
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'django'
|
||||
__virtualname__ = "django"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -91,9 +93,9 @@ def is_connection_usable():
|
|||
|
||||
|
||||
def __django_auth_setup():
|
||||
'''
|
||||
"""
|
||||
Prepare the connection to the Django authentication framework
|
||||
'''
|
||||
"""
|
||||
if django.VERSION >= (1, 7):
|
||||
django.setup()
|
||||
|
||||
|
@ -106,24 +108,28 @@ def __django_auth_setup():
|
|||
# they are needed. When using framework facilities outside the
|
||||
# web application container we need to run django.setup() to
|
||||
# get the model definitions cached.
|
||||
if '^model' in __opts__['external_auth']['django']:
|
||||
django_model_fullname = __opts__['external_auth']['django']['^model']
|
||||
django_model_name = django_model_fullname.split('.')[-1]
|
||||
django_module_name = '.'.join(django_model_fullname.split('.')[0:-1])
|
||||
if "^model" in __opts__["external_auth"]["django"]:
|
||||
django_model_fullname = __opts__["external_auth"]["django"]["^model"]
|
||||
django_model_name = django_model_fullname.split(".")[-1]
|
||||
django_module_name = ".".join(django_model_fullname.split(".")[0:-1])
|
||||
|
||||
django_auth_module = __import__(django_module_name, globals(), locals(), 'SaltExternalAuthModel')
|
||||
DJANGO_AUTH_CLASS_str = 'django_auth_module.{0}'.format(django_model_name)
|
||||
# pylint: disable=possibly-unused-variable
|
||||
django_auth_module = __import__(
|
||||
django_module_name, globals(), locals(), "SaltExternalAuthModel"
|
||||
)
|
||||
# pylint: enable=possibly-unused-variable
|
||||
DJANGO_AUTH_CLASS_str = "django_auth_module.{0}".format(django_model_name)
|
||||
DJANGO_AUTH_CLASS = eval(DJANGO_AUTH_CLASS_str) # pylint: disable=W0123
|
||||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
Simple Django auth
|
||||
'''
|
||||
django_auth_path = __opts__['django_auth_path']
|
||||
"""
|
||||
django_auth_path = __opts__["django_auth_path"]
|
||||
if django_auth_path not in sys.path:
|
||||
sys.path.append(django_auth_path)
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', __opts__['django_auth_settings'])
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", __opts__["django_auth_settings"])
|
||||
|
||||
__django_auth_setup()
|
||||
|
||||
|
@ -131,21 +137,24 @@ def auth(username, password):
|
|||
connection.close()
|
||||
|
||||
import django.contrib.auth # pylint: disable=import-error,3rd-party-module-not-gated,no-name-in-module
|
||||
|
||||
user = django.contrib.auth.authenticate(username=username, password=password)
|
||||
if user is not None:
|
||||
if user.is_active:
|
||||
log.debug('Django authentication successful')
|
||||
log.debug("Django authentication successful")
|
||||
return True
|
||||
else:
|
||||
log.debug('Django authentication: the password is valid but the account is disabled.')
|
||||
log.debug(
|
||||
"Django authentication: the password is valid but the account is disabled."
|
||||
)
|
||||
else:
|
||||
log.debug('Django authentication failed.')
|
||||
log.debug("Django authentication failed.")
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def acl(username):
|
||||
'''
|
||||
"""
|
||||
|
||||
:param username: Username to filter for
|
||||
:return: Dictionary that can be slotted into the ``__opts__`` structure for
|
||||
|
@ -181,7 +190,7 @@ def acl(username):
|
|||
- server1:
|
||||
- .*
|
||||
|
||||
'''
|
||||
"""
|
||||
__django_auth_setup()
|
||||
|
||||
if username is None:
|
||||
|
@ -203,10 +212,14 @@ def acl(username):
|
|||
for d in auth_dict[a.user_fk.username]:
|
||||
if isinstance(d, dict):
|
||||
if a.minion_or_fn_matcher in six.iterkeys(d):
|
||||
auth_dict[a.user_fk.username][a.minion_or_fn_matcher].append(a.minion_fn)
|
||||
auth_dict[a.user_fk.username][a.minion_or_fn_matcher].append(
|
||||
a.minion_fn
|
||||
)
|
||||
found = True
|
||||
if not found:
|
||||
auth_dict[a.user_fk.username].append({a.minion_or_fn_matcher: [a.minion_fn]})
|
||||
auth_dict[a.user_fk.username].append(
|
||||
{a.minion_or_fn_matcher: [a.minion_fn]}
|
||||
)
|
||||
|
||||
log.debug('django auth_dict is %s', auth_dict)
|
||||
log.debug("django auth_dict is %s", auth_dict)
|
||||
return auth_dict
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Provide authentication using local files
|
||||
|
||||
.. versionadded:: 2018.3.0
|
||||
|
@ -93,10 +93,11 @@ When using ``htdigest`` the ``^realm`` must be set:
|
|||
cory:
|
||||
- .*
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
|
@ -106,7 +107,7 @@ import salt.utils.versions
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'file'
|
||||
__virtualname__ = "file"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -114,75 +115,86 @@ def __virtual__():
|
|||
|
||||
|
||||
def _get_file_auth_config():
|
||||
'''
|
||||
"""
|
||||
Setup defaults and check configuration variables for auth backends
|
||||
'''
|
||||
"""
|
||||
|
||||
config = {
|
||||
'filetype': 'text',
|
||||
'hashtype': 'plaintext',
|
||||
'field_separator': ':',
|
||||
'username_field': 1,
|
||||
'password_field': 2,
|
||||
"filetype": "text",
|
||||
"hashtype": "plaintext",
|
||||
"field_separator": ":",
|
||||
"username_field": 1,
|
||||
"password_field": 2,
|
||||
}
|
||||
|
||||
for opt in __opts__['external_auth'][__virtualname__]:
|
||||
if opt.startswith('^'):
|
||||
config[opt[1:]] = __opts__['external_auth'][__virtualname__][opt]
|
||||
for opt in __opts__["external_auth"][__virtualname__]:
|
||||
if opt.startswith("^"):
|
||||
config[opt[1:]] = __opts__["external_auth"][__virtualname__][opt]
|
||||
|
||||
if 'filename' not in config:
|
||||
log.error('salt.auth.file: An authentication file must be specified '
|
||||
'via external_auth:file:^filename')
|
||||
if "filename" not in config:
|
||||
log.error(
|
||||
"salt.auth.file: An authentication file must be specified "
|
||||
"via external_auth:file:^filename"
|
||||
)
|
||||
return False
|
||||
|
||||
if not os.path.exists(config['filename']):
|
||||
log.error('salt.auth.file: The configured external_auth:file:^filename (%s)'
|
||||
'does not exist on the filesystem', config['filename'])
|
||||
if not os.path.exists(config["filename"]):
|
||||
log.error(
|
||||
"salt.auth.file: The configured external_auth:file:^filename (%s)"
|
||||
"does not exist on the filesystem",
|
||||
config["filename"],
|
||||
)
|
||||
return False
|
||||
|
||||
config['username_field'] = int(config['username_field'])
|
||||
config['password_field'] = int(config['password_field'])
|
||||
config["username_field"] = int(config["username_field"])
|
||||
config["password_field"] = int(config["password_field"])
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def _text(username, password, **kwargs):
|
||||
'''
|
||||
"""
|
||||
The text file function can authenticate plaintext and digest methods
|
||||
that are available in the :py:func:`hashutil.digest <salt.modules.hashutil.digest>`
|
||||
function.
|
||||
'''
|
||||
"""
|
||||
|
||||
filename = kwargs['filename']
|
||||
hashtype = kwargs['hashtype']
|
||||
field_separator = kwargs['field_separator']
|
||||
username_field = kwargs['username_field']-1
|
||||
password_field = kwargs['password_field']-1
|
||||
filename = kwargs["filename"]
|
||||
hashtype = kwargs["hashtype"]
|
||||
field_separator = kwargs["field_separator"]
|
||||
username_field = kwargs["username_field"] - 1
|
||||
password_field = kwargs["password_field"] - 1
|
||||
|
||||
with salt.utils.files.fopen(filename, 'r') as pwfile:
|
||||
with salt.utils.files.fopen(filename, "r") as pwfile:
|
||||
for line in pwfile.readlines():
|
||||
fields = line.strip().split(field_separator)
|
||||
|
||||
try:
|
||||
this_username = fields[username_field]
|
||||
except IndexError:
|
||||
log.error('salt.auth.file: username field (%s) does not exist '
|
||||
'in file %s', username_field, filename)
|
||||
log.error(
|
||||
"salt.auth.file: username field (%s) does not exist " "in file %s",
|
||||
username_field,
|
||||
filename,
|
||||
)
|
||||
return False
|
||||
try:
|
||||
this_password = fields[password_field]
|
||||
except IndexError:
|
||||
log.error('salt.auth.file: password field (%s) does not exist '
|
||||
'in file %s', password_field, filename)
|
||||
log.error(
|
||||
"salt.auth.file: password field (%s) does not exist " "in file %s",
|
||||
password_field,
|
||||
filename,
|
||||
)
|
||||
return False
|
||||
|
||||
if this_username == username:
|
||||
if hashtype == 'plaintext':
|
||||
if hashtype == "plaintext":
|
||||
if this_password == password:
|
||||
return True
|
||||
else:
|
||||
# Exceptions for unknown hash types will be raised by hashutil.digest
|
||||
if this_password == __salt__['hashutil.digest'](password, hashtype):
|
||||
if this_password == __salt__["hashutil.digest"](password, hashtype):
|
||||
return True
|
||||
|
||||
# Short circuit if we've already found the user but the password was wrong
|
||||
|
@ -191,73 +203,74 @@ def _text(username, password, **kwargs):
|
|||
|
||||
|
||||
def _htpasswd(username, password, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Provide authentication via Apache-style htpasswd files
|
||||
'''
|
||||
"""
|
||||
|
||||
from passlib.apache import HtpasswdFile
|
||||
|
||||
pwfile = HtpasswdFile(kwargs['filename'])
|
||||
pwfile = HtpasswdFile(kwargs["filename"])
|
||||
|
||||
# passlib below version 1.6 uses 'verify' function instead of 'check_password'
|
||||
if salt.utils.versions.version_cmp(kwargs['passlib_version'], '1.6') < 0:
|
||||
if salt.utils.versions.version_cmp(kwargs["passlib_version"], "1.6") < 0:
|
||||
return pwfile.verify(username, password)
|
||||
else:
|
||||
return pwfile.check_password(username, password)
|
||||
|
||||
|
||||
def _htdigest(username, password, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Provide authentication via Apache-style htdigest files
|
||||
'''
|
||||
"""
|
||||
|
||||
realm = kwargs.get('realm', None)
|
||||
realm = kwargs.get("realm", None)
|
||||
if not realm:
|
||||
log.error('salt.auth.file: A ^realm must be defined in '
|
||||
'external_auth:file for htdigest filetype')
|
||||
log.error(
|
||||
"salt.auth.file: A ^realm must be defined in "
|
||||
"external_auth:file for htdigest filetype"
|
||||
)
|
||||
return False
|
||||
|
||||
from passlib.apache import HtdigestFile
|
||||
|
||||
pwfile = HtdigestFile(kwargs['filename'])
|
||||
pwfile = HtdigestFile(kwargs["filename"])
|
||||
|
||||
# passlib below version 1.6 uses 'verify' function instead of 'check_password'
|
||||
if salt.utils.versions.version_cmp(kwargs['passlib_version'], '1.6') < 0:
|
||||
if salt.utils.versions.version_cmp(kwargs["passlib_version"], "1.6") < 0:
|
||||
return pwfile.verify(username, realm, password)
|
||||
else:
|
||||
return pwfile.check_password(username, realm, password)
|
||||
|
||||
|
||||
def _htfile(username, password, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Gate function for _htpasswd and _htdigest authentication backends
|
||||
'''
|
||||
"""
|
||||
|
||||
filetype = kwargs.get('filetype', 'htpasswd').lower()
|
||||
filetype = kwargs.get("filetype", "htpasswd").lower()
|
||||
|
||||
try:
|
||||
import passlib
|
||||
kwargs['passlib_version'] = passlib.__version__
|
||||
|
||||
kwargs["passlib_version"] = passlib.__version__
|
||||
except ImportError:
|
||||
log.error('salt.auth.file: The python-passlib library is required '
|
||||
'for %s filetype', filetype)
|
||||
log.error(
|
||||
"salt.auth.file: The python-passlib library is required " "for %s filetype",
|
||||
filetype,
|
||||
)
|
||||
return False
|
||||
|
||||
if filetype == 'htdigest':
|
||||
if filetype == "htdigest":
|
||||
return _htdigest(username, password, **kwargs)
|
||||
else:
|
||||
return _htpasswd(username, password, **kwargs)
|
||||
|
||||
|
||||
FILETYPE_FUNCTION_MAP = {
|
||||
'text': _text,
|
||||
'htpasswd': _htfile,
|
||||
'htdigest': _htfile
|
||||
}
|
||||
FILETYPE_FUNCTION_MAP = {"text": _text, "htpasswd": _htfile, "htdigest": _htfile}
|
||||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
File based authentication
|
||||
|
||||
^filename
|
||||
|
@ -296,13 +309,13 @@ def auth(username, password):
|
|||
numbering beginning at 1 (one).
|
||||
|
||||
Default: ``2``
|
||||
'''
|
||||
"""
|
||||
|
||||
config = _get_file_auth_config()
|
||||
|
||||
if not config:
|
||||
return False
|
||||
|
||||
auth_function = FILETYPE_FUNCTION_MAP.get(config['filetype'], 'text')
|
||||
auth_function = FILETYPE_FUNCTION_MAP.get(config["filetype"], "text")
|
||||
|
||||
return auth_function(username, password, **config)
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Provide authentication using OpenStack Keystone
|
||||
|
||||
:depends: - keystoneclient Python module
|
||||
'''
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
try:
|
||||
from keystoneclient.v2_0 import client
|
||||
from keystoneclient.exceptions import AuthorizationFailure, Unauthorized
|
||||
|
@ -14,30 +15,31 @@ except ImportError:
|
|||
|
||||
|
||||
def get_auth_url():
|
||||
'''
|
||||
"""
|
||||
Try and get the URL from the config, else return localhost
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
return __opts__['keystone.auth_url']
|
||||
return __opts__["keystone.auth_url"]
|
||||
except KeyError:
|
||||
return 'http://localhost:35357/v2.0'
|
||||
return "http://localhost:35357/v2.0"
|
||||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
Try and authenticate
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
keystone = client.Client(username=username, password=password,
|
||||
auth_url=get_auth_url())
|
||||
keystone = client.Client(
|
||||
username=username, password=password, auth_url=get_auth_url()
|
||||
)
|
||||
return keystone.authenticate()
|
||||
except (AuthorizationFailure, Unauthorized):
|
||||
return False
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
__opts__ = {}
|
||||
if auth('test', 'test'):
|
||||
if auth("test", "test"):
|
||||
print("Authenticated")
|
||||
else:
|
||||
print("Failed to authenticate")
|
||||
|
|
|
@ -1,97 +1,113 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Provide authentication using simple LDAP binds
|
||||
|
||||
:depends: - ldap Python module
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import logging
|
||||
|
||||
import itertools
|
||||
from salt.ext import six
|
||||
import logging
|
||||
|
||||
import salt.utils.data
|
||||
import salt.utils.stringutils
|
||||
|
||||
# Import third party libs
|
||||
from jinja2 import Environment
|
||||
|
||||
# Import salt libs
|
||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||
import salt.utils.stringutils
|
||||
import salt.utils.data
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
# Import third party libs
|
||||
from jinja2 import Environment
|
||||
|
||||
try:
|
||||
# pylint: disable=no-name-in-module
|
||||
import ldap
|
||||
import ldap.modlist
|
||||
import ldap.filter
|
||||
|
||||
HAS_LDAP = True
|
||||
# pylint: enable=no-name-in-module
|
||||
except ImportError:
|
||||
HAS_LDAP = False
|
||||
|
||||
# Defaults, override in master config
|
||||
__defopts__ = {'auth.ldap.basedn': '',
|
||||
'auth.ldap.uri': '',
|
||||
'auth.ldap.server': 'localhost',
|
||||
'auth.ldap.port': '389',
|
||||
'auth.ldap.starttls': False,
|
||||
'auth.ldap.tls': False,
|
||||
'auth.ldap.no_verify': False,
|
||||
'auth.ldap.anonymous': False,
|
||||
'auth.ldap.scope': 2,
|
||||
'auth.ldap.groupou': 'Groups',
|
||||
'auth.ldap.accountattributename': 'memberUid',
|
||||
'auth.ldap.groupattribute': 'memberOf',
|
||||
'auth.ldap.persontype': 'person',
|
||||
'auth.ldap.groupclass': 'posixGroup',
|
||||
'auth.ldap.activedirectory': False,
|
||||
'auth.ldap.freeipa': False,
|
||||
'auth.ldap.minion_stripdomains': [],
|
||||
}
|
||||
__defopts__ = {
|
||||
"auth.ldap.basedn": "",
|
||||
"auth.ldap.uri": "",
|
||||
"auth.ldap.server": "localhost",
|
||||
"auth.ldap.port": "389",
|
||||
"auth.ldap.starttls": False,
|
||||
"auth.ldap.tls": False,
|
||||
"auth.ldap.no_verify": False,
|
||||
"auth.ldap.anonymous": False,
|
||||
"auth.ldap.scope": 2,
|
||||
"auth.ldap.groupou": "Groups",
|
||||
"auth.ldap.accountattributename": "memberUid",
|
||||
"auth.ldap.groupattribute": "memberOf",
|
||||
"auth.ldap.persontype": "person",
|
||||
"auth.ldap.groupclass": "posixGroup",
|
||||
"auth.ldap.activedirectory": False,
|
||||
"auth.ldap.freeipa": False,
|
||||
"auth.ldap.minion_stripdomains": [],
|
||||
}
|
||||
|
||||
|
||||
def _config(key, mandatory=True, opts=None):
|
||||
'''
|
||||
"""
|
||||
Return a value for 'name' from master config file options or defaults.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
if opts:
|
||||
value = opts['auth.ldap.{0}'.format(key)]
|
||||
value = opts["auth.ldap.{0}".format(key)]
|
||||
else:
|
||||
value = __opts__['auth.ldap.{0}'.format(key)]
|
||||
value = __opts__["auth.ldap.{0}".format(key)]
|
||||
except KeyError:
|
||||
try:
|
||||
value = __defopts__['auth.ldap.{0}'.format(key)]
|
||||
value = __defopts__["auth.ldap.{0}".format(key)]
|
||||
except KeyError:
|
||||
if mandatory:
|
||||
msg = 'missing auth.ldap.{0} in master config'.format(key)
|
||||
msg = "missing auth.ldap.{0} in master config".format(key)
|
||||
raise SaltInvocationError(msg)
|
||||
return False
|
||||
return value
|
||||
|
||||
|
||||
def _render_template(param, username):
|
||||
'''
|
||||
"""
|
||||
Render config template, substituting username where found.
|
||||
'''
|
||||
"""
|
||||
env = Environment()
|
||||
template = env.from_string(param)
|
||||
variables = {'username': username}
|
||||
variables = {"username": username}
|
||||
return template.render(variables)
|
||||
|
||||
|
||||
class _LDAPConnection(object):
|
||||
'''
|
||||
"""
|
||||
Setup an LDAP connection.
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, uri, server, port,
|
||||
starttls, tls, no_verify,
|
||||
binddn, bindpw,
|
||||
anonymous, accountattributename, activedirectory=False):
|
||||
'''
|
||||
def __init__(
|
||||
self,
|
||||
uri,
|
||||
server,
|
||||
port,
|
||||
starttls,
|
||||
tls,
|
||||
no_verify,
|
||||
binddn,
|
||||
bindpw,
|
||||
anonymous,
|
||||
accountattributename,
|
||||
activedirectory=False,
|
||||
):
|
||||
"""
|
||||
Bind to an LDAP directory using passed credentials.
|
||||
'''
|
||||
"""
|
||||
self.uri = uri
|
||||
self.server = server
|
||||
self.port = port
|
||||
|
@ -101,151 +117,180 @@ class _LDAPConnection(object):
|
|||
self.bindpw = bindpw
|
||||
if not HAS_LDAP:
|
||||
raise CommandExecutionError(
|
||||
'LDAP connection could not be made, the python-ldap module is '
|
||||
'not installed. Install python-ldap to use LDAP external auth.'
|
||||
"LDAP connection could not be made, the python-ldap module is "
|
||||
"not installed. Install python-ldap to use LDAP external auth."
|
||||
)
|
||||
if self.starttls and self.tls:
|
||||
raise CommandExecutionError(
|
||||
'Cannot bind with both starttls and tls enabled.'
|
||||
'Please enable only one of the protocols'
|
||||
"Cannot bind with both starttls and tls enabled."
|
||||
"Please enable only one of the protocols"
|
||||
)
|
||||
|
||||
schema = 'ldaps' if tls else 'ldap'
|
||||
if self.uri == '':
|
||||
self.uri = '{0}://{1}:{2}'.format(schema, self.server, self.port)
|
||||
schema = "ldaps" if tls else "ldap"
|
||||
if self.uri == "":
|
||||
self.uri = "{0}://{1}:{2}".format(schema, self.server, self.port)
|
||||
|
||||
try:
|
||||
if no_verify:
|
||||
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT,
|
||||
ldap.OPT_X_TLS_NEVER)
|
||||
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
|
||||
|
||||
self.ldap = ldap.initialize('{0}'.format(self.uri))
|
||||
self.ldap = ldap.initialize("{0}".format(self.uri))
|
||||
self.ldap.protocol_version = 3 # ldap.VERSION3
|
||||
self.ldap.set_option(ldap.OPT_REFERRALS, 0) # Needed for AD
|
||||
|
||||
if not anonymous:
|
||||
if self.bindpw is None or len(self.bindpw) < 1:
|
||||
raise CommandExecutionError(
|
||||
'LDAP bind password is not set: password cannot be empty if auth.ldap.anonymous is False'
|
||||
"LDAP bind password is not set: password cannot be empty if auth.ldap.anonymous is False"
|
||||
)
|
||||
if self.starttls:
|
||||
self.ldap.start_tls_s()
|
||||
self.ldap.simple_bind_s(self.binddn, self.bindpw)
|
||||
except Exception as ldap_error: # pylint: disable=broad-except
|
||||
raise CommandExecutionError(
|
||||
'Failed to bind to LDAP server {0} as {1}: {2}'.format(
|
||||
"Failed to bind to LDAP server {0} as {1}: {2}".format(
|
||||
self.uri, self.binddn, ldap_error
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _bind_for_search(anonymous=False, opts=None):
    """
    Bind with binddn and bindpw only for searching LDAP

    :param anonymous: Try binding anonymously
    :param opts: Pass in when __opts__ is not available
    :return: LDAPConnection object
    """
    # Get config params; create connection dictionary
    connargs = {}
    # config params (auth.ldap.*)
    params = {
        "mandatory": [
            "uri",
            "server",
            "port",
            "starttls",
            "tls",
            "no_verify",
            "anonymous",
            "accountattributename",
            "activedirectory",
        ],
        "additional": [
            "binddn",
            "bindpw",
            "filter",
            "groupclass",
            "auth_by_group_membership_only",
        ],
    }

    paramvalues = {}

    for param in params["mandatory"]:
        paramvalues[param] = _config(param, opts=opts)

    for param in params["additional"]:
        paramvalues[param] = _config(param, mandatory=False, opts=opts)

    # Caller decides whether this search bind is anonymous, regardless of the
    # configured auth.ldap.anonymous value.
    paramvalues["anonymous"] = anonymous

    # Only add binddn/bindpw to the connargs when they're set, as they're not
    # mandatory for initializing the LDAP object, but if they're provided
    # initially, a bind attempt will be done during the initialization to
    # validate them
    if paramvalues["binddn"]:
        connargs["binddn"] = paramvalues["binddn"]
        if paramvalues["bindpw"]:
            params["mandatory"].append("bindpw")

    for name in params["mandatory"]:
        connargs[name] = paramvalues[name]

    if not paramvalues["anonymous"]:
        if paramvalues["binddn"] and paramvalues["bindpw"]:
            # search for the user's DN to be used for the actual authentication
            return _LDAPConnection(**connargs).ldap
||||
|
||||
|
||||
def _bind(username, password, anonymous=False, opts=None):
|
||||
'''
|
||||
"""
|
||||
Authenticate via an LDAP bind
|
||||
'''
|
||||
"""
|
||||
# Get config params; create connection dictionary
|
||||
basedn = _config('basedn', opts=opts)
|
||||
scope = _config('scope', opts=opts)
|
||||
basedn = _config("basedn", opts=opts)
|
||||
scope = _config("scope", opts=opts)
|
||||
connargs = {}
|
||||
# config params (auth.ldap.*)
|
||||
params = {
|
||||
'mandatory': ['uri', 'server', 'port', 'starttls', 'tls',
|
||||
'no_verify', 'anonymous',
|
||||
'accountattributename', 'activedirectory'],
|
||||
'additional': ['binddn', 'bindpw', 'filter', 'groupclass',
|
||||
'auth_by_group_membership_only'],
|
||||
"mandatory": [
|
||||
"uri",
|
||||
"server",
|
||||
"port",
|
||||
"starttls",
|
||||
"tls",
|
||||
"no_verify",
|
||||
"anonymous",
|
||||
"accountattributename",
|
||||
"activedirectory",
|
||||
],
|
||||
"additional": [
|
||||
"binddn",
|
||||
"bindpw",
|
||||
"filter",
|
||||
"groupclass",
|
||||
"auth_by_group_membership_only",
|
||||
],
|
||||
}
|
||||
|
||||
paramvalues = {}
|
||||
|
||||
for param in params['mandatory']:
|
||||
for param in params["mandatory"]:
|
||||
paramvalues[param] = _config(param, opts=opts)
|
||||
|
||||
for param in params['additional']:
|
||||
for param in params["additional"]:
|
||||
paramvalues[param] = _config(param, mandatory=False, opts=opts)
|
||||
|
||||
paramvalues['anonymous'] = anonymous
|
||||
if paramvalues['binddn']:
|
||||
paramvalues["anonymous"] = anonymous
|
||||
if paramvalues["binddn"]:
|
||||
# the binddn can also be composited, e.g.
|
||||
# - {{ username }}@domain.com
|
||||
# - cn={{ username }},ou=users,dc=company,dc=tld
|
||||
# so make sure to render it first before using it
|
||||
paramvalues['binddn'] = _render_template(paramvalues['binddn'], username)
|
||||
paramvalues['binddn'] = ldap.filter.escape_filter_chars(paramvalues['binddn'])
|
||||
paramvalues["binddn"] = _render_template(paramvalues["binddn"], username)
|
||||
paramvalues["binddn"] = ldap.filter.escape_filter_chars(paramvalues["binddn"])
|
||||
|
||||
if paramvalues['filter']:
|
||||
if paramvalues["filter"]:
|
||||
escaped_username = ldap.filter.escape_filter_chars(username)
|
||||
paramvalues['filter'] = _render_template(paramvalues['filter'], escaped_username)
|
||||
paramvalues["filter"] = _render_template(
|
||||
paramvalues["filter"], escaped_username
|
||||
)
|
||||
|
||||
# Only add binddn/bindpw to the connargs when they're set, as they're not
|
||||
# mandatory for initializing the LDAP object, but if they're provided
|
||||
# initially, a bind attempt will be done during the initialization to
|
||||
# validate them
|
||||
if paramvalues['binddn']:
|
||||
connargs['binddn'] = paramvalues['binddn']
|
||||
if paramvalues['bindpw']:
|
||||
params['mandatory'].append('bindpw')
|
||||
if paramvalues["binddn"]:
|
||||
connargs["binddn"] = paramvalues["binddn"]
|
||||
if paramvalues["bindpw"]:
|
||||
params["mandatory"].append("bindpw")
|
||||
|
||||
for name in params['mandatory']:
|
||||
for name in params["mandatory"]:
|
||||
connargs[name] = paramvalues[name]
|
||||
|
||||
if not paramvalues['anonymous']:
|
||||
if paramvalues['binddn'] and paramvalues['bindpw']:
|
||||
if not paramvalues["anonymous"]:
|
||||
if paramvalues["binddn"] and paramvalues["bindpw"]:
|
||||
# search for the user's DN to be used for the actual authentication
|
||||
_ldap = _LDAPConnection(**connargs).ldap
|
||||
log.debug(
|
||||
'Running LDAP user dn search with filter:%s, dn:%s, '
|
||||
'scope:%s', paramvalues['filter'], basedn, scope
|
||||
"Running LDAP user dn search with filter:%s, dn:%s, " "scope:%s",
|
||||
paramvalues["filter"],
|
||||
basedn,
|
||||
scope,
|
||||
)
|
||||
result = _ldap.search_s(basedn, int(scope), paramvalues['filter'])
|
||||
result = _ldap.search_s(basedn, int(scope), paramvalues["filter"])
|
||||
if len(result) < 1:
|
||||
log.warning('Unable to find user %s', username)
|
||||
log.warning("Unable to find user %s", username)
|
||||
return False
|
||||
elif len(result) > 1:
|
||||
# Active Directory returns something odd. Though we do not
|
||||
|
@ -261,71 +306,81 @@ def _bind(username, password, anonymous=False, opts=None):
|
|||
cns = [tup[0] for tup in result]
|
||||
total_not_none = sum(1 for c in cns if c is not None)
|
||||
if total_not_none > 1:
|
||||
log.error('LDAP lookup found multiple results for user %s', username)
|
||||
log.error(
|
||||
"LDAP lookup found multiple results for user %s", username
|
||||
)
|
||||
return False
|
||||
elif total_not_none == 0:
|
||||
log.error('LDAP lookup--unable to find CN matching user %s', username)
|
||||
log.error(
|
||||
"LDAP lookup--unable to find CN matching user %s", username
|
||||
)
|
||||
return False
|
||||
|
||||
connargs['binddn'] = result[0][0]
|
||||
if paramvalues['binddn'] and not paramvalues['bindpw']:
|
||||
connargs['binddn'] = paramvalues['binddn']
|
||||
elif paramvalues['binddn'] and not paramvalues['bindpw']:
|
||||
connargs['binddn'] = paramvalues['binddn']
|
||||
connargs["binddn"] = result[0][0]
|
||||
if paramvalues["binddn"] and not paramvalues["bindpw"]:
|
||||
connargs["binddn"] = paramvalues["binddn"]
|
||||
elif paramvalues["binddn"] and not paramvalues["bindpw"]:
|
||||
connargs["binddn"] = paramvalues["binddn"]
|
||||
|
||||
# Update connection dictionary with the user's password
|
||||
connargs['bindpw'] = password
|
||||
connargs["bindpw"] = password
|
||||
|
||||
# Attempt bind with user dn and password
|
||||
if paramvalues['anonymous']:
|
||||
log.debug('Attempting anonymous LDAP bind')
|
||||
if paramvalues["anonymous"]:
|
||||
log.debug("Attempting anonymous LDAP bind")
|
||||
else:
|
||||
log.debug('Attempting LDAP bind with user dn: %s', connargs['binddn'])
|
||||
log.debug("Attempting LDAP bind with user dn: %s", connargs["binddn"])
|
||||
try:
|
||||
ldap_conn = _LDAPConnection(**connargs).ldap
|
||||
except Exception: # pylint: disable=broad-except
|
||||
connargs.pop('bindpw', None) # Don't log the password
|
||||
log.error('Failed to authenticate user dn via LDAP: %s', connargs)
|
||||
log.debug('Error authenticating user dn via LDAP:', exc_info=True)
|
||||
connargs.pop("bindpw", None) # Don't log the password
|
||||
log.error("Failed to authenticate user dn via LDAP: %s", connargs)
|
||||
log.debug("Error authenticating user dn via LDAP:", exc_info=True)
|
||||
return False
|
||||
log.debug('Successfully authenticated user dn via LDAP: %s', connargs['binddn'])
|
||||
log.debug("Successfully authenticated user dn via LDAP: %s", connargs["binddn"])
|
||||
return ldap_conn
|
||||
|
||||
|
||||
def auth(username, password):
    """
    Simple LDAP auth

    Returns the bound LDAP connection object on success, or False when the
    python-ldap module is missing or the bind fails.
    """
    if not HAS_LDAP:
        log.error("LDAP authentication requires python-ldap module")
        return False

    bind = None

    # If bind credentials are configured, verify that we receive a valid bind
    if _config("binddn", mandatory=False) and _config("bindpw", mandatory=False):
        search_bind = _bind_for_search(anonymous=_config("anonymous", mandatory=False))

        # If username & password are not None, attempt to verify they are valid
        if search_bind and username and password:
            bind = _bind(
                username,
                password,
                anonymous=_config("auth_by_group_membership_only", mandatory=False)
                and _config("anonymous", mandatory=False),
            )
    else:
        bind = _bind(
            username,
            password,
            anonymous=_config("auth_by_group_membership_only", mandatory=False)
            and _config("anonymous", mandatory=False),
        )

    if bind:
        log.debug("LDAP authentication successful")
        return bind

    log.error("LDAP _bind authentication FAILED")
    return False
|
||||
|
||||
|
||||
def groups(username, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Authenticate against an LDAP group
|
||||
|
||||
Behavior is highly dependent on if Active Directory is in use.
|
||||
|
@ -338,108 +393,157 @@ def groups(username, **kwargs):
|
|||
and returns members of those groups. Then we check against
|
||||
the username entered.
|
||||
|
||||
'''
|
||||
"""
|
||||
group_list = []
|
||||
|
||||
# If bind credentials are configured, use them instead of user's
|
||||
if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False):
|
||||
bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False))
|
||||
if _config("binddn", mandatory=False) and _config("bindpw", mandatory=False):
|
||||
bind = _bind_for_search(anonymous=_config("anonymous", mandatory=False))
|
||||
else:
|
||||
bind = _bind(username, kwargs.get('password', ''),
|
||||
anonymous=_config('auth_by_group_membership_only', mandatory=False)
|
||||
and _config('anonymous', mandatory=False))
|
||||
bind = _bind(
|
||||
username,
|
||||
kwargs.get("password", ""),
|
||||
anonymous=_config("auth_by_group_membership_only", mandatory=False)
|
||||
and _config("anonymous", mandatory=False),
|
||||
)
|
||||
|
||||
if bind:
|
||||
log.debug('ldap bind to determine group membership succeeded!')
|
||||
log.debug("ldap bind to determine group membership succeeded!")
|
||||
|
||||
if _config('activedirectory'):
|
||||
if _config("activedirectory"):
|
||||
try:
|
||||
get_user_dn_search = '(&({0}={1})(objectClass={2}))'.format(_config('accountattributename'),
|
||||
username,
|
||||
_config('persontype'))
|
||||
user_dn_results = bind.search_s(_config('basedn'),
|
||||
ldap.SCOPE_SUBTREE,
|
||||
get_user_dn_search, [str('distinguishedName')]) # future lint: disable=blacklisted-function
|
||||
get_user_dn_search = "(&({0}={1})(objectClass={2}))".format(
|
||||
_config("accountattributename"), username, _config("persontype")
|
||||
)
|
||||
user_dn_results = bind.search_s(
|
||||
_config("basedn"),
|
||||
ldap.SCOPE_SUBTREE,
|
||||
get_user_dn_search,
|
||||
[str("distinguishedName")],
|
||||
) # future lint: disable=blacklisted-function
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
log.error('Exception thrown while looking up user DN in AD: %s', e)
|
||||
log.error("Exception thrown while looking up user DN in AD: %s", e)
|
||||
return group_list
|
||||
if not user_dn_results:
|
||||
log.error('Could not get distinguished name for user %s', username)
|
||||
log.error("Could not get distinguished name for user %s", username)
|
||||
return group_list
|
||||
# LDAP results are always tuples. First entry in the tuple is the DN
|
||||
dn = ldap.filter.escape_filter_chars(user_dn_results[0][0])
|
||||
ldap_search_string = '(&(member={0})(objectClass={1}))'.format(dn, _config('groupclass'))
|
||||
log.debug('Running LDAP group membership search: %s', ldap_search_string)
|
||||
ldap_search_string = "(&(member={0})(objectClass={1}))".format(
|
||||
dn, _config("groupclass")
|
||||
)
|
||||
log.debug("Running LDAP group membership search: %s", ldap_search_string)
|
||||
try:
|
||||
search_results = bind.search_s(_config('basedn'),
|
||||
ldap.SCOPE_SUBTREE,
|
||||
ldap_search_string,
|
||||
[salt.utils.stringutils.to_str(_config('accountattributename')), str('cn')]) # future lint: disable=blacklisted-function
|
||||
search_results = bind.search_s(
|
||||
_config("basedn"),
|
||||
ldap.SCOPE_SUBTREE,
|
||||
ldap_search_string,
|
||||
[
|
||||
salt.utils.stringutils.to_str(_config("accountattributename")),
|
||||
str("cn"),
|
||||
],
|
||||
) # future lint: disable=blacklisted-function
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
log.error('Exception thrown while retrieving group membership in AD: %s', e)
|
||||
log.error(
|
||||
"Exception thrown while retrieving group membership in AD: %s", e
|
||||
)
|
||||
return group_list
|
||||
for _, entry in search_results:
|
||||
if 'cn' in entry:
|
||||
group_list.append(salt.utils.stringutils.to_unicode(entry['cn'][0]))
|
||||
log.debug('User %s is a member of groups: %s', username, group_list)
|
||||
if "cn" in entry:
|
||||
group_list.append(salt.utils.stringutils.to_unicode(entry["cn"][0]))
|
||||
log.debug("User %s is a member of groups: %s", username, group_list)
|
||||
|
||||
elif _config('freeipa'):
|
||||
elif _config("freeipa"):
|
||||
escaped_username = ldap.filter.escape_filter_chars(username)
|
||||
search_base = _config('group_basedn')
|
||||
search_string = _render_template(_config('group_filter'), escaped_username)
|
||||
search_results = bind.search_s(search_base,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
search_string,
|
||||
[salt.utils.stringutils.to_str(_config('accountattributename')), salt.utils.stringutils.to_str(_config('groupattribute')), str('cn')]) # future lint: disable=blacklisted-function
|
||||
search_base = _config("group_basedn")
|
||||
search_string = _render_template(_config("group_filter"), escaped_username)
|
||||
search_results = bind.search_s(
|
||||
search_base,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
search_string,
|
||||
[
|
||||
salt.utils.stringutils.to_str(_config("accountattributename")),
|
||||
salt.utils.stringutils.to_str(_config("groupattribute")),
|
||||
str("cn"),
|
||||
],
|
||||
) # future lint: disable=blacklisted-function
|
||||
|
||||
for entry, result in search_results:
|
||||
for user in itertools.chain(result.get(_config('accountattributename'), []),
|
||||
result.get(_config('groupattribute'), [])):
|
||||
if username == salt.utils.stringutils.to_unicode(user).split(',')[0].split('=')[-1]:
|
||||
group_list.append(entry.split(',')[0].split('=')[-1])
|
||||
for user in itertools.chain(
|
||||
result.get(_config("accountattributename"), []),
|
||||
result.get(_config("groupattribute"), []),
|
||||
):
|
||||
if (
|
||||
username
|
||||
== salt.utils.stringutils.to_unicode(user)
|
||||
.split(",")[0]
|
||||
.split("=")[-1]
|
||||
):
|
||||
group_list.append(entry.split(",")[0].split("=")[-1])
|
||||
|
||||
log.debug('User %s is a member of groups: %s', username, group_list)
|
||||
log.debug("User %s is a member of groups: %s", username, group_list)
|
||||
|
||||
if not auth(username, kwargs['password']):
|
||||
log.error('LDAP username and password do not match')
|
||||
if not auth(username, kwargs["password"]):
|
||||
log.error("LDAP username and password do not match")
|
||||
return []
|
||||
else:
|
||||
if _config('groupou'):
|
||||
search_base = 'ou={0},{1}'.format(_config('groupou'), _config('basedn'))
|
||||
if _config("groupou"):
|
||||
search_base = "ou={0},{1}".format(_config("groupou"), _config("basedn"))
|
||||
else:
|
||||
search_base = '{0}'.format(_config('basedn'))
|
||||
search_string = '(&({0}={1})(objectClass={2}))'.format(_config('accountattributename'),
|
||||
username, _config('groupclass'))
|
||||
search_results = bind.search_s(search_base,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
search_string,
|
||||
[salt.utils.stringutils.to_str(_config('accountattributename')),
|
||||
str('cn'), # future lint: disable=blacklisted-function
|
||||
salt.utils.stringutils.to_str(_config('groupattribute'))])
|
||||
search_base = "{0}".format(_config("basedn"))
|
||||
search_string = "(&({0}={1})(objectClass={2}))".format(
|
||||
_config("accountattributename"), username, _config("groupclass")
|
||||
)
|
||||
search_results = bind.search_s(
|
||||
search_base,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
search_string,
|
||||
[
|
||||
salt.utils.stringutils.to_str(_config("accountattributename")),
|
||||
str("cn"), # future lint: disable=blacklisted-function
|
||||
salt.utils.stringutils.to_str(_config("groupattribute")),
|
||||
],
|
||||
)
|
||||
for _, entry in search_results:
|
||||
if username in salt.utils.data.decode(entry[_config('accountattributename')]):
|
||||
group_list.append(salt.utils.stringutils.to_unicode(entry['cn'][0]))
|
||||
if username in salt.utils.data.decode(
|
||||
entry[_config("accountattributename")]
|
||||
):
|
||||
group_list.append(salt.utils.stringutils.to_unicode(entry["cn"][0]))
|
||||
for user, entry in search_results:
|
||||
if username == salt.utils.stringutils.to_unicode(user).split(',')[0].split('=')[-1]:
|
||||
for group in salt.utils.data.decode(entry[_config('groupattribute')]):
|
||||
group_list.append(salt.utils.stringutils.to_unicode(group).split(',')[0].split('=')[-1])
|
||||
log.debug('User %s is a member of groups: %s', username, group_list)
|
||||
if (
|
||||
username
|
||||
== salt.utils.stringutils.to_unicode(user)
|
||||
.split(",")[0]
|
||||
.split("=")[-1]
|
||||
):
|
||||
for group in salt.utils.data.decode(
|
||||
entry[_config("groupattribute")]
|
||||
):
|
||||
group_list.append(
|
||||
salt.utils.stringutils.to_unicode(group)
|
||||
.split(",")[0]
|
||||
.split("=")[-1]
|
||||
)
|
||||
log.debug("User %s is a member of groups: %s", username, group_list)
|
||||
|
||||
# Only test user auth on first call for job.
|
||||
# 'show_jid' only exists on first payload so we can use that for the conditional.
|
||||
if 'show_jid' in kwargs and not _bind(username, kwargs.get('password'),
|
||||
anonymous=_config('auth_by_group_membership_only', mandatory=False) and
|
||||
_config('anonymous', mandatory=False)):
|
||||
log.error('LDAP username and password do not match')
|
||||
if "show_jid" in kwargs and not _bind(
|
||||
username,
|
||||
kwargs.get("password"),
|
||||
anonymous=_config("auth_by_group_membership_only", mandatory=False)
|
||||
and _config("anonymous", mandatory=False),
|
||||
):
|
||||
log.error("LDAP username and password do not match")
|
||||
return []
|
||||
else:
|
||||
log.error('ldap bind to determine group membership FAILED!')
|
||||
log.error("ldap bind to determine group membership FAILED!")
|
||||
|
||||
return group_list
|
||||
|
||||
|
||||
def __expand_ldap_entries(entries, opts=None):
|
||||
'''
|
||||
"""
|
||||
|
||||
:param entries: ldap subtree in external_auth config option
|
||||
:param opts: Opts to use when __opts__ not defined
|
||||
|
@ -458,7 +562,7 @@ def __expand_ldap_entries(entries, opts=None):
|
|||
- allowed_fn_list_attribute^
|
||||
|
||||
This function only gets called if auth.ldap.activedirectory = True
|
||||
'''
|
||||
"""
|
||||
bind = _bind_for_search(opts=opts)
|
||||
acl_tree = []
|
||||
for user_or_group_dict in entries:
|
||||
|
@ -468,26 +572,25 @@ def __expand_ldap_entries(entries, opts=None):
|
|||
for minion_or_ou, matchers in six.iteritems(user_or_group_dict):
|
||||
permissions = matchers
|
||||
retrieved_minion_ids = []
|
||||
if minion_or_ou.startswith('ldap('):
|
||||
search_base = minion_or_ou.lstrip('ldap(').rstrip(')')
|
||||
if minion_or_ou.startswith("ldap("):
|
||||
search_base = minion_or_ou.lstrip("ldap(").rstrip(")")
|
||||
|
||||
search_string = '(objectClass=computer)'
|
||||
search_string = "(objectClass=computer)"
|
||||
try:
|
||||
search_results = bind.search_s(search_base,
|
||||
ldap.SCOPE_SUBTREE,
|
||||
search_string,
|
||||
[str('cn')]) # future lint: disable=blacklisted-function
|
||||
search_results = bind.search_s(
|
||||
search_base, ldap.SCOPE_SUBTREE, search_string, [str("cn")]
|
||||
) # future lint: disable=blacklisted-function
|
||||
for ldap_match in search_results:
|
||||
try:
|
||||
minion_id = ldap_match[1]['cn'][0].lower()
|
||||
minion_id = ldap_match[1]["cn"][0].lower()
|
||||
# Some LDAP/AD trees only have the FQDN of machines
|
||||
# in their computer lists. auth.minion_stripdomains
|
||||
# lets a user strip off configured domain names
|
||||
# and arrive at the basic minion_id
|
||||
if opts.get('auth.ldap.minion_stripdomains', None):
|
||||
for domain in opts['auth.ldap.minion_stripdomains']:
|
||||
if opts.get("auth.ldap.minion_stripdomains", None):
|
||||
for domain in opts["auth.ldap.minion_stripdomains"]:
|
||||
if minion_id.endswith(domain):
|
||||
minion_id = minion_id[:-len(domain)]
|
||||
minion_id = minion_id[: -len(domain)]
|
||||
break
|
||||
retrieved_minion_ids.append(minion_id)
|
||||
except TypeError:
|
||||
|
@ -498,30 +601,36 @@ def __expand_ldap_entries(entries, opts=None):
|
|||
|
||||
for minion_id in retrieved_minion_ids:
|
||||
acl_tree.append({minion_id: permissions})
|
||||
log.trace('Expanded acl_tree is: %s', acl_tree)
|
||||
log.trace("Expanded acl_tree is: %s", acl_tree)
|
||||
except ldap.NO_SUCH_OBJECT:
|
||||
pass
|
||||
else:
|
||||
acl_tree.append({minion_or_ou: matchers})
|
||||
|
||||
log.trace('__expand_ldap_entries: %s', acl_tree)
|
||||
log.trace("__expand_ldap_entries: %s", acl_tree)
|
||||
return acl_tree
|
||||
|
||||
|
||||
def process_acl(auth_list, opts=None):
    """
    Query LDAP, retrieve list of minion_ids from an OU or other search.
    For each minion_id returned from the LDAP search, copy the perms
    matchers into the auth dictionary

    :param auth_list: external_auth ACL entries (strings and dicts)
    :param opts: __opts__ for when __opts__ is not injected
    :return: Modified auth list.
    """
    ou_names = []
    for item in auth_list:
        # Plain string entries are minion targets, not ldap(...) searches.
        if isinstance(item, six.string_types):
            continue
        ou_names.extend(
            [
                potential_ou
                for potential_ou in item.keys()
                if potential_ou.startswith("ldap(")
            ]
        )
    # Only hit LDAP when at least one ldap(...) search key is present.
    if ou_names:
        auth_list = __expand_ldap_entries(auth_list, opts)
    return auth_list
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
'''
|
||||
"""
|
||||
Provide authentication using MySQL.
|
||||
|
||||
When using MySQL as an authentication backend, you will need to create or
|
||||
|
@ -47,9 +47,10 @@ Enable MySQL authentication.
|
|||
- test.*
|
||||
|
||||
:depends: - MySQL-python Python module
|
||||
'''
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -64,6 +65,7 @@ except ImportError:
|
|||
try:
|
||||
# MySQLdb import failed, try to import PyMySQL
|
||||
import pymysql
|
||||
|
||||
pymysql.install_as_MySQLdb()
|
||||
import MySQLdb
|
||||
import MySQLdb.cursors
|
||||
|
@ -74,52 +76,51 @@ except ImportError:
|
|||
|
||||
|
||||
def __virtual__():
    """
    Confirm that a python mysql client is installed.

    Returns a (bool, reason) tuple; the reason string is empty when a client
    (MySQLdb or PyMySQL masquerading as MySQLdb) was imported successfully.
    """
    return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
|
||||
|
||||
|
||||
def __get_connection_info():
    """
    Grab MySQL Connection Details

    Reads the ``mysql_auth`` section of the master config and returns a dict
    with hostname, username, password, database and auth_sql keys, or None
    when any required key is missing.
    """
    conn_info = {}

    try:
        conn_info["hostname"] = __opts__["mysql_auth"]["hostname"]
        conn_info["username"] = __opts__["mysql_auth"]["username"]
        conn_info["password"] = __opts__["mysql_auth"]["password"]
        conn_info["database"] = __opts__["mysql_auth"]["database"]

        conn_info["auth_sql"] = __opts__["mysql_auth"]["auth_sql"]
    except KeyError as e:
        log.error("%s does not exist", e)
        return None

    return conn_info
|
||||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
Authenticate using a MySQL user table
|
||||
'''
|
||||
"""
|
||||
_info = __get_connection_info()
|
||||
|
||||
if _info is None:
|
||||
return False
|
||||
|
||||
try:
|
||||
conn = MySQLdb.connect(_info['hostname'],
|
||||
_info['username'],
|
||||
_info['password'],
|
||||
_info['database'])
|
||||
conn = MySQLdb.connect(
|
||||
_info["hostname"], _info["username"], _info["password"], _info["database"]
|
||||
)
|
||||
except OperationalError as e:
|
||||
log.error(e)
|
||||
return False
|
||||
|
||||
cur = conn.cursor()
|
||||
cur.execute(_info['auth_sql'].format(username, password))
|
||||
cur.execute(_info["auth_sql"].format(username, password))
|
||||
|
||||
if cur.rowcount == 1:
|
||||
return True
|
||||
|
|
110
salt/auth/pam.py
110
salt/auth/pam.py
|
@ -4,7 +4,7 @@
|
|||
# (c) 2007 Chris AtLee <chris@atlee.ca>
|
||||
# Licensed under the MIT license:
|
||||
# http://www.opensource.org/licenses/mit-license.php
|
||||
'''
|
||||
"""
|
||||
Authenticate against PAM
|
||||
|
||||
Provides an authenticate function that will allow the caller to authenticate
|
||||
|
@ -33,26 +33,39 @@ authenticated against. This defaults to `login`
|
|||
This should not be needed with python >= 3.3, because the `os` modules has the
|
||||
`getgrouplist` function.
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
from ctypes import CDLL, POINTER, Structure, CFUNCTYPE, cast, pointer, sizeof
|
||||
from ctypes import c_void_p, c_uint, c_char_p, c_char, c_int
|
||||
from ctypes import (
|
||||
CDLL,
|
||||
CFUNCTYPE,
|
||||
POINTER,
|
||||
Structure,
|
||||
c_char,
|
||||
c_char_p,
|
||||
c_int,
|
||||
c_uint,
|
||||
c_void_p,
|
||||
cast,
|
||||
pointer,
|
||||
sizeof,
|
||||
)
|
||||
from ctypes.util import find_library
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.user
|
||||
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
LIBC = CDLL(find_library('c'))
|
||||
LIBC = CDLL(find_library("c"))
|
||||
|
||||
CALLOC = LIBC.calloc
|
||||
CALLOC.restype = c_void_p
|
||||
|
@ -62,7 +75,7 @@ try:
|
|||
STRDUP.argstypes = [c_char_p]
|
||||
STRDUP.restype = POINTER(c_char) # NOT c_char_p !!!!
|
||||
except Exception: # pylint: disable=broad-except
|
||||
log.trace('Failed to load libc using ctypes', exc_info=True)
|
||||
log.trace("Failed to load libc using ctypes", exc_info=True)
|
||||
HAS_LIBC = False
|
||||
else:
|
||||
HAS_LIBC = True
|
||||
|
@ -75,12 +88,11 @@ PAM_TEXT_INFO = 4
|
|||
|
||||
|
||||
class PamHandle(Structure):
|
||||
'''
|
||||
"""
|
||||
Wrapper class for pam_handle_t
|
||||
'''
|
||||
_fields_ = [
|
||||
('handle', c_void_p)
|
||||
]
|
||||
"""
|
||||
|
||||
_fields_ = [("handle", c_void_p)]
|
||||
|
||||
def __init__(self):
|
||||
Structure.__init__(self)
|
||||
|
@ -88,57 +100,51 @@ class PamHandle(Structure):
|
|||
|
||||
|
||||
class PamMessage(Structure):
|
||||
'''
|
||||
"""
|
||||
Wrapper class for pam_message structure
|
||||
'''
|
||||
"""
|
||||
|
||||
_fields_ = [
|
||||
("msg_style", c_int),
|
||||
("msg", c_char_p),
|
||||
]
|
||||
("msg_style", c_int),
|
||||
("msg", c_char_p),
|
||||
]
|
||||
|
||||
def __repr__(self):
|
||||
return '<PamMessage {0} \'{1}\'>'.format(self.msg_style, self.msg)
|
||||
return "<PamMessage {0} '{1}'>".format(self.msg_style, self.msg)
|
||||
|
||||
|
||||
class PamResponse(Structure):
|
||||
'''
|
||||
"""
|
||||
Wrapper class for pam_response structure
|
||||
'''
|
||||
"""
|
||||
|
||||
_fields_ = [
|
||||
('resp', c_char_p),
|
||||
('resp_retcode', c_int),
|
||||
]
|
||||
("resp", c_char_p),
|
||||
("resp_retcode", c_int),
|
||||
]
|
||||
|
||||
def __repr__(self):
|
||||
return '<PamResponse {0} \'{1}\'>'.format(self.resp_retcode, self.resp)
|
||||
return "<PamResponse {0} '{1}'>".format(self.resp_retcode, self.resp)
|
||||
|
||||
|
||||
CONV_FUNC = CFUNCTYPE(
|
||||
c_int,
|
||||
c_int,
|
||||
POINTER(POINTER(PamMessage)),
|
||||
POINTER(POINTER(PamResponse)),
|
||||
c_void_p)
|
||||
c_int, c_int, POINTER(POINTER(PamMessage)), POINTER(POINTER(PamResponse)), c_void_p
|
||||
)
|
||||
|
||||
|
||||
class PamConv(Structure):
|
||||
'''
|
||||
"""
|
||||
Wrapper class for pam_conv structure
|
||||
'''
|
||||
_fields_ = [
|
||||
('conv', CONV_FUNC),
|
||||
('appdata_ptr', c_void_p)
|
||||
]
|
||||
"""
|
||||
|
||||
_fields_ = [("conv", CONV_FUNC), ("appdata_ptr", c_void_p)]
|
||||
|
||||
|
||||
try:
|
||||
LIBPAM = CDLL(find_library('pam'))
|
||||
LIBPAM = CDLL(find_library("pam"))
|
||||
PAM_START = LIBPAM.pam_start
|
||||
PAM_START.restype = c_int
|
||||
PAM_START.argtypes = [c_char_p,
|
||||
c_char_p,
|
||||
POINTER(PamConv),
|
||||
POINTER(PamHandle)]
|
||||
PAM_START.argtypes = [c_char_p, c_char_p, POINTER(PamConv), POINTER(PamHandle)]
|
||||
|
||||
PAM_AUTHENTICATE = LIBPAM.pam_authenticate
|
||||
PAM_AUTHENTICATE.restype = c_int
|
||||
|
@ -152,29 +158,29 @@ try:
|
|||
PAM_END.restype = c_int
|
||||
PAM_END.argtypes = [PamHandle, c_int]
|
||||
except Exception: # pylint: disable=broad-except
|
||||
log.trace('Failed to load pam using ctypes', exc_info=True)
|
||||
log.trace("Failed to load pam using ctypes", exc_info=True)
|
||||
HAS_PAM = False
|
||||
else:
|
||||
HAS_PAM = True
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Only load on Linux systems
|
||||
'''
|
||||
"""
|
||||
return HAS_LIBC and HAS_PAM
|
||||
|
||||
|
||||
def authenticate(username, password):
|
||||
'''
|
||||
"""
|
||||
Returns True if the given username and password authenticate for the
|
||||
given service. Returns False otherwise
|
||||
|
||||
``username``: the username to authenticate
|
||||
|
||||
``password``: the password in plain text
|
||||
'''
|
||||
service = __opts__.get('auth.pam.service', 'login')
|
||||
"""
|
||||
service = __opts__.get("auth.pam.service", "login")
|
||||
|
||||
if isinstance(username, six.text_type):
|
||||
username = username.encode(__salt_system_encoding__)
|
||||
|
@ -185,10 +191,10 @@ def authenticate(username, password):
|
|||
|
||||
@CONV_FUNC
|
||||
def my_conv(n_messages, messages, p_response, app_data):
|
||||
'''
|
||||
"""
|
||||
Simple conversation function that responds to any
|
||||
prompt where the echo is off with the supplied password
|
||||
'''
|
||||
"""
|
||||
# Create an array of n_messages response objects
|
||||
addr = CALLOC(n_messages, sizeof(PamResponse))
|
||||
p_response[0] = cast(addr, POINTER(PamResponse))
|
||||
|
@ -217,16 +223,16 @@ def authenticate(username, password):
|
|||
|
||||
|
||||
def auth(username, password, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Authenticate via pam
|
||||
'''
|
||||
"""
|
||||
return authenticate(username, password)
|
||||
|
||||
|
||||
def groups(username, *args, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Retrieve groups for a given user for this auth provider
|
||||
|
||||
Uses system groups
|
||||
'''
|
||||
"""
|
||||
return salt.utils.user.get_group_list(username)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Majority of code shamelessly stolen from
|
||||
# http://www.v13.gr/blog/?p=303
|
||||
'''
|
||||
"""
|
||||
Authenticate via a PKI certificate.
|
||||
|
||||
.. note::
|
||||
|
@ -14,16 +14,21 @@ a user via their public cert against a pre-defined Certificate Authority.
|
|||
TODO: Add a 'ca_dir' option to configure a directory of CA files, a la Apache.
|
||||
|
||||
:depends: - pyOpenSSL module
|
||||
'''
|
||||
"""
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.files
|
||||
|
||||
# Import third party libs
|
||||
# pylint: disable=import-error
|
||||
try:
|
||||
try:
|
||||
from M2Crypto import X509
|
||||
|
||||
HAS_M2 = True
|
||||
except ImportError:
|
||||
HAS_M2 = False
|
||||
|
@ -37,23 +42,21 @@ except ImportError:
|
|||
HAS_DEPS = False
|
||||
# pylint: enable=import-error
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Requires newer pycrypto and pyOpenSSL
|
||||
'''
|
||||
"""
|
||||
if HAS_DEPS:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def auth(username, password, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Returns True if the given user cert (password is the cert contents)
|
||||
was issued by the CA and if cert's Common Name is equal to username.
|
||||
|
||||
|
@ -73,22 +76,22 @@ def auth(username, password, **kwargs):
|
|||
ca_file: /etc/pki/tls/ca_certs/trusted-ca.crt
|
||||
your_user:
|
||||
- .*
|
||||
'''
|
||||
"""
|
||||
pem = password
|
||||
cacert_file = __salt__['config.get']('external_auth:pki:ca_file')
|
||||
cacert_file = __salt__["config.get"]("external_auth:pki:ca_file")
|
||||
|
||||
log.debug('Attempting to authenticate via pki.')
|
||||
log.debug('Using CA file: %s', cacert_file)
|
||||
log.debug('Certificate contents: %s', pem)
|
||||
log.debug("Attempting to authenticate via pki.")
|
||||
log.debug("Using CA file: %s", cacert_file)
|
||||
log.debug("Certificate contents: %s", pem)
|
||||
|
||||
if HAS_M2:
|
||||
cert = X509.load_cert_string(pem, X509.FORMAT_PEM)
|
||||
cacert = X509.load_cert(cacert_file, X509.FORMAT_PEM)
|
||||
if cert.verify(cacert.get_pubkey()):
|
||||
log.info('Successfully authenticated certificate: {0}'.format(pem))
|
||||
log.info("Successfully authenticated certificate: {0}".format(pem))
|
||||
return True
|
||||
else:
|
||||
log.info('Failed to authenticate certificate: {0}'.format(pem))
|
||||
log.info("Failed to authenticate certificate: {0}".format(pem))
|
||||
return False
|
||||
|
||||
c = OpenSSL.crypto
|
||||
|
@ -113,7 +116,7 @@ def auth(username, password, **kwargs):
|
|||
# - signature
|
||||
# http://usefulfor.com/nothing/2009/06/10/x509-certificate-basics/
|
||||
der_cert = der[0]
|
||||
#der_algo = der[1]
|
||||
# der_algo = der[1]
|
||||
der_sig = der[2]
|
||||
|
||||
# The signature is a BIT STRING (Type 3)
|
||||
|
@ -129,17 +132,19 @@ def auth(username, password, **kwargs):
|
|||
|
||||
# First byte is the number of unused bits. This should be 0
|
||||
# http://msdn.microsoft.com/en-us/library/windows/desktop/bb540792(v=vs.85).aspx
|
||||
if sig0[0] != '\x00':
|
||||
raise Exception('Number of unused bits is strange')
|
||||
if sig0[0] != "\x00":
|
||||
raise Exception("Number of unused bits is strange")
|
||||
# Now get the signature itself
|
||||
sig = sig0[1:]
|
||||
|
||||
# And verify the certificate
|
||||
try:
|
||||
c.verify(cacert, sig, der_cert, algo)
|
||||
assert dict(cert.get_subject().get_components())['CN'] == username, "Certificate's CN should match the username"
|
||||
log.info('Successfully authenticated certificate: %s', pem)
|
||||
assert (
|
||||
dict(cert.get_subject().get_components())["CN"] == username
|
||||
), "Certificate's CN should match the username"
|
||||
log.info("Successfully authenticated certificate: %s", pem)
|
||||
return True
|
||||
except (OpenSSL.crypto.Error, AssertionError):
|
||||
log.info('Failed to authenticate certificate: %s', pem)
|
||||
log.info("Failed to authenticate certificate: %s", pem)
|
||||
return False
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Provide authentication using a REST call
|
||||
|
||||
REST auth can be defined like any other eauth module:
|
||||
|
@ -21,10 +21,11 @@ run any execution module and all runners.
|
|||
The REST call should return a JSON object that maps to a regular eauth YAML structure
|
||||
as above.
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
|
@ -32,7 +33,7 @@ import salt.utils.http
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'rest'
|
||||
__virtualname__ = "rest"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -41,30 +42,31 @@ def __virtual__():
|
|||
|
||||
def rest_auth_setup():
|
||||
|
||||
if '^url' in __opts__['external_auth']['rest']:
|
||||
return __opts__['external_auth']['rest']['^url']
|
||||
if "^url" in __opts__["external_auth"]["rest"]:
|
||||
return __opts__["external_auth"]["rest"]["^url"]
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
REST authentication
|
||||
'''
|
||||
"""
|
||||
|
||||
url = rest_auth_setup()
|
||||
|
||||
data = {'username': username, 'password': password}
|
||||
data = {"username": username, "password": password}
|
||||
|
||||
# Post to the API endpoint. If 200 is returned then the result will be the ACLs
|
||||
# for this user
|
||||
result = salt.utils.http.query(url, method='POST', data=data, status=True,
|
||||
decode=True)
|
||||
if result['status'] == 200:
|
||||
log.debug('eauth REST call returned 200: %s', result)
|
||||
if result['dict'] is not None:
|
||||
return result['dict']
|
||||
result = salt.utils.http.query(
|
||||
url, method="POST", data=data, status=True, decode=True
|
||||
)
|
||||
if result["status"] == 200:
|
||||
log.debug("eauth REST call returned 200: %s", result)
|
||||
if result["dict"] is not None:
|
||||
return result["dict"]
|
||||
return True
|
||||
else:
|
||||
log.debug('eauth REST call failed: %s', result)
|
||||
log.debug("eauth REST call failed: %s", result)
|
||||
return False
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''Provide authentication using configured shared secret
|
||||
"""Provide authentication using configured shared secret
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -28,17 +28,18 @@ See the documentation for cherrypy to setup the headers in your
|
|||
frontal.
|
||||
|
||||
.. versionadded:: Beryllium
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
Shared secret authentication
|
||||
'''
|
||||
return password == __opts__.get('sharedsecret')
|
||||
"""
|
||||
return password == __opts__.get("sharedsecret")
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
'''
|
||||
"""
|
||||
Provide authentication using YubiKey.
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
|
@ -36,31 +36,34 @@ two values in your /etc/salt/master configuration.
|
|||
Please wait five to ten minutes after generating the key before testing so that
|
||||
the API key will be updated on all the YubiCloud servers.
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
from __future__ import print_function
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from yubico_client import Yubico, yubico_exceptions
|
||||
|
||||
HAS_YUBICO = True
|
||||
except ImportError:
|
||||
HAS_YUBICO = False
|
||||
|
||||
|
||||
def __get_yubico_users(username):
|
||||
'''
|
||||
"""
|
||||
Grab the YubiKey Client ID & Secret Key
|
||||
'''
|
||||
"""
|
||||
user = {}
|
||||
|
||||
try:
|
||||
if __opts__['yubico_users'].get(username, None):
|
||||
(user['id'], user['key']) = list(__opts__['yubico_users'][username].values())
|
||||
if __opts__["yubico_users"].get(username, None):
|
||||
(user["id"], user["key"]) = list(
|
||||
__opts__["yubico_users"][username].values()
|
||||
)
|
||||
else:
|
||||
return None
|
||||
except KeyError:
|
||||
|
@ -70,17 +73,17 @@ def __get_yubico_users(username):
|
|||
|
||||
|
||||
def auth(username, password):
|
||||
'''
|
||||
"""
|
||||
Authenticate against yubico server
|
||||
'''
|
||||
"""
|
||||
_cred = __get_yubico_users(username)
|
||||
|
||||
client = Yubico(_cred['id'], _cred['key'])
|
||||
client = Yubico(_cred["id"], _cred["key"])
|
||||
|
||||
try:
|
||||
return client.verify(password)
|
||||
except yubico_exceptions.StatusCodeError as e:
|
||||
log.info('Unable to verify YubiKey `%s`', e)
|
||||
log.info("Unable to verify YubiKey `%s`", e)
|
||||
return False
|
||||
|
||||
|
||||
|
@ -88,10 +91,10 @@ def groups(username, *args, **kwargs):
|
|||
return False
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
__opts__ = {'yubico_users': {'damian': {'id': '12345', 'key': 'ABC123'}}}
|
||||
if __name__ == "__main__":
|
||||
__opts__ = {"yubico_users": {"damian": {"id": "12345", "key": "ABC123"}}}
|
||||
|
||||
if auth('damian', 'OPT'):
|
||||
if auth("damian", "OPT"):
|
||||
print("Authenticated")
|
||||
else:
|
||||
print("Failed to authenticate")
|
||||
|
|
|
@ -1,27 +1,29 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
This package contains the loader modules for the salt streams system
|
||||
'''
|
||||
"""
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import logging
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import re
|
||||
|
||||
# Import Salt libs
|
||||
import salt.loader
|
||||
import salt.utils.event
|
||||
import salt.utils.minion
|
||||
from salt.ext.six.moves import map
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Beacon(object):
|
||||
'''
|
||||
"""
|
||||
This class is used to evaluate and execute on the beacon system
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts, functions):
|
||||
self.opts = opts
|
||||
self.functions = functions
|
||||
|
@ -29,7 +31,7 @@ class Beacon(object):
|
|||
self.interval_map = dict()
|
||||
|
||||
def process(self, config, grains):
|
||||
'''
|
||||
"""
|
||||
Process the configured beacons
|
||||
|
||||
The config must be a list and looks like this in yaml
|
||||
|
@ -40,13 +42,13 @@ class Beacon(object):
|
|||
- files:
|
||||
- /etc/fstab: {}
|
||||
- /var/cache/foo: {}
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
b_config = copy.deepcopy(config)
|
||||
if 'enabled' in b_config and not b_config['enabled']:
|
||||
if "enabled" in b_config and not b_config["enabled"]:
|
||||
return
|
||||
for mod in config:
|
||||
if mod == 'enabled':
|
||||
if mod == "enabled":
|
||||
continue
|
||||
|
||||
# Convert beacons that are lists to a dict to make processing easier
|
||||
|
@ -57,51 +59,62 @@ class Beacon(object):
|
|||
elif isinstance(config[mod], dict):
|
||||
current_beacon_config = config[mod]
|
||||
|
||||
if 'enabled' in current_beacon_config:
|
||||
if not current_beacon_config['enabled']:
|
||||
log.trace('Beacon %s disabled', mod)
|
||||
if "enabled" in current_beacon_config:
|
||||
if not current_beacon_config["enabled"]:
|
||||
log.trace("Beacon %s disabled", mod)
|
||||
continue
|
||||
else:
|
||||
# remove 'enabled' item before processing the beacon
|
||||
if isinstance(config[mod], dict):
|
||||
del config[mod]['enabled']
|
||||
del config[mod]["enabled"]
|
||||
else:
|
||||
self._remove_list_item(config[mod], 'enabled')
|
||||
self._remove_list_item(config[mod], "enabled")
|
||||
|
||||
log.trace('Beacon processing: %s', mod)
|
||||
log.trace("Beacon processing: %s", mod)
|
||||
beacon_name = None
|
||||
if self._determine_beacon_config(current_beacon_config, 'beacon_module'):
|
||||
beacon_name = current_beacon_config['beacon_module']
|
||||
if self._determine_beacon_config(current_beacon_config, "beacon_module"):
|
||||
beacon_name = current_beacon_config["beacon_module"]
|
||||
else:
|
||||
beacon_name = mod
|
||||
fun_str = '{0}.beacon'.format(beacon_name)
|
||||
validate_str = '{0}.validate'.format(beacon_name)
|
||||
fun_str = "{0}.beacon".format(beacon_name)
|
||||
validate_str = "{0}.validate".format(beacon_name)
|
||||
if fun_str in self.beacons:
|
||||
runonce = self._determine_beacon_config(current_beacon_config, 'run_once')
|
||||
interval = self._determine_beacon_config(current_beacon_config, 'interval')
|
||||
runonce = self._determine_beacon_config(
|
||||
current_beacon_config, "run_once"
|
||||
)
|
||||
interval = self._determine_beacon_config(
|
||||
current_beacon_config, "interval"
|
||||
)
|
||||
if interval:
|
||||
b_config = self._trim_config(b_config, mod, 'interval')
|
||||
b_config = self._trim_config(b_config, mod, "interval")
|
||||
if not self._process_interval(mod, interval):
|
||||
log.trace('Skipping beacon %s. Interval not reached.', mod)
|
||||
log.trace("Skipping beacon %s. Interval not reached.", mod)
|
||||
continue
|
||||
if self._determine_beacon_config(current_beacon_config, 'disable_during_state_run'):
|
||||
log.trace('Evaluting if beacon %s should be skipped due to a state run.', mod)
|
||||
b_config = self._trim_config(b_config, mod, 'disable_during_state_run')
|
||||
if self._determine_beacon_config(
|
||||
current_beacon_config, "disable_during_state_run"
|
||||
):
|
||||
log.trace(
|
||||
"Evaluting if beacon %s should be skipped due to a state run.",
|
||||
mod,
|
||||
)
|
||||
b_config = self._trim_config(
|
||||
b_config, mod, "disable_during_state_run"
|
||||
)
|
||||
is_running = False
|
||||
running_jobs = salt.utils.minion.running(self.opts)
|
||||
for job in running_jobs:
|
||||
if re.match('state.*', job['fun']):
|
||||
if re.match("state.*", job["fun"]):
|
||||
is_running = True
|
||||
if is_running:
|
||||
close_str = '{0}.close'.format(beacon_name)
|
||||
close_str = "{0}.close".format(beacon_name)
|
||||
if close_str in self.beacons:
|
||||
log.info('Closing beacon %s. State run in progress.', mod)
|
||||
log.info("Closing beacon %s. State run in progress.", mod)
|
||||
self.beacons[close_str](b_config[mod])
|
||||
else:
|
||||
log.info('Skipping beacon %s. State run in progress.', mod)
|
||||
log.info("Skipping beacon %s. State run in progress.", mod)
|
||||
continue
|
||||
# Update __grains__ on the beacon
|
||||
self.beacons[fun_str].__globals__['__grains__'] = grains
|
||||
self.beacons[fun_str].__globals__["__grains__"] = grains
|
||||
|
||||
# Run the validate function if it's available,
|
||||
# otherwise there is a warning about it being missing
|
||||
|
@ -109,31 +122,32 @@ class Beacon(object):
|
|||
valid, vcomment = self.beacons[validate_str](b_config[mod])
|
||||
|
||||
if not valid:
|
||||
log.info('Beacon %s configuration invalid, '
|
||||
'not running.\n%s', mod, vcomment)
|
||||
log.info(
|
||||
"Beacon %s configuration invalid, " "not running.\n%s",
|
||||
mod,
|
||||
vcomment,
|
||||
)
|
||||
continue
|
||||
|
||||
# Fire the beacon!
|
||||
raw = self.beacons[fun_str](b_config[mod])
|
||||
for data in raw:
|
||||
tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
|
||||
if 'tag' in data:
|
||||
tag += data.pop('tag')
|
||||
if 'id' not in data:
|
||||
data['id'] = self.opts['id']
|
||||
ret.append({'tag': tag,
|
||||
'data': data,
|
||||
'beacon_name': beacon_name})
|
||||
tag = "salt/beacon/{0}/{1}/".format(self.opts["id"], mod)
|
||||
if "tag" in data:
|
||||
tag += data.pop("tag")
|
||||
if "id" not in data:
|
||||
data["id"] = self.opts["id"]
|
||||
ret.append({"tag": tag, "data": data, "beacon_name": beacon_name})
|
||||
if runonce:
|
||||
self.disable_beacon(mod)
|
||||
else:
|
||||
log.warning('Unable to process beacon %s', mod)
|
||||
log.warning("Unable to process beacon %s", mod)
|
||||
return ret
|
||||
|
||||
def _trim_config(self, b_config, mod, key):
|
||||
'''
|
||||
"""
|
||||
Take a beacon configuration and strip out the interval bits
|
||||
'''
|
||||
"""
|
||||
if isinstance(b_config[mod], list):
|
||||
self._remove_list_item(b_config[mod], key)
|
||||
elif isinstance(b_config[mod], dict):
|
||||
|
@ -141,9 +155,9 @@ class Beacon(object):
|
|||
return b_config
|
||||
|
||||
def _determine_beacon_config(self, current_beacon_config, key):
|
||||
'''
|
||||
"""
|
||||
Process a beacon configuration to determine its interval
|
||||
'''
|
||||
"""
|
||||
|
||||
interval = False
|
||||
if isinstance(current_beacon_config, dict):
|
||||
|
@ -152,30 +166,30 @@ class Beacon(object):
|
|||
return interval
|
||||
|
||||
def _process_interval(self, mod, interval):
|
||||
'''
|
||||
"""
|
||||
Process beacons with intervals
|
||||
Return True if a beacon should be run on this loop
|
||||
'''
|
||||
log.trace('Processing interval %s for beacon mod %s', interval, mod)
|
||||
loop_interval = self.opts['loop_interval']
|
||||
"""
|
||||
log.trace("Processing interval %s for beacon mod %s", interval, mod)
|
||||
loop_interval = self.opts["loop_interval"]
|
||||
if mod in self.interval_map:
|
||||
log.trace('Processing interval in map')
|
||||
log.trace("Processing interval in map")
|
||||
counter = self.interval_map[mod]
|
||||
log.trace('Interval counter: %s', counter)
|
||||
log.trace("Interval counter: %s", counter)
|
||||
if counter * loop_interval >= interval:
|
||||
self.interval_map[mod] = 1
|
||||
return True
|
||||
else:
|
||||
self.interval_map[mod] += 1
|
||||
else:
|
||||
log.trace('Interval process inserting mod: %s', mod)
|
||||
log.trace("Interval process inserting mod: %s", mod)
|
||||
self.interval_map[mod] = 1
|
||||
return False
|
||||
|
||||
def _get_index(self, beacon_config, label):
|
||||
'''
|
||||
"""
|
||||
Return the index of a labeled config item in the beacon config, -1 if the index is not found
|
||||
'''
|
||||
"""
|
||||
|
||||
indexes = [index for index, item in enumerate(beacon_config) if label in item]
|
||||
if len(indexes) < 1:
|
||||
|
@ -184,51 +198,47 @@ class Beacon(object):
|
|||
return indexes[0]
|
||||
|
||||
def _remove_list_item(self, beacon_config, label):
|
||||
'''
|
||||
"""
|
||||
Remove an item from a beacon config list
|
||||
'''
|
||||
"""
|
||||
|
||||
index = self._get_index(beacon_config, label)
|
||||
del beacon_config[index]
|
||||
|
||||
def _update_enabled(self, name, enabled_value):
|
||||
'''
|
||||
"""
|
||||
Update whether an individual beacon is enabled
|
||||
'''
|
||||
"""
|
||||
|
||||
if isinstance(self.opts['beacons'][name], dict):
|
||||
if isinstance(self.opts["beacons"][name], dict):
|
||||
# Backwards compatibility
|
||||
self.opts['beacons'][name]['enabled'] = enabled_value
|
||||
self.opts["beacons"][name]["enabled"] = enabled_value
|
||||
else:
|
||||
enabled_index = self._get_index(self.opts['beacons'][name], 'enabled')
|
||||
enabled_index = self._get_index(self.opts["beacons"][name], "enabled")
|
||||
if enabled_index >= 0:
|
||||
self.opts['beacons'][name][enabled_index]['enabled'] = enabled_value
|
||||
self.opts["beacons"][name][enabled_index]["enabled"] = enabled_value
|
||||
else:
|
||||
self.opts['beacons'][name].append({'enabled': enabled_value})
|
||||
self.opts["beacons"][name].append({"enabled": enabled_value})
|
||||
|
||||
def _get_beacons(self,
|
||||
include_opts=True,
|
||||
include_pillar=True):
|
||||
'''
|
||||
def _get_beacons(self, include_opts=True, include_pillar=True):
|
||||
"""
|
||||
Return the beacons data structure
|
||||
'''
|
||||
"""
|
||||
beacons = {}
|
||||
if include_pillar:
|
||||
pillar_beacons = self.opts.get('pillar', {}).get('beacons', {})
|
||||
pillar_beacons = self.opts.get("pillar", {}).get("beacons", {})
|
||||
if not isinstance(pillar_beacons, dict):
|
||||
raise ValueError('Beacons must be of type dict.')
|
||||
raise ValueError("Beacons must be of type dict.")
|
||||
beacons.update(pillar_beacons)
|
||||
if include_opts:
|
||||
opts_beacons = self.opts.get('beacons', {})
|
||||
opts_beacons = self.opts.get("beacons", {})
|
||||
if not isinstance(opts_beacons, dict):
|
||||
raise ValueError('Beacons must be of type dict.')
|
||||
raise ValueError("Beacons must be of type dict.")
|
||||
beacons.update(opts_beacons)
|
||||
return beacons
|
||||
|
||||
def list_beacons(self,
|
||||
include_pillar=True,
|
||||
include_opts=True):
|
||||
'''
|
||||
def list_beacons(self, include_pillar=True, include_opts=True):
|
||||
"""
|
||||
List the beacon items
|
||||
|
||||
include_pillar: Whether to include beacons that are
|
||||
|
@ -236,214 +246,266 @@ class Beacon(object):
|
|||
|
||||
include_opts: Whether to include beacons that are
|
||||
configured in opts, default is True.
|
||||
'''
|
||||
beacons = self._get_beacons(include_pillar=include_pillar,
|
||||
include_opts=include_opts)
|
||||
"""
|
||||
beacons = self._get_beacons(
|
||||
include_pillar=include_pillar, include_opts=include_opts
|
||||
)
|
||||
|
||||
# Fire the complete event back along with the list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': True, 'beacons': beacons},
|
||||
tag='/salt/minion/minion_beacons_list_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{"complete": True, "beacons": beacons},
|
||||
tag="/salt/minion/minion_beacons_list_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def list_available_beacons(self):
|
||||
'''
|
||||
"""
|
||||
List the available beacons
|
||||
'''
|
||||
_beacons = ['{0}'.format(_beacon.replace('.beacon', ''))
|
||||
for _beacon in self.beacons if '.beacon' in _beacon]
|
||||
"""
|
||||
_beacons = [
|
||||
"{0}".format(_beacon.replace(".beacon", ""))
|
||||
for _beacon in self.beacons
|
||||
if ".beacon" in _beacon
|
||||
]
|
||||
|
||||
# Fire the complete event back along with the list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': True, 'beacons': _beacons},
|
||||
tag='/salt/minion/minion_beacons_list_available_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{"complete": True, "beacons": _beacons},
|
||||
tag="/salt/minion/minion_beacons_list_available_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def validate_beacon(self, name, beacon_data):
|
||||
'''
|
||||
"""
|
||||
Return available beacon functions
|
||||
'''
|
||||
validate_str = '{}.validate'.format(name)
|
||||
"""
|
||||
validate_str = "{}.validate".format(name)
|
||||
# Run the validate function if it's available,
|
||||
# otherwise there is a warning about it being missing
|
||||
if validate_str in self.beacons:
|
||||
if 'enabled' in beacon_data:
|
||||
del beacon_data['enabled']
|
||||
if "enabled" in beacon_data:
|
||||
del beacon_data["enabled"]
|
||||
valid, vcomment = self.beacons[validate_str](beacon_data)
|
||||
else:
|
||||
vcomment = 'Beacon {0} does not have a validate' \
|
||||
' function, skipping validation.'.format(name)
|
||||
vcomment = (
|
||||
"Beacon {0} does not have a validate"
|
||||
" function, skipping validation.".format(name)
|
||||
)
|
||||
valid = True
|
||||
|
||||
# Fire the complete event back along with the list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': True,
|
||||
'vcomment': vcomment,
|
||||
'valid': valid},
|
||||
tag='/salt/minion/minion_beacon_validation_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{"complete": True, "vcomment": vcomment, "valid": valid},
|
||||
tag="/salt/minion/minion_beacon_validation_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def add_beacon(self, name, beacon_data):
|
||||
'''
|
||||
"""
|
||||
Add a beacon item
|
||||
'''
|
||||
"""
|
||||
|
||||
data = {}
|
||||
data[name] = beacon_data
|
||||
|
||||
if name in self._get_beacons(include_opts=False):
|
||||
comment = 'Cannot update beacon item {0}, ' \
|
||||
'because it is configured in pillar.'.format(name)
|
||||
comment = (
|
||||
"Cannot update beacon item {0}, "
|
||||
"because it is configured in pillar.".format(name)
|
||||
)
|
||||
complete = False
|
||||
else:
|
||||
if name in self.opts['beacons']:
|
||||
comment = 'Updating settings for beacon ' \
|
||||
'item: {0}'.format(name)
|
||||
if name in self.opts["beacons"]:
|
||||
comment = "Updating settings for beacon " "item: {0}".format(name)
|
||||
else:
|
||||
comment = 'Added new beacon item: {0}'.format(name)
|
||||
comment = "Added new beacon item: {0}".format(name)
|
||||
complete = True
|
||||
self.opts['beacons'].update(data)
|
||||
self.opts["beacons"].update(data)
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': complete, 'comment': comment,
|
||||
'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacon_add_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{
|
||||
"complete": complete,
|
||||
"comment": comment,
|
||||
"beacons": self.opts["beacons"],
|
||||
},
|
||||
tag="/salt/minion/minion_beacon_add_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def modify_beacon(self, name, beacon_data):
|
||||
'''
|
||||
"""
|
||||
Modify a beacon item
|
||||
'''
|
||||
"""
|
||||
|
||||
data = {}
|
||||
data[name] = beacon_data
|
||||
|
||||
if name in self._get_beacons(include_opts=False):
|
||||
comment = 'Cannot modify beacon item {0}, ' \
|
||||
'it is configured in pillar.'.format(name)
|
||||
comment = (
|
||||
"Cannot modify beacon item {0}, "
|
||||
"it is configured in pillar.".format(name)
|
||||
)
|
||||
complete = False
|
||||
else:
|
||||
comment = 'Updating settings for beacon ' \
|
||||
'item: {0}'.format(name)
|
||||
comment = "Updating settings for beacon " "item: {0}".format(name)
|
||||
complete = True
|
||||
self.opts['beacons'].update(data)
|
||||
self.opts["beacons"].update(data)
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': complete, 'comment': comment,
|
||||
'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacon_modify_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{
|
||||
"complete": complete,
|
||||
"comment": comment,
|
||||
"beacons": self.opts["beacons"],
|
||||
},
|
||||
tag="/salt/minion/minion_beacon_modify_complete",
|
||||
)
|
||||
return True
|
||||
|
||||
def delete_beacon(self, name):
|
||||
'''
|
||||
"""
|
||||
Delete a beacon item
|
||||
'''
|
||||
"""
|
||||
|
||||
if name in self._get_beacons(include_opts=False):
|
||||
comment = 'Cannot delete beacon item {0}, ' \
|
||||
'it is configured in pillar.'.format(name)
|
||||
comment = (
|
||||
"Cannot delete beacon item {0}, "
|
||||
"it is configured in pillar.".format(name)
|
||||
)
|
||||
complete = False
|
||||
else:
|
||||
if name in self.opts['beacons']:
|
||||
del self.opts['beacons'][name]
|
||||
comment = 'Deleting beacon item: {0}'.format(name)
|
||||
if name in self.opts["beacons"]:
|
||||
del self.opts["beacons"][name]
|
||||
comment = "Deleting beacon item: {0}".format(name)
|
||||
else:
|
||||
comment = 'Beacon item {0} not found.'.format(name)
|
||||
comment = "Beacon item {0} not found.".format(name)
|
||||
complete = True
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': complete, 'comment': comment,
|
||||
'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacon_delete_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{
|
||||
"complete": complete,
|
||||
"comment": comment,
|
||||
"beacons": self.opts["beacons"],
|
||||
},
|
||||
tag="/salt/minion/minion_beacon_delete_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def enable_beacons(self):
|
||||
'''
|
||||
"""
|
||||
Enable beacons
|
||||
'''
|
||||
"""
|
||||
|
||||
self.opts['beacons']['enabled'] = True
|
||||
self.opts["beacons"]["enabled"] = True
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': True, 'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacons_enabled_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{"complete": True, "beacons": self.opts["beacons"]},
|
||||
tag="/salt/minion/minion_beacons_enabled_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def disable_beacons(self):
|
||||
'''
|
||||
"""
|
||||
Enable beacons
|
||||
'''
|
||||
"""
|
||||
|
||||
self.opts['beacons']['enabled'] = False
|
||||
self.opts["beacons"]["enabled"] = False
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': True, 'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacons_disabled_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{"complete": True, "beacons": self.opts["beacons"]},
|
||||
tag="/salt/minion/minion_beacons_disabled_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def enable_beacon(self, name):
|
||||
'''
|
||||
"""
|
||||
Enable a beacon
|
||||
'''
|
||||
"""
|
||||
|
||||
if name in self._get_beacons(include_opts=False):
|
||||
comment = 'Cannot enable beacon item {0}, ' \
|
||||
'it is configured in pillar.'.format(name)
|
||||
comment = (
|
||||
"Cannot enable beacon item {0}, "
|
||||
"it is configured in pillar.".format(name)
|
||||
)
|
||||
complete = False
|
||||
else:
|
||||
self._update_enabled(name, True)
|
||||
comment = 'Enabling beacon item {0}'.format(name)
|
||||
comment = "Enabling beacon item {0}".format(name)
|
||||
complete = True
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': complete, 'comment': comment,
|
||||
'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacon_enabled_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{
|
||||
"complete": complete,
|
||||
"comment": comment,
|
||||
"beacons": self.opts["beacons"],
|
||||
},
|
||||
tag="/salt/minion/minion_beacon_enabled_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def disable_beacon(self, name):
|
||||
'''
|
||||
"""
|
||||
Disable a beacon
|
||||
'''
|
||||
"""
|
||||
|
||||
if name in self._get_beacons(include_opts=False):
|
||||
comment = 'Cannot disable beacon item {0}, ' \
|
||||
'it is configured in pillar.'.format(name)
|
||||
comment = (
|
||||
"Cannot disable beacon item {0}, "
|
||||
"it is configured in pillar.".format(name)
|
||||
)
|
||||
complete = False
|
||||
else:
|
||||
self._update_enabled(name, False)
|
||||
comment = 'Disabling beacon item {0}'.format(name)
|
||||
comment = "Disabling beacon item {0}".format(name)
|
||||
complete = True
|
||||
|
||||
# Fire the complete event back along with updated list of beacons
|
||||
with salt.utils.event.get_event('minion', opts=self.opts) as evt:
|
||||
evt.fire_event({'complete': complete, 'comment': comment,
|
||||
'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacon_disabled_complete')
|
||||
with salt.utils.event.get_event("minion", opts=self.opts) as evt:
|
||||
evt.fire_event(
|
||||
{
|
||||
"complete": complete,
|
||||
"comment": comment,
|
||||
"beacons": self.opts["beacons"],
|
||||
},
|
||||
tag="/salt/minion/minion_beacon_disabled_complete",
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def reset(self):
|
||||
'''
|
||||
"""
|
||||
Reset the beacons to defaults
|
||||
'''
|
||||
self.opts['beacons'] = {}
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts)
|
||||
evt.fire_event({'complete': True, 'comment': 'Beacons have been reset',
|
||||
'beacons': self.opts['beacons']},
|
||||
tag='/salt/minion/minion_beacon_reset_complete')
|
||||
"""
|
||||
self.opts["beacons"] = {}
|
||||
evt = salt.utils.event.get_event("minion", opts=self.opts)
|
||||
evt.fire_event(
|
||||
{
|
||||
"complete": True,
|
||||
"comment": "Beacons have been reset",
|
||||
"beacons": self.opts["beacons"],
|
||||
},
|
||||
tag="/salt/minion/minion_beacon_reset_complete",
|
||||
)
|
||||
return True
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to emit adb device state changes for Android devices
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Salt libs
|
||||
|
@ -15,14 +16,14 @@ from salt.ext.six.moves import map
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'adb'
|
||||
__virtualname__ = "adb"
|
||||
|
||||
last_state = {}
|
||||
last_state_extra = {'value': False, 'no_devices': False}
|
||||
last_state_extra = {"value": False, "no_devices": False}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
which_result = salt.utils.path.which('adb')
|
||||
which_result = salt.utils.path.which("adb")
|
||||
if which_result is None:
|
||||
return False
|
||||
else:
|
||||
|
@ -30,38 +31,53 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for adb beacon should be a dictionary with states array
|
||||
if not isinstance(config, list):
|
||||
log.info('Configuration for adb beacon must be a list.')
|
||||
return False, ('Configuration for adb beacon must be a list.')
|
||||
log.info("Configuration for adb beacon must be a list.")
|
||||
return False, ("Configuration for adb beacon must be a list.")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'states' not in _config:
|
||||
log.info('Configuration for adb beacon must include a states array.')
|
||||
return False, ('Configuration for adb beacon must include a states array.')
|
||||
if "states" not in _config:
|
||||
log.info("Configuration for adb beacon must include a states array.")
|
||||
return False, ("Configuration for adb beacon must include a states array.")
|
||||
else:
|
||||
if not isinstance(_config['states'], list):
|
||||
log.info('Configuration for adb beacon must include a states array.')
|
||||
return False, ('Configuration for adb beacon must include a states array.')
|
||||
if not isinstance(_config["states"], list):
|
||||
log.info("Configuration for adb beacon must include a states array.")
|
||||
return False, ("Configuration for adb beacon must include a states array.")
|
||||
else:
|
||||
states = ['offline', 'bootloader', 'device', 'host',
|
||||
'recovery', 'no permissions',
|
||||
'sideload', 'unauthorized', 'unknown', 'missing']
|
||||
if any(s not in states for s in _config['states']):
|
||||
log.info('Need a one of the following adb '
|
||||
'states: %s', ', '.join(states))
|
||||
return False, ('Need a one of the following adb '
|
||||
'states: {0}'.format(', '.join(states)))
|
||||
return True, 'Valid beacon configuration'
|
||||
states = [
|
||||
"offline",
|
||||
"bootloader",
|
||||
"device",
|
||||
"host",
|
||||
"recovery",
|
||||
"no permissions",
|
||||
"sideload",
|
||||
"unauthorized",
|
||||
"unknown",
|
||||
"missing",
|
||||
]
|
||||
if any(s not in states for s in _config["states"]):
|
||||
log.info(
|
||||
"Need a one of the following adb " "states: %s", ", ".join(states)
|
||||
)
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Need a one of the following adb "
|
||||
"states: {0}".format(", ".join(states))
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Emit the status of all devices returned by adb
|
||||
|
||||
Specify the device states that should emit an event,
|
||||
|
@ -79,47 +95,60 @@ def beacon(config):
|
|||
- no_devices_event: True
|
||||
- battery_low: 25
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
log.trace('adb beacon starting')
|
||||
log.trace("adb beacon starting")
|
||||
ret = []
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
out = __salt__['cmd.run']('adb devices', runas=_config.get('user', None))
|
||||
out = __salt__["cmd.run"]("adb devices", runas=_config.get("user", None))
|
||||
|
||||
lines = out.split('\n')[1:]
|
||||
lines = out.split("\n")[1:]
|
||||
last_state_devices = list(last_state.keys())
|
||||
found_devices = []
|
||||
|
||||
for line in lines:
|
||||
try:
|
||||
device, state = line.split('\t')
|
||||
device, state = line.split("\t")
|
||||
found_devices.append(device)
|
||||
if device not in last_state_devices or \
|
||||
('state' in last_state[device] and last_state[device]['state'] != state):
|
||||
if state in _config['states']:
|
||||
ret.append({'device': device, 'state': state, 'tag': state})
|
||||
last_state[device] = {'state': state}
|
||||
if device not in last_state_devices or (
|
||||
"state" in last_state[device] and last_state[device]["state"] != state
|
||||
):
|
||||
if state in _config["states"]:
|
||||
ret.append({"device": device, "state": state, "tag": state})
|
||||
last_state[device] = {"state": state}
|
||||
|
||||
if 'battery_low' in _config:
|
||||
if "battery_low" in _config:
|
||||
val = last_state.get(device, {})
|
||||
cmd = 'adb -s {0} shell cat /sys/class/power_supply/*/capacity'.format(device)
|
||||
battery_levels = __salt__['cmd.run'](cmd, runas=_config.get('user', None)).split('\n')
|
||||
cmd = "adb -s {0} shell cat /sys/class/power_supply/*/capacity".format(
|
||||
device
|
||||
)
|
||||
battery_levels = __salt__["cmd.run"](
|
||||
cmd, runas=_config.get("user", None)
|
||||
).split("\n")
|
||||
|
||||
for l in battery_levels:
|
||||
battery_level = int(l)
|
||||
if 0 < battery_level < 100:
|
||||
if 'battery' not in val or battery_level != val['battery']:
|
||||
if ('battery' not in val or val['battery'] > _config['battery_low']) and \
|
||||
battery_level <= _config['battery_low']:
|
||||
ret.append({'device': device, 'battery_level': battery_level, 'tag': 'battery_low'})
|
||||
if "battery" not in val or battery_level != val["battery"]:
|
||||
if (
|
||||
"battery" not in val
|
||||
or val["battery"] > _config["battery_low"]
|
||||
) and battery_level <= _config["battery_low"]:
|
||||
ret.append(
|
||||
{
|
||||
"device": device,
|
||||
"battery_level": battery_level,
|
||||
"tag": "battery_low",
|
||||
}
|
||||
)
|
||||
|
||||
if device not in last_state:
|
||||
last_state[device] = {}
|
||||
|
||||
last_state[device].update({'battery': battery_level})
|
||||
last_state[device].update({"battery": battery_level})
|
||||
|
||||
except ValueError:
|
||||
continue
|
||||
|
@ -127,18 +156,18 @@ def beacon(config):
|
|||
# Find missing devices and remove them / send an event
|
||||
for device in last_state_devices:
|
||||
if device not in found_devices:
|
||||
if 'missing' in _config['states']:
|
||||
ret.append({'device': device, 'state': 'missing', 'tag': 'missing'})
|
||||
if "missing" in _config["states"]:
|
||||
ret.append({"device": device, "state": "missing", "tag": "missing"})
|
||||
|
||||
del last_state[device]
|
||||
|
||||
# Maybe send an event if we don't have any devices
|
||||
if 'no_devices_event' in _config and _config['no_devices_event'] is True:
|
||||
if len(found_devices) == 0 and not last_state_extra['no_devices']:
|
||||
ret.append({'tag': 'no_devices'})
|
||||
if "no_devices_event" in _config and _config["no_devices_event"] is True:
|
||||
if len(found_devices) == 0 and not last_state_extra["no_devices"]:
|
||||
ret.append({"tag": "no_devices"})
|
||||
|
||||
# Did we have no devices listed this time around?
|
||||
|
||||
last_state_extra['no_devices'] = len(found_devices) == 0
|
||||
last_state_extra["no_devices"] = len(found_devices) == 0
|
||||
|
||||
return ret
|
||||
|
|
|
@ -1,47 +1,56 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to fire event when we notice a AIX user is locked due to many failed login attempts.
|
||||
|
||||
.. versionadded:: 2018.3.0
|
||||
|
||||
:depends: none
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'aix_account'
|
||||
__virtualname__ = "aix_account"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Only load if kernel is AIX
|
||||
'''
|
||||
if __grains__['kernel'] == ('AIX'):
|
||||
"""
|
||||
if __grains__["kernel"] == ("AIX"):
|
||||
return __virtualname__
|
||||
|
||||
return (False, 'The aix_account beacon module failed to load: '
|
||||
'only available on AIX systems.')
|
||||
return (
|
||||
False,
|
||||
"The aix_account beacon module failed to load: "
|
||||
"only available on AIX systems.",
|
||||
)
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for aix_account beacon should be a dictionary
|
||||
if not isinstance(config, dict):
|
||||
return False, ('Configuration for aix_account beacon must be a dict.')
|
||||
if 'user' not in config:
|
||||
return False, ('Configuration for aix_account beacon must '
|
||||
'include a user or ALL for all users.')
|
||||
return True, 'Valid beacon configuration'
|
||||
return False, ("Configuration for aix_account beacon must be a dict.")
|
||||
if "user" not in config:
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for aix_account beacon must "
|
||||
"include a user or ALL for all users."
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Checks for locked accounts due to too many invalid login attempts, 3 or higher.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
@ -51,13 +60,13 @@ def beacon(config):
|
|||
user: ALL
|
||||
interval: 120
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
ret = []
|
||||
|
||||
user = config['user']
|
||||
user = config["user"]
|
||||
|
||||
locked_accounts = __salt__['shadow.login_failures'](user)
|
||||
ret.append({'accounts': locked_accounts})
|
||||
locked_accounts = __salt__["shadow.login_failures"](user)
|
||||
ret.append({"accounts": locked_accounts})
|
||||
|
||||
return ret
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to announce via avahi (zeroconf)
|
||||
|
||||
.. versionadded:: 2016.11.0
|
||||
|
@ -10,9 +10,10 @@ Dependencies
|
|||
- python-avahi
|
||||
- dbus-python
|
||||
|
||||
'''
|
||||
"""
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
|
@ -22,6 +23,7 @@ from salt.ext.six.moves import map
|
|||
# Import 3rd Party libs
|
||||
try:
|
||||
import avahi
|
||||
|
||||
HAS_PYAVAHI = True
|
||||
except ImportError:
|
||||
HAS_PYAVAHI = False
|
||||
|
@ -29,11 +31,16 @@ except ImportError:
|
|||
try:
|
||||
import dbus
|
||||
from dbus import DBusException
|
||||
|
||||
BUS = dbus.SystemBus()
|
||||
SERVER = dbus.Interface(BUS.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER),
|
||||
avahi.DBUS_INTERFACE_SERVER)
|
||||
GROUP = dbus.Interface(BUS.get_object(avahi.DBUS_NAME, SERVER.EntryGroupNew()),
|
||||
avahi.DBUS_INTERFACE_ENTRY_GROUP)
|
||||
SERVER = dbus.Interface(
|
||||
BUS.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER),
|
||||
avahi.DBUS_INTERFACE_SERVER,
|
||||
)
|
||||
GROUP = dbus.Interface(
|
||||
BUS.get_object(avahi.DBUS_NAME, SERVER.EntryGroupNew()),
|
||||
avahi.DBUS_INTERFACE_ENTRY_GROUP,
|
||||
)
|
||||
HAS_DBUS = True
|
||||
except (ImportError, NameError):
|
||||
HAS_DBUS = False
|
||||
|
@ -42,7 +49,7 @@ except DBusException:
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'avahi_announce'
|
||||
__virtualname__ = "avahi_announce"
|
||||
|
||||
LAST_GRAINS = {}
|
||||
|
||||
|
@ -51,33 +58,41 @@ def __virtual__():
|
|||
if HAS_PYAVAHI:
|
||||
if HAS_DBUS:
|
||||
return __virtualname__
|
||||
return False, 'The {0} beacon cannot be loaded. The ' \
|
||||
'\'python-dbus\' dependency is missing.'.format(__virtualname__)
|
||||
return False, 'The {0} beacon cannot be loaded. The ' \
|
||||
'\'python-avahi\' dependency is missing.'.format(__virtualname__)
|
||||
return (
|
||||
False,
|
||||
"The {0} beacon cannot be loaded. The "
|
||||
"'python-dbus' dependency is missing.".format(__virtualname__),
|
||||
)
|
||||
return (
|
||||
False,
|
||||
"The {0} beacon cannot be loaded. The "
|
||||
"'python-avahi' dependency is missing.".format(__virtualname__),
|
||||
)
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for avahi_announce '
|
||||
'beacon must be a list.')
|
||||
return False, ("Configuration for avahi_announce beacon must be a list.")
|
||||
|
||||
elif not all(x in _config for x in ('servicetype',
|
||||
'port',
|
||||
'txt')):
|
||||
return False, ('Configuration for avahi_announce beacon '
|
||||
'must contain servicetype, port and txt items.')
|
||||
return True, 'Valid beacon configuration.'
|
||||
elif not all(x in _config for x in ("servicetype", "port", "txt")):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for avahi_announce beacon "
|
||||
"must contain servicetype, port and txt items."
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration."
|
||||
|
||||
|
||||
def _enforce_txt_record_maxlen(key, value):
|
||||
'''
|
||||
"""
|
||||
Enforces the TXT record maximum length of 255 characters.
|
||||
TXT record length includes key, value, and '='.
|
||||
|
||||
|
@ -88,16 +103,16 @@ def _enforce_txt_record_maxlen(key, value):
|
|||
:return: The value of the TXT record. It may be truncated if it exceeds
|
||||
the maximum permitted length. In case of truncation, '...' is
|
||||
appended to indicate that the entire value is not present.
|
||||
'''
|
||||
"""
|
||||
# Add 1 for '=' seperator between key and value
|
||||
if len(key) + len(value) + 1 > 255:
|
||||
# 255 - 3 ('...') - 1 ('=') = 251
|
||||
return value[:251 - len(key)] + '...'
|
||||
return value[: 251 - len(key)] + "..."
|
||||
return value
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Broadcast values via zeroconf
|
||||
|
||||
If the announced values are static, it is advised to set run_once: True
|
||||
|
@ -149,7 +164,7 @@ def beacon(config):
|
|||
ProdName: grains.productname
|
||||
SerialNo: grains.serialnumber
|
||||
Comments: 'this is a test'
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
changes = {}
|
||||
txt = {}
|
||||
|
@ -159,76 +174,100 @@ def beacon(config):
|
|||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'servicename' in _config:
|
||||
servicename = _config['servicename']
|
||||
if "servicename" in _config:
|
||||
servicename = _config["servicename"]
|
||||
else:
|
||||
servicename = __grains__['host']
|
||||
servicename = __grains__["host"]
|
||||
# Check for hostname change
|
||||
if LAST_GRAINS and LAST_GRAINS['host'] != servicename:
|
||||
changes['servicename'] = servicename
|
||||
if LAST_GRAINS and LAST_GRAINS["host"] != servicename:
|
||||
changes["servicename"] = servicename
|
||||
|
||||
if LAST_GRAINS and _config.get('reset_on_change', False):
|
||||
if LAST_GRAINS and _config.get("reset_on_change", False):
|
||||
# Check for IP address change in the case when we reset on change
|
||||
if LAST_GRAINS.get('ipv4', []) != __grains__.get('ipv4', []):
|
||||
changes['ipv4'] = __grains__.get('ipv4', [])
|
||||
if LAST_GRAINS.get('ipv6', []) != __grains__.get('ipv6', []):
|
||||
changes['ipv6'] = __grains__.get('ipv6', [])
|
||||
if LAST_GRAINS.get("ipv4", []) != __grains__.get("ipv4", []):
|
||||
changes["ipv4"] = __grains__.get("ipv4", [])
|
||||
if LAST_GRAINS.get("ipv6", []) != __grains__.get("ipv6", []):
|
||||
changes["ipv6"] = __grains__.get("ipv6", [])
|
||||
|
||||
for item in _config['txt']:
|
||||
changes_key = 'txt.' + salt.utils.stringutils.to_unicode(item)
|
||||
if _config['txt'][item].startswith('grains.'):
|
||||
grain = _config['txt'][item][7:]
|
||||
for item in _config["txt"]:
|
||||
changes_key = "txt." + salt.utils.stringutils.to_unicode(item)
|
||||
if _config["txt"][item].startswith("grains."):
|
||||
grain = _config["txt"][item][7:]
|
||||
grain_index = None
|
||||
square_bracket = grain.find('[')
|
||||
if square_bracket != -1 and grain[-1] == ']':
|
||||
grain_index = int(grain[square_bracket+1:-1])
|
||||
square_bracket = grain.find("[")
|
||||
if square_bracket != -1 and grain[-1] == "]":
|
||||
grain_index = int(grain[square_bracket + 1 : -1])
|
||||
grain = grain[:square_bracket]
|
||||
|
||||
grain_value = __grains__.get(grain, '')
|
||||
grain_value = __grains__.get(grain, "")
|
||||
if isinstance(grain_value, list):
|
||||
if grain_index is not None:
|
||||
grain_value = grain_value[grain_index]
|
||||
else:
|
||||
grain_value = ','.join(grain_value)
|
||||
grain_value = ",".join(grain_value)
|
||||
txt[item] = _enforce_txt_record_maxlen(item, grain_value)
|
||||
if LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, '')):
|
||||
if LAST_GRAINS and (
|
||||
LAST_GRAINS.get(grain, "") != __grains__.get(grain, "")
|
||||
):
|
||||
changes[changes_key] = txt[item]
|
||||
else:
|
||||
txt[item] = _enforce_txt_record_maxlen(item, _config['txt'][item])
|
||||
txt[item] = _enforce_txt_record_maxlen(item, _config["txt"][item])
|
||||
|
||||
if not LAST_GRAINS:
|
||||
changes[changes_key] = txt[item]
|
||||
|
||||
if changes:
|
||||
if not LAST_GRAINS:
|
||||
changes['servicename'] = servicename
|
||||
changes['servicetype'] = _config['servicetype']
|
||||
changes['port'] = _config['port']
|
||||
changes['ipv4'] = __grains__.get('ipv4', [])
|
||||
changes['ipv6'] = __grains__.get('ipv6', [])
|
||||
GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
|
||||
servicename, _config['servicetype'], '', '',
|
||||
dbus.UInt16(_config['port']), avahi.dict_to_txt_array(txt))
|
||||
changes["servicename"] = servicename
|
||||
changes["servicetype"] = _config["servicetype"]
|
||||
changes["port"] = _config["port"]
|
||||
changes["ipv4"] = __grains__.get("ipv4", [])
|
||||
changes["ipv6"] = __grains__.get("ipv6", [])
|
||||
GROUP.AddService(
|
||||
avahi.IF_UNSPEC,
|
||||
avahi.PROTO_UNSPEC,
|
||||
dbus.UInt32(0),
|
||||
servicename,
|
||||
_config["servicetype"],
|
||||
"",
|
||||
"",
|
||||
dbus.UInt16(_config["port"]),
|
||||
avahi.dict_to_txt_array(txt),
|
||||
)
|
||||
GROUP.Commit()
|
||||
elif _config.get('reset_on_change', False) or 'servicename' in changes:
|
||||
elif _config.get("reset_on_change", False) or "servicename" in changes:
|
||||
# A change in 'servicename' requires a reset because we can only
|
||||
# directly update TXT records
|
||||
GROUP.Reset()
|
||||
reset_wait = _config.get('reset_wait', 0)
|
||||
reset_wait = _config.get("reset_wait", 0)
|
||||
if reset_wait > 0:
|
||||
time.sleep(reset_wait)
|
||||
GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
|
||||
servicename, _config['servicetype'], '', '',
|
||||
dbus.UInt16(_config['port']), avahi.dict_to_txt_array(txt))
|
||||
GROUP.AddService(
|
||||
avahi.IF_UNSPEC,
|
||||
avahi.PROTO_UNSPEC,
|
||||
dbus.UInt32(0),
|
||||
servicename,
|
||||
_config["servicetype"],
|
||||
"",
|
||||
"",
|
||||
dbus.UInt16(_config["port"]),
|
||||
avahi.dict_to_txt_array(txt),
|
||||
)
|
||||
GROUP.Commit()
|
||||
else:
|
||||
GROUP.UpdateServiceTxt(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
|
||||
servicename, _config['servicetype'], '',
|
||||
avahi.dict_to_txt_array(txt))
|
||||
GROUP.UpdateServiceTxt(
|
||||
avahi.IF_UNSPEC,
|
||||
avahi.PROTO_UNSPEC,
|
||||
dbus.UInt32(0),
|
||||
servicename,
|
||||
_config["servicetype"],
|
||||
"",
|
||||
avahi.dict_to_txt_array(txt),
|
||||
)
|
||||
|
||||
ret.append({'tag': 'result', 'changes': changes})
|
||||
ret.append({"tag": "result", "changes": changes})
|
||||
|
||||
if _config.get('copy_grains', False):
|
||||
if _config.get("copy_grains", False):
|
||||
LAST_GRAINS = __grains__.copy()
|
||||
else:
|
||||
LAST_GRAINS = __grains__
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to announce via Bonjour (zeroconf)
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
import select
|
||||
|
@ -17,13 +18,14 @@ from salt.ext.six.moves import map
|
|||
# Import 3rd Party libs
|
||||
try:
|
||||
import pybonjour
|
||||
|
||||
HAS_PYBONJOUR = True
|
||||
except ImportError:
|
||||
HAS_PYBONJOUR = False
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'bonjour_announce'
|
||||
__virtualname__ = "bonjour_announce"
|
||||
|
||||
LAST_GRAINS = {}
|
||||
SD_REF = None
|
||||
|
@ -36,42 +38,46 @@ def __virtual__():
|
|||
|
||||
|
||||
def _close_sd_ref():
|
||||
'''
|
||||
"""
|
||||
Close the SD_REF object if it isn't NULL
|
||||
For use with atexit.register
|
||||
'''
|
||||
"""
|
||||
global SD_REF
|
||||
if SD_REF:
|
||||
SD_REF.close()
|
||||
SD_REF = None
|
||||
|
||||
|
||||
def _register_callback(sdRef, flags, errorCode, name, regtype, domain): # pylint: disable=unused-argument
|
||||
def _register_callback(
|
||||
sdRef, flags, errorCode, name, regtype, domain
|
||||
): # pylint: disable=unused-argument
|
||||
if errorCode != pybonjour.kDNSServiceErr_NoError:
|
||||
log.error('Bonjour registration failed with error code %s', errorCode)
|
||||
log.error("Bonjour registration failed with error code %s", errorCode)
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for bonjour_announce '
|
||||
'beacon must be a list.')
|
||||
return False, ("Configuration for bonjour_announce beacon must be a list.")
|
||||
|
||||
elif not all(x in _config for x in ('servicetype',
|
||||
'port',
|
||||
'txt')):
|
||||
return False, ('Configuration for bonjour_announce beacon '
|
||||
'must contain servicetype, port and txt items.')
|
||||
return True, 'Valid beacon configuration.'
|
||||
elif not all(x in _config for x in ("servicetype", "port", "txt")):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for bonjour_announce beacon "
|
||||
"must contain servicetype, port and txt items."
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration."
|
||||
|
||||
|
||||
def _enforce_txt_record_maxlen(key, value):
|
||||
'''
|
||||
"""
|
||||
Enforces the TXT record maximum length of 255 characters.
|
||||
TXT record length includes key, value, and '='.
|
||||
|
||||
|
@ -82,16 +88,16 @@ def _enforce_txt_record_maxlen(key, value):
|
|||
:return: The value of the TXT record. It may be truncated if it exceeds
|
||||
the maximum permitted length. In case of truncation, '...' is
|
||||
appended to indicate that the entire value is not present.
|
||||
'''
|
||||
"""
|
||||
# Add 1 for '=' seperator between key and value
|
||||
if len(key) + len(value) + 1 > 255:
|
||||
# 255 - 3 ('...') - 1 ('=') = 251
|
||||
return value[:251 - len(key)] + '...'
|
||||
return value[: 251 - len(key)] + "..."
|
||||
return value
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Broadcast values via zeroconf
|
||||
|
||||
If the announced values are static, it is advised to set run_once: True
|
||||
|
@ -143,7 +149,7 @@ def beacon(config):
|
|||
ProdName: grains.productname
|
||||
SerialNo: grains.serialnumber
|
||||
Comments: 'this is a test'
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
changes = {}
|
||||
txt = {}
|
||||
|
@ -154,42 +160,44 @@ def beacon(config):
|
|||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'servicename' in _config:
|
||||
servicename = _config['servicename']
|
||||
if "servicename" in _config:
|
||||
servicename = _config["servicename"]
|
||||
else:
|
||||
servicename = __grains__['host']
|
||||
servicename = __grains__["host"]
|
||||
# Check for hostname change
|
||||
if LAST_GRAINS and LAST_GRAINS['host'] != servicename:
|
||||
changes['servicename'] = servicename
|
||||
if LAST_GRAINS and LAST_GRAINS["host"] != servicename:
|
||||
changes["servicename"] = servicename
|
||||
|
||||
if LAST_GRAINS and _config.get('reset_on_change', False):
|
||||
if LAST_GRAINS and _config.get("reset_on_change", False):
|
||||
# Check for IP address change in the case when we reset on change
|
||||
if LAST_GRAINS.get('ipv4', []) != __grains__.get('ipv4', []):
|
||||
changes['ipv4'] = __grains__.get('ipv4', [])
|
||||
if LAST_GRAINS.get('ipv6', []) != __grains__.get('ipv6', []):
|
||||
changes['ipv6'] = __grains__.get('ipv6', [])
|
||||
if LAST_GRAINS.get("ipv4", []) != __grains__.get("ipv4", []):
|
||||
changes["ipv4"] = __grains__.get("ipv4", [])
|
||||
if LAST_GRAINS.get("ipv6", []) != __grains__.get("ipv6", []):
|
||||
changes["ipv6"] = __grains__.get("ipv6", [])
|
||||
|
||||
for item in _config['txt']:
|
||||
changes_key = 'txt.' + salt.utils.stringutils.to_unicode(item)
|
||||
if _config['txt'][item].startswith('grains.'):
|
||||
grain = _config['txt'][item][7:]
|
||||
for item in _config["txt"]:
|
||||
changes_key = "txt." + salt.utils.stringutils.to_unicode(item)
|
||||
if _config["txt"][item].startswith("grains."):
|
||||
grain = _config["txt"][item][7:]
|
||||
grain_index = None
|
||||
square_bracket = grain.find('[')
|
||||
if square_bracket != -1 and grain[-1] == ']':
|
||||
grain_index = int(grain[square_bracket+1:-1])
|
||||
square_bracket = grain.find("[")
|
||||
if square_bracket != -1 and grain[-1] == "]":
|
||||
grain_index = int(grain[square_bracket + 1 : -1])
|
||||
grain = grain[:square_bracket]
|
||||
|
||||
grain_value = __grains__.get(grain, '')
|
||||
grain_value = __grains__.get(grain, "")
|
||||
if isinstance(grain_value, list):
|
||||
if grain_index is not None:
|
||||
grain_value = grain_value[grain_index]
|
||||
else:
|
||||
grain_value = ','.join(grain_value)
|
||||
grain_value = ",".join(grain_value)
|
||||
txt[item] = _enforce_txt_record_maxlen(item, grain_value)
|
||||
if LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, '')):
|
||||
if LAST_GRAINS and (
|
||||
LAST_GRAINS.get(grain, "") != __grains__.get(grain, "")
|
||||
):
|
||||
changes[changes_key] = txt[item]
|
||||
else:
|
||||
txt[item] = _enforce_txt_record_maxlen(item, _config['txt'][item])
|
||||
txt[item] = _enforce_txt_record_maxlen(item, _config["txt"][item])
|
||||
|
||||
if not LAST_GRAINS:
|
||||
changes[changes_key] = txt[item]
|
||||
|
@ -197,49 +205,49 @@ def beacon(config):
|
|||
if changes:
|
||||
txt_record = pybonjour.TXTRecord(items=txt)
|
||||
if not LAST_GRAINS:
|
||||
changes['servicename'] = servicename
|
||||
changes['servicetype'] = _config['servicetype']
|
||||
changes['port'] = _config['port']
|
||||
changes['ipv4'] = __grains__.get('ipv4', [])
|
||||
changes['ipv6'] = __grains__.get('ipv6', [])
|
||||
changes["servicename"] = servicename
|
||||
changes["servicetype"] = _config["servicetype"]
|
||||
changes["port"] = _config["port"]
|
||||
changes["ipv4"] = __grains__.get("ipv4", [])
|
||||
changes["ipv6"] = __grains__.get("ipv6", [])
|
||||
SD_REF = pybonjour.DNSServiceRegister(
|
||||
name=servicename,
|
||||
regtype=_config['servicetype'],
|
||||
port=_config['port'],
|
||||
regtype=_config["servicetype"],
|
||||
port=_config["port"],
|
||||
txtRecord=txt_record,
|
||||
callBack=_register_callback)
|
||||
callBack=_register_callback,
|
||||
)
|
||||
atexit.register(_close_sd_ref)
|
||||
ready = select.select([SD_REF], [], [])
|
||||
if SD_REF in ready[0]:
|
||||
pybonjour.DNSServiceProcessResult(SD_REF)
|
||||
elif _config.get('reset_on_change', False) or 'servicename' in changes:
|
||||
elif _config.get("reset_on_change", False) or "servicename" in changes:
|
||||
# A change in 'servicename' requires a reset because we can only
|
||||
# directly update TXT records
|
||||
SD_REF.close()
|
||||
SD_REF = None
|
||||
reset_wait = _config.get('reset_wait', 0)
|
||||
reset_wait = _config.get("reset_wait", 0)
|
||||
if reset_wait > 0:
|
||||
time.sleep(reset_wait)
|
||||
SD_REF = pybonjour.DNSServiceRegister(
|
||||
name=servicename,
|
||||
regtype=_config['servicetype'],
|
||||
port=_config['port'],
|
||||
regtype=_config["servicetype"],
|
||||
port=_config["port"],
|
||||
txtRecord=txt_record,
|
||||
callBack=_register_callback)
|
||||
callBack=_register_callback,
|
||||
)
|
||||
ready = select.select([SD_REF], [], [])
|
||||
if SD_REF in ready[0]:
|
||||
pybonjour.DNSServiceProcessResult(SD_REF)
|
||||
else:
|
||||
txt_record_raw = six.text_type(txt_record).encode('utf-8')
|
||||
txt_record_raw = six.text_type(txt_record).encode("utf-8")
|
||||
pybonjour.DNSServiceUpdateRecord(
|
||||
SD_REF,
|
||||
RecordRef=None,
|
||||
flags=0,
|
||||
rdata=txt_record_raw)
|
||||
SD_REF, RecordRef=None, flags=0, rdata=txt_record_raw
|
||||
)
|
||||
|
||||
ret.append({'tag': 'result', 'changes': changes})
|
||||
ret.append({"tag": "result", "changes": changes})
|
||||
|
||||
if _config.get('copy_grains', False):
|
||||
if _config.get("copy_grains", False):
|
||||
LAST_GRAINS = __grains__.copy()
|
||||
else:
|
||||
LAST_GRAINS = __grains__
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to fire events at failed login of users
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
|
@ -89,48 +89,52 @@ Match the event like so in the master config file:
|
|||
API key to post to Slack, a bot user is likely better suited for this. The
|
||||
:py:mod:`slack engine <salt.engines.slack>` documentation has information
|
||||
on how to set up a bot user.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import struct
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.utils.stringutils
|
||||
import salt.utils.files
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six
|
||||
import salt.utils.files
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.utils.stringutils
|
||||
|
||||
# pylint: disable=import-error
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
__virtualname__ = 'btmp'
|
||||
BTMP = '/var/log/btmp'
|
||||
FMT = b'hi32s4s32s256shhiii4i20x'
|
||||
__virtualname__ = "btmp"
|
||||
BTMP = "/var/log/btmp"
|
||||
FMT = b"hi32s4s32s256shhiii4i20x"
|
||||
FIELDS = [
|
||||
'type',
|
||||
'PID',
|
||||
'line',
|
||||
'inittab',
|
||||
'user',
|
||||
'hostname',
|
||||
'exit_status',
|
||||
'session',
|
||||
'time',
|
||||
'addr'
|
||||
]
|
||||
"type",
|
||||
"PID",
|
||||
"line",
|
||||
"inittab",
|
||||
"user",
|
||||
"hostname",
|
||||
"exit_status",
|
||||
"session",
|
||||
"time",
|
||||
"addr",
|
||||
]
|
||||
SIZE = struct.calcsize(FMT)
|
||||
LOC_KEY = 'btmp.loc'
|
||||
LOC_KEY = "btmp.loc"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# pylint: disable=import-error
|
||||
try:
|
||||
import dateutil.parser as dateutil_parser
|
||||
|
||||
_TIME_SUPPORTED = True
|
||||
except ImportError:
|
||||
_TIME_SUPPORTED = False
|
||||
|
@ -143,120 +147,110 @@ def __virtual__():
|
|||
|
||||
|
||||
def _validate_time_range(trange, status, msg):
|
||||
'''
|
||||
"""
|
||||
Check time range
|
||||
'''
|
||||
"""
|
||||
# If trange is empty, just return the current status & msg
|
||||
if not trange:
|
||||
return status, msg
|
||||
|
||||
if not isinstance(trange, dict):
|
||||
status = False
|
||||
msg = ('The time_range parameter for '
|
||||
'btmp beacon must '
|
||||
'be a dictionary.')
|
||||
msg = "The time_range parameter for " "btmp beacon must " "be a dictionary."
|
||||
|
||||
if not all(k in trange for k in ('start', 'end')):
|
||||
if not all(k in trange for k in ("start", "end")):
|
||||
status = False
|
||||
msg = ('The time_range parameter for '
|
||||
'btmp beacon must contain '
|
||||
'start & end options.')
|
||||
msg = (
|
||||
"The time_range parameter for "
|
||||
"btmp beacon must contain "
|
||||
"start & end options."
|
||||
)
|
||||
|
||||
return status, msg
|
||||
|
||||
|
||||
def _gather_group_members(group, groups, users):
|
||||
'''
|
||||
"""
|
||||
Gather group members
|
||||
'''
|
||||
_group = __salt__['group.info'](group)
|
||||
"""
|
||||
_group = __salt__["group.info"](group)
|
||||
|
||||
if not _group:
|
||||
log.warning('Group %s does not exist, ignoring.', group)
|
||||
log.warning("Group %s does not exist, ignoring.", group)
|
||||
return
|
||||
|
||||
for member in _group['members']:
|
||||
for member in _group["members"]:
|
||||
if member not in users:
|
||||
users[member] = groups[group]
|
||||
|
||||
|
||||
def _check_time_range(time_range, now):
|
||||
'''
|
||||
"""
|
||||
Check time range
|
||||
'''
|
||||
"""
|
||||
if _TIME_SUPPORTED:
|
||||
_start = dateutil_parser.parse(time_range['start'])
|
||||
_end = dateutil_parser.parse(time_range['end'])
|
||||
_start = dateutil_parser.parse(time_range["start"])
|
||||
_end = dateutil_parser.parse(time_range["end"])
|
||||
|
||||
return bool(_start <= now <= _end)
|
||||
else:
|
||||
log.error('Dateutil is required.')
|
||||
log.error("Dateutil is required.")
|
||||
return False
|
||||
|
||||
|
||||
def _get_loc():
|
||||
'''
|
||||
"""
|
||||
return the active file location
|
||||
'''
|
||||
"""
|
||||
if LOC_KEY in __context__:
|
||||
return __context__[LOC_KEY]
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
vstatus = True
|
||||
vmsg = 'Valid beacon configuration'
|
||||
vmsg = "Valid beacon configuration"
|
||||
|
||||
# Configuration for load beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
vstatus = False
|
||||
vmsg = ('Configuration for btmp beacon must '
|
||||
'be a list.')
|
||||
vmsg = "Configuration for btmp beacon must " "be a list."
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'users' in _config:
|
||||
if not isinstance(_config['users'], dict):
|
||||
if "users" in _config:
|
||||
if not isinstance(_config["users"], dict):
|
||||
vstatus = False
|
||||
vmsg = ('User configuration for btmp beacon must '
|
||||
'be a dictionary.')
|
||||
vmsg = "User configuration for btmp beacon must " "be a dictionary."
|
||||
else:
|
||||
for user in _config['users']:
|
||||
_time_range = _config['users'][user].get('time_range', {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range,
|
||||
vstatus,
|
||||
vmsg)
|
||||
for user in _config["users"]:
|
||||
_time_range = _config["users"][user].get("time_range", {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range, vstatus, vmsg)
|
||||
|
||||
if not vstatus:
|
||||
return vstatus, vmsg
|
||||
|
||||
if 'groups' in _config:
|
||||
if not isinstance(_config['groups'], dict):
|
||||
if "groups" in _config:
|
||||
if not isinstance(_config["groups"], dict):
|
||||
vstatus = False
|
||||
vmsg = ('Group configuration for btmp beacon must '
|
||||
'be a dictionary.')
|
||||
vmsg = "Group configuration for btmp beacon must " "be a dictionary."
|
||||
else:
|
||||
for group in _config['groups']:
|
||||
_time_range = _config['groups'][group].get('time_range', {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range,
|
||||
vstatus,
|
||||
vmsg)
|
||||
for group in _config["groups"]:
|
||||
_time_range = _config["groups"][group].get("time_range", {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range, vstatus, vmsg)
|
||||
if not vstatus:
|
||||
return vstatus, vmsg
|
||||
|
||||
if 'defaults' in _config:
|
||||
if not isinstance(_config['defaults'], dict):
|
||||
if "defaults" in _config:
|
||||
if not isinstance(_config["defaults"], dict):
|
||||
vstatus = False
|
||||
vmsg = ('Defaults configuration for btmp beacon must '
|
||||
'be a dictionary.')
|
||||
vmsg = "Defaults configuration for btmp beacon must " "be a dictionary."
|
||||
else:
|
||||
_time_range = _config['defaults'].get('time_range', {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range,
|
||||
vstatus,
|
||||
vmsg)
|
||||
_time_range = _config["defaults"].get("time_range", {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range, vstatus, vmsg)
|
||||
if not vstatus:
|
||||
return vstatus, vmsg
|
||||
|
||||
|
@ -264,9 +258,9 @@ def validate(config):
|
|||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Read the last btmp file and return information on the failed logins
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
users = {}
|
||||
|
@ -274,16 +268,16 @@ def beacon(config):
|
|||
defaults = None
|
||||
|
||||
for config_item in config:
|
||||
if 'users' in config_item:
|
||||
users = config_item['users']
|
||||
if "users" in config_item:
|
||||
users = config_item["users"]
|
||||
|
||||
if 'groups' in config_item:
|
||||
groups = config_item['groups']
|
||||
if "groups" in config_item:
|
||||
groups = config_item["groups"]
|
||||
|
||||
if 'defaults' in config_item:
|
||||
defaults = config_item['defaults']
|
||||
if "defaults" in config_item:
|
||||
defaults = config_item["defaults"]
|
||||
|
||||
with salt.utils.files.fopen(BTMP, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(BTMP, "rb") as fp_:
|
||||
loc = __context__.get(LOC_KEY, 0)
|
||||
if loc == 0:
|
||||
fp_.seek(0, 2)
|
||||
|
@ -304,27 +298,26 @@ def beacon(config):
|
|||
if isinstance(event[field], salt.ext.six.string_types):
|
||||
if isinstance(event[field], bytes):
|
||||
event[field] = salt.utils.stringutils.to_unicode(event[field])
|
||||
event[field] = event[field].strip('\x00')
|
||||
event[field] = event[field].strip("\x00")
|
||||
|
||||
for group in groups:
|
||||
_gather_group_members(group, groups, users)
|
||||
|
||||
if users:
|
||||
if event['user'] in users:
|
||||
_user = users[event['user']]
|
||||
if isinstance(_user, dict) and 'time_range' in _user:
|
||||
if _check_time_range(_user['time_range'], now):
|
||||
if event["user"] in users:
|
||||
_user = users[event["user"]]
|
||||
if isinstance(_user, dict) and "time_range" in _user:
|
||||
if _check_time_range(_user["time_range"], now):
|
||||
ret.append(event)
|
||||
else:
|
||||
if defaults and 'time_range' in defaults:
|
||||
if _check_time_range(defaults['time_range'],
|
||||
now):
|
||||
if defaults and "time_range" in defaults:
|
||||
if _check_time_range(defaults["time_range"], now):
|
||||
ret.append(event)
|
||||
else:
|
||||
ret.append(event)
|
||||
else:
|
||||
if defaults and 'time_range' in defaults:
|
||||
if _check_time_range(defaults['time_range'], now):
|
||||
if defaults and "time_range" in defaults:
|
||||
if _check_time_range(defaults["time_range"], now):
|
||||
ret.append(event)
|
||||
else:
|
||||
ret.append(event)
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to monitor certificate expiration dates from files on the filesystem.
|
||||
|
||||
.. versionadded:: 3000
|
||||
|
@ -7,24 +7,26 @@ Beacon to monitor certificate expiration dates from files on the filesystem.
|
|||
:maintainer: <devops@eitr.tech>
|
||||
:maturity: new
|
||||
:depends: OpenSSL
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from datetime import datetime
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
# pylint: enable=import-error,no-name-in-module,redefined-builtin,3rd-party-module-not-gated
|
||||
import salt.utils.files
|
||||
|
||||
# Import salt libs
|
||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin,3rd-party-module-not-gated
|
||||
from salt.ext.six.moves import map as _map
|
||||
from salt.ext.six.moves import range as _range
|
||||
# pylint: enable=import-error,no-name-in-module,redefined-builtin,3rd-party-module-not-gated
|
||||
import salt.utils.files
|
||||
|
||||
|
||||
# Import Third Party Libs
|
||||
try:
|
||||
from OpenSSL import crypto
|
||||
|
||||
HAS_OPENSSL = True
|
||||
except ImportError:
|
||||
HAS_OPENSSL = False
|
||||
|
@ -33,7 +35,7 @@ log = logging.getLogger(__name__)
|
|||
|
||||
DEFAULT_NOTIFY_DAYS = 45
|
||||
|
||||
__virtualname__ = 'cert_info'
|
||||
__virtualname__ = "cert_info"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -44,24 +46,26 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(_map(_config.update, config))
|
||||
|
||||
# Configuration for cert_info beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for cert_info beacon must be a list.')
|
||||
return False, ("Configuration for cert_info beacon must be a list.")
|
||||
|
||||
if 'files' not in _config:
|
||||
return False, ('Configuration for cert_info beacon '
|
||||
'must contain files option.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if "files" not in _config:
|
||||
return (
|
||||
False,
|
||||
("Configuration for cert_info beacon must contain files option."),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Monitor the certificate files on the minion.
|
||||
|
||||
Specify a notification threshold in days and only emit a beacon if any certificates are
|
||||
|
@ -81,7 +85,7 @@ def beacon(config):
|
|||
- notify_days: 45
|
||||
- interval: 86400
|
||||
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
certificates = []
|
||||
CryptoError = crypto.Error # pylint: disable=invalid-name
|
||||
|
@ -89,75 +93,97 @@ def beacon(config):
|
|||
_config = {}
|
||||
list(_map(_config.update, config))
|
||||
|
||||
global_notify_days = _config.get('notify_days', DEFAULT_NOTIFY_DAYS)
|
||||
global_notify_days = _config.get("notify_days", DEFAULT_NOTIFY_DAYS)
|
||||
|
||||
for cert_path in _config.get('files', []):
|
||||
for cert_path in _config.get("files", []):
|
||||
notify_days = global_notify_days
|
||||
|
||||
if isinstance(cert_path, dict):
|
||||
try:
|
||||
notify_days = cert_path[cert_path.keys()[0]].get('notify_days', global_notify_days)
|
||||
notify_days = cert_path[cert_path.keys()[0]].get(
|
||||
"notify_days", global_notify_days
|
||||
)
|
||||
cert_path = cert_path.keys()[0]
|
||||
except IndexError as exc:
|
||||
log.error('Unable to load certificate %s (%s)', cert_path, exc)
|
||||
log.error("Unable to load certificate %s (%s)", cert_path, exc)
|
||||
continue
|
||||
|
||||
try:
|
||||
with salt.utils.files.fopen(cert_path) as fp_:
|
||||
cert = crypto.load_certificate(crypto.FILETYPE_PEM, fp_.read())
|
||||
except (IOError, CryptoError) as exc:
|
||||
log.error('Unable to load certificate %s (%s)', cert_path, exc)
|
||||
log.error("Unable to load certificate %s (%s)", cert_path, exc)
|
||||
continue
|
||||
|
||||
cert_date = datetime.strptime(cert.get_notAfter().decode(encoding='UTF-8'), "%Y%m%d%H%M%SZ")
|
||||
cert_date = datetime.strptime(
|
||||
cert.get_notAfter().decode(encoding="UTF-8"), "%Y%m%d%H%M%SZ"
|
||||
)
|
||||
date_diff = (cert_date - datetime.today()).days
|
||||
log.debug('Certificate %s expires in %s days.', cert_path, date_diff)
|
||||
log.debug("Certificate %s expires in %s days.", cert_path, date_diff)
|
||||
|
||||
if notify_days < 0 or date_diff <= notify_days:
|
||||
log.debug('Certificate %s triggered beacon due to %s day notification threshold.', cert_path, notify_days)
|
||||
log.debug(
|
||||
"Certificate %s triggered beacon due to %s day notification threshold.",
|
||||
cert_path,
|
||||
notify_days,
|
||||
)
|
||||
extensions = []
|
||||
for ext in _range(0, cert.get_extension_count()):
|
||||
extensions.append(
|
||||
{
|
||||
'ext_name': cert.get_extension(ext).get_short_name().decode(encoding='UTF-8'),
|
||||
'ext_data': str(cert.get_extension(ext))
|
||||
"ext_name": cert.get_extension(ext)
|
||||
.get_short_name()
|
||||
.decode(encoding="UTF-8"),
|
||||
"ext_data": str(cert.get_extension(ext)),
|
||||
}
|
||||
)
|
||||
|
||||
certificates.append(
|
||||
{
|
||||
'cert_path': cert_path,
|
||||
'issuer': ','.join(
|
||||
['{0}="{1}"'.format(
|
||||
t[0].decode(encoding='UTF-8'),
|
||||
t[1].decode(encoding='UTF-8')
|
||||
) for t in cert.get_issuer().get_components()]),
|
||||
'issuer_dict': {
|
||||
k.decode('UTF-8'): v.decode('UTF-8') for k, v in cert.get_issuer().get_components()
|
||||
"cert_path": cert_path,
|
||||
"issuer": ",".join(
|
||||
[
|
||||
'{0}="{1}"'.format(
|
||||
t[0].decode(encoding="UTF-8"),
|
||||
t[1].decode(encoding="UTF-8"),
|
||||
)
|
||||
for t in cert.get_issuer().get_components()
|
||||
]
|
||||
),
|
||||
"issuer_dict": {
|
||||
k.decode("UTF-8"): v.decode("UTF-8")
|
||||
for k, v in cert.get_issuer().get_components()
|
||||
},
|
||||
'notAfter_raw': cert.get_notAfter().decode(encoding='UTF-8'),
|
||||
'notAfter': cert_date.strftime("%Y-%m-%d %H:%M:%SZ"),
|
||||
'notBefore_raw': cert.get_notBefore().decode(encoding='UTF-8'),
|
||||
'notBefore': datetime.strptime(
|
||||
cert.get_notBefore().decode(encoding='UTF-8'), "%Y%m%d%H%M%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%SZ"),
|
||||
'serial_number': cert.get_serial_number(),
|
||||
'signature_algorithm': cert.get_signature_algorithm().decode(encoding='UTF-8'),
|
||||
'subject': ','.join(
|
||||
['{0}="{1}"'.format(
|
||||
t[0].decode(encoding='UTF-8'),
|
||||
t[1].decode(encoding='UTF-8')
|
||||
) for t in cert.get_subject().get_components()]),
|
||||
'subject_dict': {
|
||||
k.decode('UTF-8'): v.decode('UTF-8') for k, v in cert.get_subject().get_components()
|
||||
"notAfter_raw": cert.get_notAfter().decode(encoding="UTF-8"),
|
||||
"notAfter": cert_date.strftime("%Y-%m-%d %H:%M:%SZ"),
|
||||
"notBefore_raw": cert.get_notBefore().decode(encoding="UTF-8"),
|
||||
"notBefore": datetime.strptime(
|
||||
cert.get_notBefore().decode(encoding="UTF-8"), "%Y%m%d%H%M%SZ"
|
||||
).strftime("%Y-%m-%d %H:%M:%SZ"),
|
||||
"serial_number": cert.get_serial_number(),
|
||||
"signature_algorithm": cert.get_signature_algorithm().decode(
|
||||
encoding="UTF-8"
|
||||
),
|
||||
"subject": ",".join(
|
||||
[
|
||||
'{0}="{1}"'.format(
|
||||
t[0].decode(encoding="UTF-8"),
|
||||
t[1].decode(encoding="UTF-8"),
|
||||
)
|
||||
for t in cert.get_subject().get_components()
|
||||
]
|
||||
),
|
||||
"subject_dict": {
|
||||
k.decode("UTF-8"): v.decode("UTF-8")
|
||||
for k, v in cert.get_subject().get_components()
|
||||
},
|
||||
'version': cert.get_version(),
|
||||
'extensions': extensions,
|
||||
'has_expired': cert.has_expired()
|
||||
"version": cert.get_version(),
|
||||
"extensions": extensions,
|
||||
"has_expired": cert.has_expired(),
|
||||
}
|
||||
)
|
||||
|
||||
if certificates:
|
||||
ret.append({'certificates': certificates})
|
||||
ret.append({"certificates": certificates})
|
||||
|
||||
return ret
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to monitor disk usage.
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
|
||||
:depends: python-psutil
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
|
@ -17,13 +18,14 @@ import salt.utils.platform
|
|||
# Import Third Party Libs
|
||||
try:
|
||||
import psutil
|
||||
|
||||
HAS_PSUTIL = True
|
||||
except ImportError:
|
||||
HAS_PSUTIL = False
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'diskusage'
|
||||
__virtualname__ = "diskusage"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -34,18 +36,17 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for diskusage beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for diskusage beacon '
|
||||
'must be a list.')
|
||||
return True, 'Valid beacon configuration'
|
||||
return False, ("Configuration for diskusage beacon must be a list.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
r'''
|
||||
r"""
|
||||
Monitor the disk usage of the minion
|
||||
|
||||
Specify thresholds for each disk and only emit a beacon if any of them are
|
||||
|
@ -84,7 +85,7 @@ def beacon(config):
|
|||
which means that if a regular expression matches another defined mount point,
|
||||
it will override the previously defined threshold.
|
||||
|
||||
'''
|
||||
"""
|
||||
parts = psutil.disk_partitions(all=True)
|
||||
ret = []
|
||||
for mounts in config:
|
||||
|
@ -93,16 +94,16 @@ def beacon(config):
|
|||
# Because we're using regular expressions
|
||||
# if our mount doesn't end with a $, insert one.
|
||||
mount_re = mount
|
||||
if not mount.endswith('$'):
|
||||
mount_re = '{0}$'.format(mount)
|
||||
if not mount.endswith("$"):
|
||||
mount_re = "{0}$".format(mount)
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
# mount_re comes in formatted with a $ at the end
|
||||
# can be `C:\\$` or `C:\\\\$`
|
||||
# re string must be like `C:\\\\` regardless of \\ or \\\\
|
||||
# also, psutil returns uppercase
|
||||
mount_re = re.sub(r':\\\$', r':\\\\', mount_re)
|
||||
mount_re = re.sub(r':\\\\\$', r':\\\\', mount_re)
|
||||
mount_re = re.sub(r":\\\$", r":\\\\", mount_re)
|
||||
mount_re = re.sub(r":\\\\\$", r":\\\\", mount_re)
|
||||
mount_re = mount_re.upper()
|
||||
|
||||
for part in parts:
|
||||
|
@ -112,14 +113,14 @@ def beacon(config):
|
|||
try:
|
||||
_current_usage = psutil.disk_usage(_mount)
|
||||
except OSError:
|
||||
log.warning('%s is not a valid mount point.', _mount)
|
||||
log.warning("%s is not a valid mount point.", _mount)
|
||||
continue
|
||||
|
||||
current_usage = _current_usage.percent
|
||||
monitor_usage = mounts[mount]
|
||||
if '%' in monitor_usage:
|
||||
monitor_usage = re.sub('%', '', monitor_usage)
|
||||
if "%" in monitor_usage:
|
||||
monitor_usage = re.sub("%", "", monitor_usage)
|
||||
monitor_usage = float(monitor_usage)
|
||||
if current_usage >= monitor_usage:
|
||||
ret.append({'diskusage': current_usage, 'mount': _mount})
|
||||
ret.append({"diskusage": current_usage, "mount": _mount})
|
||||
return ret
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to emit when a display is available to a linux machine
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Salt libs
|
||||
|
@ -15,14 +16,14 @@ from salt.ext.six.moves import map
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'glxinfo'
|
||||
__virtualname__ = "glxinfo"
|
||||
|
||||
last_state = {}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
||||
which_result = salt.utils.path.which('glxinfo')
|
||||
which_result = salt.utils.path.which("glxinfo")
|
||||
if which_result is None:
|
||||
return False
|
||||
else:
|
||||
|
@ -30,24 +31,29 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for glxinfo beacon should be a dictionary
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for glxinfo beacon must be a list.')
|
||||
return False, ("Configuration for glxinfo beacon must be a list.")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'user' not in _config:
|
||||
return False, ('Configuration for glxinfo beacon must '
|
||||
'include a user as glxinfo is not available to root.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if "user" not in _config:
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for glxinfo beacon must "
|
||||
"include a user as glxinfo is not available to root."
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Emit the status of a connected display to the minion
|
||||
|
||||
Mainly this is used to detect when the display fails to connect
|
||||
|
@ -60,23 +66,24 @@ def beacon(config):
|
|||
- user: frank
|
||||
- screen_event: True
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
log.trace('glxinfo beacon starting')
|
||||
log.trace("glxinfo beacon starting")
|
||||
ret = []
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
retcode = __salt__['cmd.retcode']('DISPLAY=:0 glxinfo',
|
||||
runas=_config['user'], python_shell=True)
|
||||
retcode = __salt__["cmd.retcode"](
|
||||
"DISPLAY=:0 glxinfo", runas=_config["user"], python_shell=True
|
||||
)
|
||||
|
||||
if 'screen_event' in _config and _config['screen_event']:
|
||||
last_value = last_state.get('screen_available', False)
|
||||
if "screen_event" in _config and _config["screen_event"]:
|
||||
last_value = last_state.get("screen_available", False)
|
||||
screen_available = retcode == 0
|
||||
if last_value != screen_available or 'screen_available' not in last_state:
|
||||
ret.append({'tag': 'screen_event', 'screen_available': screen_available})
|
||||
if last_value != screen_available or "screen_available" not in last_state:
|
||||
ret.append({"tag": "screen_event", "screen_available": screen_available})
|
||||
|
||||
last_state['screen_available'] = screen_available
|
||||
last_state["screen_available"] = screen_available
|
||||
|
||||
return ret
|
||||
|
|
|
@ -1,66 +1,69 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Watch current connections of haproxy server backends.
|
||||
Fire an event when over a specified threshold.
|
||||
|
||||
.. versionadded:: 2016.11.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'haproxy'
|
||||
__virtualname__ = "haproxy"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Only load the module if haproxyctl module is installed
|
||||
'''
|
||||
if 'haproxy.get_sessions' in __salt__:
|
||||
"""
|
||||
if "haproxy.get_sessions" in __salt__:
|
||||
return __virtualname__
|
||||
else:
|
||||
log.debug('Not loading haproxy beacon')
|
||||
log.debug("Not loading haproxy beacon")
|
||||
return False
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for haproxy beacon must '
|
||||
'be a list.')
|
||||
return False, ("Configuration for haproxy beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'backends' not in _config:
|
||||
return False, ('Configuration for haproxy beacon '
|
||||
'requires backends.')
|
||||
if "backends" not in _config:
|
||||
return False, ("Configuration for haproxy beacon requires backends.")
|
||||
else:
|
||||
if not isinstance(_config['backends'], dict):
|
||||
return False, ('Backends for haproxy beacon '
|
||||
'must be a dictionary.')
|
||||
if not isinstance(_config["backends"], dict):
|
||||
return False, ("Backends for haproxy beacon must be a dictionary.")
|
||||
else:
|
||||
for backend in _config['backends']:
|
||||
log.debug('_config %s', _config['backends'][backend])
|
||||
if 'servers' not in _config['backends'][backend]:
|
||||
return False, ('Backends for haproxy beacon '
|
||||
'require servers.')
|
||||
for backend in _config["backends"]:
|
||||
log.debug("_config %s", _config["backends"][backend])
|
||||
if "servers" not in _config["backends"][backend]:
|
||||
return (
|
||||
False,
|
||||
("Backends for haproxy beacon require servers."),
|
||||
)
|
||||
else:
|
||||
_servers = _config['backends'][backend]['servers']
|
||||
_servers = _config["backends"][backend]["servers"]
|
||||
if not isinstance(_servers, list):
|
||||
return False, ('Servers for haproxy beacon '
|
||||
'must be a list.')
|
||||
return True, 'Valid beacon configuration'
|
||||
return (
|
||||
False,
|
||||
("Servers for haproxy beacon must be a list."),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Check if current number of sessions of a server for a specific haproxy backend
|
||||
is over a defined threshold.
|
||||
|
||||
|
@ -75,27 +78,30 @@ def beacon(config):
|
|||
- web1
|
||||
- web2
|
||||
- interval: 120
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
for backend in _config.get('backends', ()):
|
||||
backend_config = _config['backends'][backend]
|
||||
threshold = backend_config['threshold']
|
||||
for server in backend_config['servers']:
|
||||
scur = __salt__['haproxy.get_sessions'](server, backend)
|
||||
for backend in _config.get("backends", ()):
|
||||
backend_config = _config["backends"][backend]
|
||||
threshold = backend_config["threshold"]
|
||||
for server in backend_config["servers"]:
|
||||
scur = __salt__["haproxy.get_sessions"](server, backend)
|
||||
if scur:
|
||||
if int(scur) > int(threshold):
|
||||
_server = {'server': server,
|
||||
'scur': scur,
|
||||
'threshold': threshold,
|
||||
}
|
||||
log.debug('Emit because %s > %s'
|
||||
' for %s in %s', scur,
|
||||
threshold,
|
||||
server,
|
||||
backend)
|
||||
_server = {
|
||||
"server": server,
|
||||
"scur": scur,
|
||||
"threshold": threshold,
|
||||
}
|
||||
log.debug(
|
||||
"Emit because %s > %s" " for %s in %s",
|
||||
scur,
|
||||
threshold,
|
||||
server,
|
||||
backend,
|
||||
)
|
||||
ret.append(_server)
|
||||
return ret
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Watch files and translate the changes into salt events
|
||||
|
||||
:depends: - pyinotify Python module >= 0.9.5
|
||||
|
@ -13,9 +13,10 @@ Watch files and translate the changes into salt events
|
|||
:note: The `inotify` beacon only works on OSes that have `inotify`
|
||||
kernel support.
|
||||
|
||||
'''
|
||||
"""
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import collections
|
||||
import fnmatch
|
||||
import logging
|
||||
|
@ -24,25 +25,28 @@ import re
|
|||
|
||||
# Import salt libs
|
||||
import salt.ext.six
|
||||
|
||||
# pylint: disable=import-error
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import pyinotify
|
||||
|
||||
HAS_PYINOTIFY = True
|
||||
DEFAULT_MASK = pyinotify.IN_CREATE | pyinotify.IN_DELETE | pyinotify.IN_MODIFY
|
||||
MASKS = {}
|
||||
for var in dir(pyinotify):
|
||||
if var.startswith('IN_'):
|
||||
if var.startswith("IN_"):
|
||||
key = var[3:].lower()
|
||||
MASKS[key] = getattr(pyinotify, var)
|
||||
except ImportError:
|
||||
HAS_PYINOTIFY = False
|
||||
DEFAULT_MASK = None
|
||||
|
||||
__virtualname__ = 'inotify'
|
||||
__virtualname__ = "inotify"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -54,108 +58,146 @@ def __virtual__():
|
|||
|
||||
|
||||
def _get_mask(mask):
|
||||
'''
|
||||
"""
|
||||
Return the int that represents the mask
|
||||
'''
|
||||
"""
|
||||
return MASKS.get(mask, 0)
|
||||
|
||||
|
||||
def _enqueue(revent):
|
||||
'''
|
||||
"""
|
||||
Enqueue the event
|
||||
'''
|
||||
__context__['inotify.queue'].append(revent)
|
||||
"""
|
||||
__context__["inotify.queue"].append(revent)
|
||||
|
||||
|
||||
def _get_notifier(config):
|
||||
'''
|
||||
"""
|
||||
Check the context for the notifier and construct it if not present
|
||||
'''
|
||||
if 'inotify.notifier' not in __context__:
|
||||
__context__['inotify.queue'] = collections.deque()
|
||||
"""
|
||||
if "inotify.notifier" not in __context__:
|
||||
__context__["inotify.queue"] = collections.deque()
|
||||
wm = pyinotify.WatchManager()
|
||||
__context__['inotify.notifier'] = pyinotify.Notifier(wm, _enqueue)
|
||||
if ('coalesce' in config and
|
||||
isinstance(config['coalesce'], bool) and
|
||||
config['coalesce']):
|
||||
__context__['inotify.notifier'].coalesce_events()
|
||||
return __context__['inotify.notifier']
|
||||
__context__["inotify.notifier"] = pyinotify.Notifier(wm, _enqueue)
|
||||
if (
|
||||
"coalesce" in config
|
||||
and isinstance(config["coalesce"], bool)
|
||||
and config["coalesce"]
|
||||
):
|
||||
__context__["inotify.notifier"].coalesce_events()
|
||||
return __context__["inotify.notifier"]
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
|
||||
VALID_MASK = [
|
||||
'access',
|
||||
'attrib',
|
||||
'close_nowrite',
|
||||
'close_write',
|
||||
'create',
|
||||
'delete',
|
||||
'delete_self',
|
||||
'excl_unlink',
|
||||
'ignored',
|
||||
'modify',
|
||||
'moved_from',
|
||||
'moved_to',
|
||||
'move_self',
|
||||
'oneshot',
|
||||
'onlydir',
|
||||
'open',
|
||||
'unmount'
|
||||
"access",
|
||||
"attrib",
|
||||
"close_nowrite",
|
||||
"close_write",
|
||||
"create",
|
||||
"delete",
|
||||
"delete_self",
|
||||
"excl_unlink",
|
||||
"ignored",
|
||||
"modify",
|
||||
"moved_from",
|
||||
"moved_to",
|
||||
"move_self",
|
||||
"oneshot",
|
||||
"onlydir",
|
||||
"open",
|
||||
"unmount",
|
||||
]
|
||||
|
||||
# Configuration for inotify beacon should be a dict of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, 'Configuration for inotify beacon must be a list.'
|
||||
return False, "Configuration for inotify beacon must be a list."
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'files' not in _config:
|
||||
return False, 'Configuration for inotify beacon must include files.'
|
||||
if "files" not in _config:
|
||||
return False, "Configuration for inotify beacon must include files."
|
||||
else:
|
||||
if not isinstance(_config['files'], dict):
|
||||
return False, ('Configuration for inotify beacon invalid, '
|
||||
'files must be a dict.')
|
||||
if not isinstance(_config["files"], dict):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon invalid, "
|
||||
"files must be a dict."
|
||||
),
|
||||
)
|
||||
|
||||
for path in _config.get('files'):
|
||||
for path in _config.get("files"):
|
||||
|
||||
if not isinstance(_config['files'][path], dict):
|
||||
return False, ('Configuration for inotify beacon must '
|
||||
'be a list of dictionaries.')
|
||||
if not isinstance(_config["files"][path], dict):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon must "
|
||||
"be a list of dictionaries."
|
||||
),
|
||||
)
|
||||
else:
|
||||
if not any(j in ['mask',
|
||||
'recurse',
|
||||
'auto_add'] for j in _config['files'][path]):
|
||||
return False, ('Configuration for inotify beacon must '
|
||||
'contain mask, recurse or auto_add items.')
|
||||
if not any(
|
||||
j in ["mask", "recurse", "auto_add"]
|
||||
for j in _config["files"][path]
|
||||
):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon must "
|
||||
"contain mask, recurse or auto_add items."
|
||||
),
|
||||
)
|
||||
|
||||
if 'auto_add' in _config['files'][path]:
|
||||
if not isinstance(_config['files'][path]['auto_add'], bool):
|
||||
return False, ('Configuration for inotify beacon '
|
||||
'auto_add must be boolean.')
|
||||
if "auto_add" in _config["files"][path]:
|
||||
if not isinstance(_config["files"][path]["auto_add"], bool):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon "
|
||||
"auto_add must be boolean."
|
||||
),
|
||||
)
|
||||
|
||||
if 'recurse' in _config['files'][path]:
|
||||
if not isinstance(_config['files'][path]['recurse'], bool):
|
||||
return False, ('Configuration for inotify beacon '
|
||||
'recurse must be boolean.')
|
||||
if "recurse" in _config["files"][path]:
|
||||
if not isinstance(_config["files"][path]["recurse"], bool):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon "
|
||||
"recurse must be boolean."
|
||||
),
|
||||
)
|
||||
|
||||
if 'mask' in _config['files'][path]:
|
||||
if not isinstance(_config['files'][path]['mask'], list):
|
||||
return False, ('Configuration for inotify beacon '
|
||||
'mask must be list.')
|
||||
for mask in _config['files'][path]['mask']:
|
||||
if "mask" in _config["files"][path]:
|
||||
if not isinstance(_config["files"][path]["mask"], list):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon "
|
||||
"mask must be list."
|
||||
),
|
||||
)
|
||||
for mask in _config["files"][path]["mask"]:
|
||||
if mask not in VALID_MASK:
|
||||
return False, ('Configuration for inotify beacon '
|
||||
'invalid mask option {0}.'.format(mask))
|
||||
return True, 'Valid beacon configuration'
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for inotify beacon "
|
||||
"invalid mask option {0}.".format(mask)
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Watch the configured files
|
||||
|
||||
Example Config
|
||||
|
@ -218,7 +260,7 @@ def beacon(config):
|
|||
This option is top-level (at the same level as the path) and therefore
|
||||
affects all paths that are being watched. This is due to this option
|
||||
being at the Notifier level in pyinotify.
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
|
@ -230,34 +272,33 @@ def beacon(config):
|
|||
if notifier.check_events(1):
|
||||
notifier.read_events()
|
||||
notifier.process_events()
|
||||
queue = __context__['inotify.queue']
|
||||
queue = __context__["inotify.queue"]
|
||||
while queue:
|
||||
event = queue.popleft()
|
||||
|
||||
_append = True
|
||||
# Find the matching path in config
|
||||
path = event.path
|
||||
while path != '/':
|
||||
if path in _config.get('files', {}):
|
||||
while path != "/":
|
||||
if path in _config.get("files", {}):
|
||||
break
|
||||
path = os.path.dirname(path)
|
||||
|
||||
excludes = _config['files'][path].get('exclude', '')
|
||||
excludes = _config["files"][path].get("exclude", "")
|
||||
|
||||
if excludes and isinstance(excludes, list):
|
||||
for exclude in excludes:
|
||||
if isinstance(exclude, dict):
|
||||
_exclude = next(iter(exclude))
|
||||
if exclude[_exclude].get('regex', False):
|
||||
if exclude[_exclude].get("regex", False):
|
||||
try:
|
||||
if re.search(_exclude, event.pathname):
|
||||
_append = False
|
||||
except Exception: # pylint: disable=broad-except
|
||||
log.warning('Failed to compile regex: %s',
|
||||
_exclude)
|
||||
log.warning("Failed to compile regex: %s", _exclude)
|
||||
else:
|
||||
exclude = _exclude
|
||||
elif '*' in exclude:
|
||||
elif "*" in exclude:
|
||||
if fnmatch.fnmatch(event.pathname, exclude):
|
||||
_append = False
|
||||
else:
|
||||
|
@ -265,12 +306,14 @@ def beacon(config):
|
|||
_append = False
|
||||
|
||||
if _append:
|
||||
sub = {'tag': event.path,
|
||||
'path': event.pathname,
|
||||
'change': event.maskname}
|
||||
sub = {
|
||||
"tag": event.path,
|
||||
"path": event.pathname,
|
||||
"change": event.maskname,
|
||||
}
|
||||
ret.append(sub)
|
||||
else:
|
||||
log.info('Excluding %s from event for %s', event.pathname, path)
|
||||
log.info("Excluding %s from event for %s", event.pathname, path)
|
||||
|
||||
# Get paths currently being watched
|
||||
current = set()
|
||||
|
@ -279,10 +322,10 @@ def beacon(config):
|
|||
|
||||
# Update existing watches and add new ones
|
||||
# TODO: make the config handle more options
|
||||
for path in _config.get('files', ()):
|
||||
for path in _config.get("files", ()):
|
||||
|
||||
if isinstance(_config['files'][path], dict):
|
||||
mask = _config['files'][path].get('mask', DEFAULT_MASK)
|
||||
if isinstance(_config["files"][path], dict):
|
||||
mask = _config["files"][path].get("mask", DEFAULT_MASK)
|
||||
if isinstance(mask, list):
|
||||
r_mask = 0
|
||||
for sub in mask:
|
||||
|
@ -292,8 +335,8 @@ def beacon(config):
|
|||
else:
|
||||
r_mask = mask
|
||||
mask = r_mask
|
||||
rec = _config['files'][path].get('recurse', False)
|
||||
auto_add = _config['files'][path].get('auto_add', False)
|
||||
rec = _config["files"][path].get("recurse", False)
|
||||
auto_add = _config["files"][path].get("auto_add", False)
|
||||
else:
|
||||
mask = DEFAULT_MASK
|
||||
rec = False
|
||||
|
@ -310,7 +353,7 @@ def beacon(config):
|
|||
if update:
|
||||
wm.update_watch(wd, mask=mask, rec=rec, auto_add=auto_add)
|
||||
elif os.path.exists(path):
|
||||
excludes = _config['files'][path].get('exclude', '')
|
||||
excludes = _config["files"][path].get("exclude", "")
|
||||
excl = None
|
||||
if isinstance(excludes, list):
|
||||
excl = []
|
||||
|
@ -328,6 +371,6 @@ def beacon(config):
|
|||
|
||||
|
||||
def close(config):
|
||||
if 'inotify.notifier' in __context__:
|
||||
__context__['inotify.notifier'].stop()
|
||||
del __context__['inotify.notifier']
|
||||
if "inotify.notifier" in __context__:
|
||||
__context__["inotify.notifier"].stop()
|
||||
del __context__["inotify.notifier"]
|
||||
|
|
|
@ -1,27 +1,31 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
A simple beacon to watch journald for specific entries
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
import salt.ext.six
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.data
|
||||
import salt.ext.six
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import systemd.journal # pylint: disable=no-name-in-module
|
||||
|
||||
HAS_SYSTEMD = True
|
||||
except ImportError:
|
||||
HAS_SYSTEMD = False
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'journald'
|
||||
__virtualname__ = "journald"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -31,38 +35,43 @@ def __virtual__():
|
|||
|
||||
|
||||
def _get_journal():
|
||||
'''
|
||||
"""
|
||||
Return the active running journal object
|
||||
'''
|
||||
if 'systemd.journald' in __context__:
|
||||
return __context__['systemd.journald']
|
||||
__context__['systemd.journald'] = systemd.journal.Reader()
|
||||
"""
|
||||
if "systemd.journald" in __context__:
|
||||
return __context__["systemd.journald"]
|
||||
__context__["systemd.journald"] = systemd.journal.Reader()
|
||||
# get to the end of the journal
|
||||
__context__['systemd.journald'].seek_tail()
|
||||
__context__['systemd.journald'].get_previous()
|
||||
return __context__['systemd.journald']
|
||||
__context__["systemd.journald"].seek_tail()
|
||||
__context__["systemd.journald"].get_previous()
|
||||
return __context__["systemd.journald"]
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for journald beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return (False, 'Configuration for journald beacon must be a list.')
|
||||
return (False, "Configuration for journald beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
for name in _config.get('services', {}):
|
||||
if not isinstance(_config['services'][name], dict):
|
||||
return False, ('Services configuration for journald beacon '
|
||||
'must be a list of dictionaries.')
|
||||
return True, 'Valid beacon configuration'
|
||||
for name in _config.get("services", {}):
|
||||
if not isinstance(_config["services"][name], dict):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Services configuration for journald beacon "
|
||||
"must be a list of dictionaries."
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
The journald beacon allows for the systemd journal to be parsed and linked
|
||||
objects to be turned into events.
|
||||
|
||||
|
@ -76,7 +85,7 @@ def beacon(config):
|
|||
sshd:
|
||||
SYSLOG_IDENTIFIER: sshd
|
||||
PRIORITY: 6
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
journal = _get_journal()
|
||||
|
||||
|
@ -88,17 +97,17 @@ def beacon(config):
|
|||
if not cur:
|
||||
break
|
||||
|
||||
for name in _config.get('services', {}):
|
||||
for name in _config.get("services", {}):
|
||||
n_flag = 0
|
||||
for key in _config['services'][name]:
|
||||
for key in _config["services"][name]:
|
||||
if isinstance(key, salt.ext.six.string_types):
|
||||
key = salt.utils.data.decode(key)
|
||||
if key in cur:
|
||||
if _config['services'][name][key] == cur[key]:
|
||||
if _config["services"][name][key] == cur[key]:
|
||||
n_flag += 1
|
||||
if n_flag == len(_config['services'][name]):
|
||||
if n_flag == len(_config["services"][name]):
|
||||
# Match!
|
||||
sub = salt.utils.data.simple_types_filter(cur)
|
||||
sub.update({'tag': name})
|
||||
sub.update({"tag": name})
|
||||
ret.append(sub)
|
||||
return ret
|
||||
|
|
|
@ -1,23 +1,23 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to emit system load averages
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.platform
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import Py3 compat
|
||||
from salt.ext.six.moves import zip
|
||||
from salt.ext.six.moves import map, zip
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'load'
|
||||
__virtualname__ = "load"
|
||||
|
||||
LAST_STATUS = {}
|
||||
|
||||
|
@ -30,53 +30,76 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
|
||||
# Configuration for load beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for load beacon must be a list.')
|
||||
return False, ("Configuration for load beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'emitatstartup' in _config:
|
||||
if not isinstance(_config['emitatstartup'], bool):
|
||||
return False, ('Configuration for load beacon option '
|
||||
'emitatstartup must be a boolean.')
|
||||
if "emitatstartup" in _config:
|
||||
if not isinstance(_config["emitatstartup"], bool):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for load beacon option "
|
||||
"emitatstartup must be a boolean."
|
||||
),
|
||||
)
|
||||
|
||||
if 'onchangeonly' in _config:
|
||||
if not isinstance(_config['onchangeonly'], bool):
|
||||
return False, ('Configuration for load beacon option '
|
||||
'onchangeonly must be a boolean.')
|
||||
if "onchangeonly" in _config:
|
||||
if not isinstance(_config["onchangeonly"], bool):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for load beacon option "
|
||||
"onchangeonly must be a boolean."
|
||||
),
|
||||
)
|
||||
|
||||
if 'averages' not in _config:
|
||||
return False, ('Averages configuration is required'
|
||||
' for load beacon.')
|
||||
if "averages" not in _config:
|
||||
return False, ("Averages configuration is required for load beacon.")
|
||||
else:
|
||||
|
||||
if not any(j in ['1m', '5m', '15m'] for j
|
||||
in _config.get('averages', {})):
|
||||
return False, ('Averages configuration for load beacon '
|
||||
'must contain 1m, 5m or 15m items.')
|
||||
if not any(j in ["1m", "5m", "15m"] for j in _config.get("averages", {})):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Averages configuration for load beacon "
|
||||
"must contain 1m, 5m or 15m items."
|
||||
),
|
||||
)
|
||||
|
||||
for item in ['1m', '5m', '15m']:
|
||||
if not isinstance(_config['averages'][item], list):
|
||||
return False, ('Averages configuration for load beacon: '
|
||||
'1m, 5m and 15m items must be '
|
||||
'a list of two items.')
|
||||
for item in ["1m", "5m", "15m"]:
|
||||
if not isinstance(_config["averages"][item], list):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Averages configuration for load beacon: "
|
||||
"1m, 5m and 15m items must be "
|
||||
"a list of two items."
|
||||
),
|
||||
)
|
||||
else:
|
||||
if len(_config['averages'][item]) != 2:
|
||||
return False, ('Configuration for load beacon: '
|
||||
'1m, 5m and 15m items must be '
|
||||
'a list of two items.')
|
||||
if len(_config["averages"][item]) != 2:
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for load beacon: "
|
||||
"1m, 5m and 15m items must be "
|
||||
"a list of two items."
|
||||
),
|
||||
)
|
||||
|
||||
return True, 'Valid beacon configuration'
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Emit the load averages of this host.
|
||||
|
||||
Specify thresholds for each load average
|
||||
|
@ -108,70 +131,79 @@ def beacon(config):
|
|||
- emitatstartup: True
|
||||
- onchangeonly: False
|
||||
|
||||
'''
|
||||
log.trace('load beacon starting')
|
||||
"""
|
||||
log.trace("load beacon starting")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
# Default config if not present
|
||||
if 'emitatstartup' not in _config:
|
||||
_config['emitatstartup'] = True
|
||||
if 'onchangeonly' not in _config:
|
||||
_config['onchangeonly'] = False
|
||||
if "emitatstartup" not in _config:
|
||||
_config["emitatstartup"] = True
|
||||
if "onchangeonly" not in _config:
|
||||
_config["onchangeonly"] = False
|
||||
|
||||
ret = []
|
||||
avgs = os.getloadavg()
|
||||
avg_keys = ['1m', '5m', '15m']
|
||||
avg_keys = ["1m", "5m", "15m"]
|
||||
avg_dict = dict(zip(avg_keys, avgs))
|
||||
|
||||
if _config['onchangeonly']:
|
||||
if _config["onchangeonly"]:
|
||||
if not LAST_STATUS:
|
||||
for k in ['1m', '5m', '15m']:
|
||||
for k in ["1m", "5m", "15m"]:
|
||||
LAST_STATUS[k] = avg_dict[k]
|
||||
if not _config['emitatstartup']:
|
||||
if not _config["emitatstartup"]:
|
||||
log.debug("Don't emit because emitatstartup is False")
|
||||
return ret
|
||||
|
||||
send_beacon = False
|
||||
|
||||
# Check each entry for threshold
|
||||
for k in ['1m', '5m', '15m']:
|
||||
if k in _config.get('averages', {}):
|
||||
if _config['onchangeonly']:
|
||||
for k in ["1m", "5m", "15m"]:
|
||||
if k in _config.get("averages", {}):
|
||||
if _config["onchangeonly"]:
|
||||
# Emit if current is more that threshold and old value less
|
||||
# that threshold
|
||||
if float(avg_dict[k]) > float(_config['averages'][k][1]) and \
|
||||
float(LAST_STATUS[k]) < float(_config['averages'][k][1]):
|
||||
log.debug('Emit because %f > %f and last was '
|
||||
'%f', float(avg_dict[k]),
|
||||
float(_config['averages'][k][1]),
|
||||
float(LAST_STATUS[k]))
|
||||
if float(avg_dict[k]) > float(_config["averages"][k][1]) and float(
|
||||
LAST_STATUS[k]
|
||||
) < float(_config["averages"][k][1]):
|
||||
log.debug(
|
||||
"Emit because %f > %f and last was " "%f",
|
||||
float(avg_dict[k]),
|
||||
float(_config["averages"][k][1]),
|
||||
float(LAST_STATUS[k]),
|
||||
)
|
||||
send_beacon = True
|
||||
break
|
||||
# Emit if current is less that threshold and old value more
|
||||
# that threshold
|
||||
if float(avg_dict[k]) < float(_config['averages'][k][0]) and \
|
||||
float(LAST_STATUS[k]) > float(_config['averages'][k][0]):
|
||||
log.debug('Emit because %f < %f and last was'
|
||||
'%f', float(avg_dict[k]),
|
||||
float(_config['averages'][k][0]),
|
||||
float(LAST_STATUS[k]))
|
||||
if float(avg_dict[k]) < float(_config["averages"][k][0]) and float(
|
||||
LAST_STATUS[k]
|
||||
) > float(_config["averages"][k][0]):
|
||||
log.debug(
|
||||
"Emit because %f < %f and last was" "%f",
|
||||
float(avg_dict[k]),
|
||||
float(_config["averages"][k][0]),
|
||||
float(LAST_STATUS[k]),
|
||||
)
|
||||
send_beacon = True
|
||||
break
|
||||
else:
|
||||
# Emit no matter LAST_STATUS
|
||||
if float(avg_dict[k]) < float(_config['averages'][k][0]) or \
|
||||
float(avg_dict[k]) > float(_config['averages'][k][1]):
|
||||
log.debug('Emit because %f < %f or > '
|
||||
'%f', float(avg_dict[k]),
|
||||
float(_config['averages'][k][0]),
|
||||
float(_config['averages'][k][1]))
|
||||
if float(avg_dict[k]) < float(_config["averages"][k][0]) or float(
|
||||
avg_dict[k]
|
||||
) > float(_config["averages"][k][1]):
|
||||
log.debug(
|
||||
"Emit because %f < %f or > " "%f",
|
||||
float(avg_dict[k]),
|
||||
float(_config["averages"][k][0]),
|
||||
float(_config["averages"][k][1]),
|
||||
)
|
||||
send_beacon = True
|
||||
break
|
||||
|
||||
if _config['onchangeonly']:
|
||||
for k in ['1m', '5m', '15m']:
|
||||
if _config["onchangeonly"]:
|
||||
for k in ["1m", "5m", "15m"]:
|
||||
LAST_STATUS[k] = avg_dict[k]
|
||||
|
||||
if send_beacon:
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to fire events at specific log messages.
|
||||
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
|
@ -15,21 +16,21 @@ import salt.utils.files
|
|||
import salt.utils.platform
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
|
||||
try:
|
||||
import re
|
||||
|
||||
HAS_REGEX = True
|
||||
except ImportError:
|
||||
HAS_REGEX = False
|
||||
|
||||
__virtualname__ = 'log'
|
||||
LOC_KEY = 'log.loc'
|
||||
__virtualname__ = "log"
|
||||
LOC_KEY = "log.loc"
|
||||
|
||||
SKEL = {}
|
||||
SKEL['tag'] = ''
|
||||
SKEL['match'] = 'no'
|
||||
SKEL['raw'] = ''
|
||||
SKEL['error'] = ''
|
||||
SKEL["tag"] = ""
|
||||
SKEL["match"] = "no"
|
||||
SKEL["raw"] = ""
|
||||
SKEL["error"] = ""
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -42,33 +43,32 @@ def __virtual__():
|
|||
|
||||
|
||||
def _get_loc():
|
||||
'''
|
||||
"""
|
||||
return the active file location
|
||||
'''
|
||||
"""
|
||||
if LOC_KEY in __context__:
|
||||
return __context__[LOC_KEY]
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
# Configuration for log beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for log beacon must be a list.')
|
||||
return False, ("Configuration for log beacon must be a list.")
|
||||
|
||||
if 'file' not in _config:
|
||||
return False, ('Configuration for log beacon '
|
||||
'must contain file option.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if "file" not in _config:
|
||||
return False, ("Configuration for log beacon must contain file option.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
# TODO: match values should be returned in the event
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Read the log file and return match whole string
|
||||
|
||||
.. code-block:: yaml
|
||||
|
@ -85,20 +85,20 @@ def beacon(config):
|
|||
regex matching is based on the `re`_ module
|
||||
|
||||
.. _re: https://docs.python.org/3.6/library/re.html#regular-expression-syntax
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
ret = []
|
||||
|
||||
if 'file' not in _config:
|
||||
if "file" not in _config:
|
||||
event = SKEL.copy()
|
||||
event['tag'] = 'global'
|
||||
event['error'] = 'file not defined in config'
|
||||
event["tag"] = "global"
|
||||
event["error"] = "file not defined in config"
|
||||
ret.append(event)
|
||||
return ret
|
||||
|
||||
with salt.utils.files.fopen(_config['file'], 'r') as fp_:
|
||||
with salt.utils.files.fopen(_config["file"], "r") as fp_:
|
||||
loc = __context__.get(LOC_KEY, 0)
|
||||
if loc == 0:
|
||||
fp_.seek(0, 2)
|
||||
|
@ -110,20 +110,20 @@ def beacon(config):
|
|||
fp_.seek(loc)
|
||||
|
||||
txt = fp_.read()
|
||||
log.info('txt %s', txt)
|
||||
log.info("txt %s", txt)
|
||||
|
||||
d = {}
|
||||
for tag in _config.get('tags', {}):
|
||||
if 'regex' not in _config['tags'][tag]:
|
||||
for tag in _config.get("tags", {}):
|
||||
if "regex" not in _config["tags"][tag]:
|
||||
continue
|
||||
if len(_config['tags'][tag]['regex']) < 1:
|
||||
if len(_config["tags"][tag]["regex"]) < 1:
|
||||
continue
|
||||
try:
|
||||
d[tag] = re.compile(r'{0}'.format(_config['tags'][tag]['regex']))
|
||||
d[tag] = re.compile(r"{0}".format(_config["tags"][tag]["regex"]))
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
event = SKEL.copy()
|
||||
event['tag'] = tag
|
||||
event['error'] = 'bad regex'
|
||||
event["tag"] = tag
|
||||
event["error"] = "bad regex"
|
||||
ret.append(event)
|
||||
|
||||
for line in txt.splitlines():
|
||||
|
@ -132,13 +132,13 @@ def beacon(config):
|
|||
m = reg.match(line)
|
||||
if m:
|
||||
event = SKEL.copy()
|
||||
event['tag'] = tag
|
||||
event['raw'] = line
|
||||
event['match'] = 'yes'
|
||||
event["tag"] = tag
|
||||
event["raw"] = line
|
||||
event["match"] = "yes"
|
||||
ret.append(event)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
event = SKEL.copy()
|
||||
event['tag'] = tag
|
||||
event['error'] = 'bad match'
|
||||
event["tag"] = tag
|
||||
event["error"] = "bad match"
|
||||
ret.append(event)
|
||||
return ret
|
||||
|
|
|
@ -1,28 +1,31 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to monitor memory usage.
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
:depends: python-psutil
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import Third Party Libs
|
||||
try:
|
||||
import psutil
|
||||
|
||||
HAS_PSUTIL = True
|
||||
except ImportError:
|
||||
HAS_PSUTIL = False
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'memusage'
|
||||
__virtualname__ = "memusage"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -33,26 +36,24 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for memusage beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for memusage '
|
||||
'beacon must be a list.')
|
||||
return False, ("Configuration for memusage beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'percent' not in _config:
|
||||
return False, ('Configuration for memusage beacon '
|
||||
'requires percent.')
|
||||
if "percent" not in _config:
|
||||
return False, ("Configuration for memusage beacon requires percent.")
|
||||
|
||||
return True, 'Valid beacon configuration'
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Monitor the memory usage of the minion
|
||||
|
||||
Specify thresholds for percent used and only emit a beacon
|
||||
|
@ -63,7 +64,7 @@ def beacon(config):
|
|||
beacons:
|
||||
memusage:
|
||||
- percent: 63%
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
_config = {}
|
||||
|
@ -72,10 +73,10 @@ def beacon(config):
|
|||
_current_usage = psutil.virtual_memory()
|
||||
|
||||
current_usage = _current_usage.percent
|
||||
monitor_usage = _config['percent']
|
||||
if '%' in monitor_usage:
|
||||
monitor_usage = re.sub('%', '', monitor_usage)
|
||||
monitor_usage = _config["percent"]
|
||||
if "%" in monitor_usage:
|
||||
monitor_usage = re.sub("%", "", monitor_usage)
|
||||
monitor_usage = float(monitor_usage)
|
||||
if current_usage >= monitor_usage:
|
||||
ret.append({'memusage': current_usage})
|
||||
ret.append({"memusage": current_usage})
|
||||
return ret
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Watch NAPALM functions and fire events on specific triggers
|
||||
===========================================================
|
||||
|
||||
|
@ -166,49 +166,51 @@ Event structure example:
|
|||
The event examplified above has been fired when the device
|
||||
identified by the Minion id ``edge01.bjm01`` has been synchronized
|
||||
with a NTP server at a stratum level greater than 5.
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import Python std lib
|
||||
import re
|
||||
import logging
|
||||
|
||||
import salt.utils.napalm
|
||||
|
||||
# Import Salt modules
|
||||
from salt.ext import six
|
||||
import salt.utils.napalm
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
_numeric_regex = re.compile(r'^(<|>|<=|>=|==|!=)\s*(\d+(\.\d+){0,1})$')
|
||||
_numeric_regex = re.compile(r"^(<|>|<=|>=|==|!=)\s*(\d+(\.\d+){0,1})$")
|
||||
# the numeric regex will match the right operand, e.g '>= 20', '< 100', '!= 20', '< 1000.12' etc.
|
||||
_numeric_operand = {
|
||||
'<': '__lt__',
|
||||
'>': '__gt__',
|
||||
'>=': '__ge__',
|
||||
'<=': '__le__',
|
||||
'==': '__eq__',
|
||||
'!=': '__ne__',
|
||||
"<": "__lt__",
|
||||
">": "__gt__",
|
||||
">=": "__ge__",
|
||||
"<=": "__le__",
|
||||
"==": "__eq__",
|
||||
"!=": "__ne__",
|
||||
} # mathematical operand - private method map
|
||||
|
||||
|
||||
__virtualname__ = 'napalm'
|
||||
__virtualname__ = "napalm"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
This beacon can only work when running under a regular or a proxy minion, managed through napalm.
|
||||
'''
|
||||
"""
|
||||
return salt.utils.napalm.virtual(__opts__, __virtualname__, __file__)
|
||||
|
||||
|
||||
def _compare(cur_cmp, cur_struct):
|
||||
'''
|
||||
"""
|
||||
Compares two objects and return a boolean value
|
||||
when there's a match.
|
||||
'''
|
||||
"""
|
||||
if isinstance(cur_cmp, dict) and isinstance(cur_struct, dict):
|
||||
log.debug('Comparing dict to dict')
|
||||
log.debug("Comparing dict to dict")
|
||||
for cmp_key, cmp_value in six.iteritems(cur_cmp):
|
||||
if cmp_key == '*':
|
||||
if cmp_key == "*":
|
||||
# matches any key from the source dictionary
|
||||
if isinstance(cmp_value, dict):
|
||||
found = False
|
||||
|
@ -237,69 +239,81 @@ def _compare(cur_cmp, cur_struct):
|
|||
else:
|
||||
return _compare(cmp_value, cur_struct[cmp_key])
|
||||
elif isinstance(cur_cmp, (list, tuple)) and isinstance(cur_struct, (list, tuple)):
|
||||
log.debug('Comparing list to list')
|
||||
log.debug("Comparing list to list")
|
||||
found = False
|
||||
for cur_cmp_ele in cur_cmp:
|
||||
for cur_struct_ele in cur_struct:
|
||||
found |= _compare(cur_cmp_ele, cur_struct_ele)
|
||||
return found
|
||||
elif isinstance(cur_cmp, dict) and isinstance(cur_struct, (list, tuple)):
|
||||
log.debug('Comparing dict to list (of dicts?)')
|
||||
log.debug("Comparing dict to list (of dicts?)")
|
||||
found = False
|
||||
for cur_struct_ele in cur_struct:
|
||||
found |= _compare(cur_cmp, cur_struct_ele)
|
||||
return found
|
||||
elif isinstance(cur_cmp, bool) and isinstance(cur_struct, bool):
|
||||
log.debug('Comparing booleans: %s ? %s', cur_cmp, cur_struct)
|
||||
log.debug("Comparing booleans: %s ? %s", cur_cmp, cur_struct)
|
||||
return cur_cmp == cur_struct
|
||||
elif isinstance(cur_cmp, (six.string_types, six.text_type)) and \
|
||||
isinstance(cur_struct, (six.string_types, six.text_type)):
|
||||
log.debug('Comparing strings (and regex?): %s ? %s', cur_cmp, cur_struct)
|
||||
elif isinstance(cur_cmp, (six.string_types, six.text_type)) and isinstance(
|
||||
cur_struct, (six.string_types, six.text_type)
|
||||
):
|
||||
log.debug("Comparing strings (and regex?): %s ? %s", cur_cmp, cur_struct)
|
||||
# Trying literal match
|
||||
matched = re.match(cur_cmp, cur_struct, re.I)
|
||||
if matched:
|
||||
return True
|
||||
return False
|
||||
elif isinstance(cur_cmp, (six.integer_types, float)) and \
|
||||
isinstance(cur_struct, (six.integer_types, float)):
|
||||
log.debug('Comparing numeric values: %d ? %d', cur_cmp, cur_struct)
|
||||
elif isinstance(cur_cmp, (six.integer_types, float)) and isinstance(
|
||||
cur_struct, (six.integer_types, float)
|
||||
):
|
||||
log.debug("Comparing numeric values: %d ? %d", cur_cmp, cur_struct)
|
||||
# numeric compare
|
||||
return cur_cmp == cur_struct
|
||||
elif isinstance(cur_struct, (six.integer_types, float)) and \
|
||||
isinstance(cur_cmp, (six.string_types, six.text_type)):
|
||||
elif isinstance(cur_struct, (six.integer_types, float)) and isinstance(
|
||||
cur_cmp, (six.string_types, six.text_type)
|
||||
):
|
||||
# Comapring the numerical value agains a presumably mathematical value
|
||||
log.debug('Comparing a numeric value (%d) with a string (%s)', cur_struct, cur_cmp)
|
||||
log.debug(
|
||||
"Comparing a numeric value (%d) with a string (%s)", cur_struct, cur_cmp
|
||||
)
|
||||
numeric_compare = _numeric_regex.match(cur_cmp)
|
||||
# determine if the value to compare agains is a mathematical operand
|
||||
if numeric_compare:
|
||||
compare_value = numeric_compare.group(2)
|
||||
return getattr(float(cur_struct), _numeric_operand[numeric_compare.group(1)])(float(compare_value))
|
||||
return getattr(
|
||||
float(cur_struct), _numeric_operand[numeric_compare.group(1)]
|
||||
)(float(compare_value))
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration.
|
||||
'''
|
||||
"""
|
||||
# Must be a list of dicts.
|
||||
if not isinstance(config, list):
|
||||
return False, 'Configuration for napalm beacon must be a list.'
|
||||
return False, "Configuration for napalm beacon must be a list."
|
||||
for mod in config:
|
||||
fun = mod.keys()[0]
|
||||
fun_cfg = mod.values()[0]
|
||||
if not isinstance(fun_cfg, dict):
|
||||
return False, 'The match structure for the {} execution function output must be a dictionary'.format(fun)
|
||||
return (
|
||||
False,
|
||||
"The match structure for the {} execution function output must be a dictionary".format(
|
||||
fun
|
||||
),
|
||||
)
|
||||
if fun not in __salt__:
|
||||
return False, 'Execution function {} is not availabe!'.format(fun)
|
||||
return True, 'Valid configuration for the napal beacon!'
|
||||
return False, "Execution function {} is not availabe!".format(fun)
|
||||
return True, "Valid configuration for the napal beacon!"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Watch napalm function and fire events.
|
||||
'''
|
||||
log.debug('Executing napalm beacon with config:')
|
||||
"""
|
||||
log.debug("Executing napalm beacon with config:")
|
||||
log.debug(config)
|
||||
ret = []
|
||||
for mod in config:
|
||||
|
@ -308,22 +322,22 @@ def beacon(config):
|
|||
event = {}
|
||||
fun = mod.keys()[0]
|
||||
fun_cfg = mod.values()[0]
|
||||
args = fun_cfg.pop('_args', [])
|
||||
kwargs = fun_cfg.pop('_kwargs', {})
|
||||
log.debug('Executing {fun} with {args} and {kwargs}'.format(
|
||||
fun=fun,
|
||||
args=args,
|
||||
kwargs=kwargs
|
||||
))
|
||||
args = fun_cfg.pop("_args", [])
|
||||
kwargs = fun_cfg.pop("_kwargs", {})
|
||||
log.debug(
|
||||
"Executing {fun} with {args} and {kwargs}".format(
|
||||
fun=fun, args=args, kwargs=kwargs
|
||||
)
|
||||
)
|
||||
fun_ret = __salt__[fun](*args, **kwargs)
|
||||
log.debug('Got the reply from the minion:')
|
||||
log.debug("Got the reply from the minion:")
|
||||
log.debug(fun_ret)
|
||||
if not fun_ret.get('result', False):
|
||||
log.error('Error whilst executing {}'.format(fun))
|
||||
if not fun_ret.get("result", False):
|
||||
log.error("Error whilst executing {}".format(fun))
|
||||
log.error(fun_ret)
|
||||
continue
|
||||
fun_ret_out = fun_ret['out']
|
||||
log.debug('Comparing to:')
|
||||
fun_ret_out = fun_ret["out"]
|
||||
log.debug("Comparing to:")
|
||||
log.debug(fun_cfg)
|
||||
try:
|
||||
fun_cmp_result = _compare(fun_cfg, fun_ret_out)
|
||||
|
@ -332,21 +346,18 @@ def beacon(config):
|
|||
# catch any exception and continue
|
||||
# to not jeopardise the execution of the next function in the list
|
||||
continue
|
||||
log.debug('Result of comparison: {res}'.format(res=fun_cmp_result))
|
||||
log.debug("Result of comparison: {res}".format(res=fun_cmp_result))
|
||||
if fun_cmp_result:
|
||||
log.info('Matched {fun} with {cfg}'.format(
|
||||
fun=fun,
|
||||
cfg=fun_cfg
|
||||
))
|
||||
event['tag'] = '{os}/{fun}'.format(os=__grains__['os'], fun=fun)
|
||||
event['fun'] = fun
|
||||
event['args'] = args
|
||||
event['kwargs'] = kwargs
|
||||
event['data'] = fun_ret
|
||||
event['match'] = fun_cfg
|
||||
log.debug('Queueing event:')
|
||||
log.info("Matched {fun} with {cfg}".format(fun=fun, cfg=fun_cfg))
|
||||
event["tag"] = "{os}/{fun}".format(os=__grains__["os"], fun=fun)
|
||||
event["fun"] = fun
|
||||
event["args"] = args
|
||||
event["kwargs"] = kwargs
|
||||
event["data"] = fun_ret
|
||||
event["match"] = fun_cfg
|
||||
log.debug("Queueing event:")
|
||||
log.debug(event)
|
||||
ret.append(event)
|
||||
log.debug('NAPALM beacon generated the events:')
|
||||
log.debug("NAPALM beacon generated the events:")
|
||||
log.debug(ret)
|
||||
return ret
|
||||
|
|
|
@ -1,39 +1,49 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to monitor statistics from ethernet adapters
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import third party libs
|
||||
# pylint: disable=import-error
|
||||
try:
|
||||
import salt.utils.psutil_compat as psutil
|
||||
|
||||
HAS_PSUTIL = True
|
||||
except ImportError:
|
||||
HAS_PSUTIL = False
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'network_info'
|
||||
__virtualname__ = "network_info"
|
||||
|
||||
__attrs = ['bytes_sent', 'bytes_recv', 'packets_sent',
|
||||
'packets_recv', 'errin', 'errout',
|
||||
'dropin', 'dropout']
|
||||
__attrs = [
|
||||
"bytes_sent",
|
||||
"bytes_recv",
|
||||
"packets_sent",
|
||||
"packets_recv",
|
||||
"errin",
|
||||
"errout",
|
||||
"dropin",
|
||||
"dropout",
|
||||
]
|
||||
|
||||
|
||||
def _to_list(obj):
|
||||
'''
|
||||
"""
|
||||
Convert snetinfo object to list
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
|
||||
for attr in __attrs:
|
||||
|
@ -44,42 +54,55 @@ def _to_list(obj):
|
|||
|
||||
def __virtual__():
|
||||
if not HAS_PSUTIL:
|
||||
return (False, 'cannot load network_info beacon: psutil not available')
|
||||
return (False, "cannot load network_info beacon: psutil not available")
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
|
||||
VALID_ITEMS = [
|
||||
'type', 'bytes_sent', 'bytes_recv', 'packets_sent',
|
||||
'packets_recv', 'errin', 'errout', 'dropin',
|
||||
'dropout'
|
||||
"type",
|
||||
"bytes_sent",
|
||||
"bytes_recv",
|
||||
"packets_sent",
|
||||
"packets_recv",
|
||||
"errin",
|
||||
"errout",
|
||||
"dropin",
|
||||
"dropout",
|
||||
]
|
||||
|
||||
# Configuration for load beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for network_info beacon must be a list.')
|
||||
return False, ("Configuration for network_info beacon must be a list.")
|
||||
else:
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
for item in _config.get('interfaces', {}):
|
||||
if not isinstance(_config['interfaces'][item], dict):
|
||||
return False, ('Configuration for network_info beacon must '
|
||||
'be a list of dictionaries.')
|
||||
for item in _config.get("interfaces", {}):
|
||||
if not isinstance(_config["interfaces"][item], dict):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for network_info beacon must "
|
||||
"be a list of dictionaries."
|
||||
),
|
||||
)
|
||||
else:
|
||||
if not any(j in VALID_ITEMS for j in _config['interfaces'][item]):
|
||||
return False, ('Invalid configuration item in '
|
||||
'Beacon configuration.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if not any(j in VALID_ITEMS for j in _config["interfaces"][item]):
|
||||
return (
|
||||
False,
|
||||
("Invalid configuration item in Beacon configuration."),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Emit the network statistics of this host.
|
||||
|
||||
Specify thresholds for each network stat
|
||||
|
@ -125,42 +148,47 @@ def beacon(config):
|
|||
dropout: 100
|
||||
|
||||
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
log.debug('psutil.net_io_counters %s', psutil.net_io_counters)
|
||||
log.debug("psutil.net_io_counters %s", psutil.net_io_counters)
|
||||
|
||||
_stats = psutil.net_io_counters(pernic=True)
|
||||
|
||||
log.debug('_stats %s', _stats)
|
||||
for interface in _config.get('interfaces', {}):
|
||||
log.debug("_stats %s", _stats)
|
||||
for interface in _config.get("interfaces", {}):
|
||||
if interface in _stats:
|
||||
interface_config = _config['interfaces'][interface]
|
||||
interface_config = _config["interfaces"][interface]
|
||||
_if_stats = _stats[interface]
|
||||
_diff = False
|
||||
for attr in __attrs:
|
||||
if attr in interface_config:
|
||||
if 'type' in interface_config and \
|
||||
interface_config['type'] == 'equal':
|
||||
if getattr(_if_stats, attr, None) == \
|
||||
int(interface_config[attr]):
|
||||
if (
|
||||
"type" in interface_config
|
||||
and interface_config["type"] == "equal"
|
||||
):
|
||||
if getattr(_if_stats, attr, None) == int(
|
||||
interface_config[attr]
|
||||
):
|
||||
_diff = True
|
||||
elif 'type' in interface_config and \
|
||||
interface_config['type'] == 'greater':
|
||||
if getattr(_if_stats, attr, None) > \
|
||||
int(interface_config[attr]):
|
||||
elif (
|
||||
"type" in interface_config
|
||||
and interface_config["type"] == "greater"
|
||||
):
|
||||
if getattr(_if_stats, attr, None) > int(interface_config[attr]):
|
||||
_diff = True
|
||||
else:
|
||||
log.debug('attr %s', getattr(_if_stats,
|
||||
attr, None))
|
||||
log.debug("attr %s", getattr(_if_stats, attr, None))
|
||||
else:
|
||||
if getattr(_if_stats, attr, None) == \
|
||||
int(interface_config[attr]):
|
||||
if getattr(_if_stats, attr, None) == int(
|
||||
interface_config[attr]
|
||||
):
|
||||
_diff = True
|
||||
if _diff:
|
||||
ret.append({'interface': interface,
|
||||
'network_info': _to_list(_if_stats)})
|
||||
ret.append(
|
||||
{"interface": interface, "network_info": _to_list(_if_stats)}
|
||||
)
|
||||
return ret
|
||||
|
|
|
@ -1,43 +1,69 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to monitor network adapter setting changes on Linux
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import ast
|
||||
import logging
|
||||
import re
|
||||
|
||||
import salt.loader
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
from pyroute2 import IPDB
|
||||
|
||||
IP = IPDB()
|
||||
HAS_PYROUTE2 = True
|
||||
except ImportError:
|
||||
IP = None
|
||||
HAS_PYROUTE2 = False
|
||||
|
||||
import ast
|
||||
import re
|
||||
import salt.loader
|
||||
import logging
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtual_name__ = 'network_settings'
|
||||
__virtual_name__ = "network_settings"
|
||||
|
||||
ATTRS = ['family', 'txqlen', 'ipdb_scope', 'index', 'operstate', 'group',
|
||||
'carrier_changes', 'ipaddr', 'neighbours', 'ifname', 'promiscuity',
|
||||
'linkmode', 'broadcast', 'address', 'num_tx_queues', 'ipdb_priority',
|
||||
'kind', 'qdisc', 'mtu', 'num_rx_queues', 'carrier', 'flags',
|
||||
'ifi_type', 'ports']
|
||||
ATTRS = [
|
||||
"family",
|
||||
"txqlen",
|
||||
"ipdb_scope",
|
||||
"index",
|
||||
"operstate",
|
||||
"group",
|
||||
"carrier_changes",
|
||||
"ipaddr",
|
||||
"neighbours",
|
||||
"ifname",
|
||||
"promiscuity",
|
||||
"linkmode",
|
||||
"broadcast",
|
||||
"address",
|
||||
"num_tx_queues",
|
||||
"ipdb_priority",
|
||||
"kind",
|
||||
"qdisc",
|
||||
"mtu",
|
||||
"num_rx_queues",
|
||||
"carrier",
|
||||
"flags",
|
||||
"ifi_type",
|
||||
"ports",
|
||||
]
|
||||
|
||||
LAST_STATS = {}
|
||||
|
||||
|
||||
class Hashabledict(dict):
|
||||
'''
|
||||
"""
|
||||
Helper class that implements a hash function for a dictionary
|
||||
'''
|
||||
"""
|
||||
|
||||
def __hash__(self):
|
||||
return hash(tuple(sorted(self.items())))
|
||||
|
||||
|
@ -49,35 +75,44 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for network_settings '
|
||||
'beacon must be a list.')
|
||||
return False, ("Configuration for network_settings beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
interfaces = _config.get('interfaces', {})
|
||||
interfaces = _config.get("interfaces", {})
|
||||
if isinstance(interfaces, list):
|
||||
#Old syntax
|
||||
return False, ('interfaces section for network_settings beacon'
|
||||
' must be a dictionary.')
|
||||
# Old syntax
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"interfaces section for network_settings beacon"
|
||||
" must be a dictionary."
|
||||
),
|
||||
)
|
||||
|
||||
for item in interfaces:
|
||||
if not isinstance(_config['interfaces'][item], dict):
|
||||
return False, ('Interface attributes for network_settings beacon'
|
||||
' must be a dictionary.')
|
||||
if not all(j in ATTRS for j in _config['interfaces'][item]):
|
||||
return False, ('Invalid attributes in beacon configuration.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if not isinstance(_config["interfaces"][item], dict):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Interface attributes for network_settings beacon"
|
||||
" must be a dictionary."
|
||||
),
|
||||
)
|
||||
if not all(j in ATTRS for j in _config["interfaces"][item]):
|
||||
return False, ("Invalid attributes in beacon configuration.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def _copy_interfaces_info(interfaces):
|
||||
'''
|
||||
"""
|
||||
Return a dictionary with a copy of each interface attributes in ATTRS
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
|
||||
for interface in interfaces:
|
||||
|
@ -93,7 +128,7 @@ def _copy_interfaces_info(interfaces):
|
|||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Watch for changes on network settings
|
||||
|
||||
By default, the beacon will emit when there is a value change on one of the
|
||||
|
@ -134,13 +169,13 @@ def beacon(config):
|
|||
ipaddr:
|
||||
promiscuity:
|
||||
|
||||
'''
|
||||
"""
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
ret = []
|
||||
interfaces = []
|
||||
expanded_config = {'interfaces': {}}
|
||||
expanded_config = {"interfaces": {}}
|
||||
|
||||
global LAST_STATS
|
||||
|
||||
|
@ -151,14 +186,14 @@ def beacon(config):
|
|||
if not LAST_STATS:
|
||||
LAST_STATS = _stats
|
||||
|
||||
if 'coalesce' in _config and _config['coalesce']:
|
||||
if "coalesce" in _config and _config["coalesce"]:
|
||||
coalesce = True
|
||||
changes = {}
|
||||
|
||||
log.debug('_stats %s', _stats)
|
||||
log.debug("_stats %s", _stats)
|
||||
# Get list of interfaces included in config that are registered in the
|
||||
# system, including interfaces defined by wildcards (eth*, wlan*)
|
||||
for interface_config in _config.get('interfaces', {}):
|
||||
for interface_config in _config.get("interfaces", {}):
|
||||
if interface_config in _stats:
|
||||
interfaces.append(interface_config)
|
||||
else:
|
||||
|
@ -167,22 +202,24 @@ def beacon(config):
|
|||
match = re.search(interface_config, interface_stat)
|
||||
if match:
|
||||
interfaces.append(interface_stat)
|
||||
expanded_config['interfaces'][interface_stat] = _config['interfaces'][interface_config]
|
||||
expanded_config["interfaces"][interface_stat] = _config[
|
||||
"interfaces"
|
||||
][interface_config]
|
||||
|
||||
if expanded_config:
|
||||
_config['interfaces'].update(expanded_config['interfaces'])
|
||||
_config["interfaces"].update(expanded_config["interfaces"])
|
||||
|
||||
# config updated so update _config
|
||||
list(map(_config.update, config))
|
||||
|
||||
log.debug('interfaces %s', interfaces)
|
||||
log.debug("interfaces %s", interfaces)
|
||||
for interface in interfaces:
|
||||
_send_event = False
|
||||
_diff_stats = _stats[interface] - LAST_STATS[interface]
|
||||
_ret_diff = {}
|
||||
interface_config = _config['interfaces'][interface]
|
||||
interface_config = _config["interfaces"][interface]
|
||||
|
||||
log.debug('_diff_stats %s', _diff_stats)
|
||||
log.debug("_diff_stats %s", _diff_stats)
|
||||
if _diff_stats:
|
||||
_diff_stats_dict = {}
|
||||
LAST_STATS[interface] = _stats[interface]
|
||||
|
@ -192,9 +229,8 @@ def beacon(config):
|
|||
for attr in interface_config:
|
||||
if attr in _diff_stats_dict:
|
||||
config_value = None
|
||||
if interface_config[attr] and \
|
||||
'onvalue' in interface_config[attr]:
|
||||
config_value = interface_config[attr]['onvalue']
|
||||
if interface_config[attr] and "onvalue" in interface_config[attr]:
|
||||
config_value = interface_config[attr]["onvalue"]
|
||||
new_value = ast.literal_eval(_diff_stats_dict[attr])
|
||||
if not config_value or config_value == new_value:
|
||||
_send_event = True
|
||||
|
@ -204,13 +240,13 @@ def beacon(config):
|
|||
if coalesce:
|
||||
changes[interface] = _ret_diff
|
||||
else:
|
||||
ret.append({'tag': interface,
|
||||
'interface': interface,
|
||||
'change': _ret_diff})
|
||||
ret.append(
|
||||
{"tag": interface, "interface": interface, "change": _ret_diff}
|
||||
)
|
||||
|
||||
if coalesce and changes:
|
||||
grains_info = salt.loader.grains(__opts__, True)
|
||||
__grains__.update(grains_info)
|
||||
ret.append({'tag': 'result', 'changes': changes})
|
||||
ret.append({"tag": "result", "changes": changes})
|
||||
|
||||
return ret
|
||||
|
|
|
@ -1,51 +1,51 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Watch for pkgs that have upgrades, then fire an event.
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
__virtualname__ = 'pkg'
|
||||
__virtualname__ = "pkg"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Only load if strace is installed
|
||||
'''
|
||||
return __virtualname__ if 'pkg.upgrade_available' in __salt__ else False
|
||||
"""
|
||||
return __virtualname__ if "pkg.upgrade_available" in __salt__ else False
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for pkg beacon should be a list
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for pkg beacon must be a list.')
|
||||
return False, ("Configuration for pkg beacon must be a list.")
|
||||
|
||||
# Configuration for pkg beacon should contain pkgs
|
||||
pkgs_found = False
|
||||
pkgs_not_list = False
|
||||
for config_item in config:
|
||||
if 'pkgs' in config_item:
|
||||
if "pkgs" in config_item:
|
||||
pkgs_found = True
|
||||
if isinstance(config_item['pkgs'], list):
|
||||
if isinstance(config_item["pkgs"], list):
|
||||
pkgs_not_list = True
|
||||
|
||||
if not pkgs_found or not pkgs_not_list:
|
||||
return False, 'Configuration for pkg beacon requires list of pkgs.'
|
||||
return True, 'Valid beacon configuration'
|
||||
return False, "Configuration for pkg beacon requires list of pkgs."
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Check if installed packages are the latest versions
|
||||
and fire an event for those that have upgrades.
|
||||
|
||||
|
@ -57,23 +57,21 @@ def beacon(config):
|
|||
- zsh
|
||||
- apache2
|
||||
- refresh: True
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
_refresh = False
|
||||
pkgs = []
|
||||
for config_item in config:
|
||||
if 'pkgs' in config_item:
|
||||
pkgs += config_item['pkgs']
|
||||
if 'refresh' in config and config['refresh']:
|
||||
if "pkgs" in config_item:
|
||||
pkgs += config_item["pkgs"]
|
||||
if "refresh" in config and config["refresh"]:
|
||||
_refresh = True
|
||||
|
||||
for pkg in pkgs:
|
||||
_installed = __salt__['pkg.version'](pkg)
|
||||
_latest = __salt__['pkg.latest_version'](pkg, refresh=_refresh)
|
||||
_installed = __salt__["pkg.version"](pkg)
|
||||
_latest = __salt__["pkg.latest_version"](pkg, refresh=_refresh)
|
||||
if _installed and _latest:
|
||||
_pkg = {'pkg': pkg,
|
||||
'version': _latest
|
||||
}
|
||||
_pkg = {"pkg": pkg, "version": _latest}
|
||||
ret.append(_pkg)
|
||||
return ret
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Example beacon to use with salt-proxy
|
||||
|
||||
.. code-block:: yaml
|
||||
|
@ -7,10 +7,11 @@ Example beacon to use with salt-proxy
|
|||
beacons:
|
||||
proxy_example:
|
||||
endpoint: beacon
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
|
@ -19,32 +20,32 @@ from salt.ext.six.moves import map
|
|||
|
||||
# Important: If used with salt-proxy
|
||||
# this is required for the beacon to load!!!
|
||||
__proxyenabled__ = ['*']
|
||||
__proxyenabled__ = ["*"]
|
||||
|
||||
__virtualname__ = 'proxy_example'
|
||||
__virtualname__ = "proxy_example"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Trivially let the beacon load for the test example.
|
||||
For a production beacon we should probably have some expression here.
|
||||
'''
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for proxy_example beacon must be a list.')
|
||||
return True, 'Valid beacon configuration'
|
||||
return False, ("Configuration for proxy_example beacon must be a list.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Called several times each second
|
||||
https://docs.saltstack.com/en/latest/topics/beacons/#the-beacon-function
|
||||
|
||||
|
@ -53,7 +54,7 @@ def beacon(config):
|
|||
beacons:
|
||||
proxy_example:
|
||||
- endpoint: beacon
|
||||
'''
|
||||
"""
|
||||
# Important!!!
|
||||
# Although this toy example makes an HTTP call
|
||||
# to get beacon information
|
||||
|
@ -63,9 +64,6 @@ def beacon(config):
|
|||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
beacon_url = '{0}{1}'.format(__opts__['proxy']['url'],
|
||||
_config['endpoint'])
|
||||
ret = salt.utils.http.query(beacon_url,
|
||||
decode_type='json',
|
||||
decode=True)
|
||||
return [ret['dict']]
|
||||
beacon_url = "{0}{1}".format(__opts__["proxy"]["url"], _config["endpoint"])
|
||||
ret = salt.utils.http.query(beacon_url, decode_type="json", decode=True)
|
||||
return [ret["dict"]]
|
||||
|
|
|
@ -1,57 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Send events covering process status
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import third party libs
|
||||
# pylint: disable=import-error
|
||||
try:
|
||||
import salt.utils.psutil_compat as psutil
|
||||
|
||||
HAS_PSUTIL = True
|
||||
except ImportError:
|
||||
HAS_PSUTIL = False
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
log = logging.getLogger(__name__) # pylint: disable=invalid-name
|
||||
|
||||
__virtualname__ = 'ps'
|
||||
__virtualname__ = "ps"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_PSUTIL:
|
||||
return (False, 'cannot load ps beacon: psutil not available')
|
||||
return (False, "cannot load ps beacon: psutil not available")
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for ps beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for ps beacon must be a list.')
|
||||
return False, ("Configuration for ps beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'processes' not in _config:
|
||||
return False, ('Configuration for ps beacon requires processes.')
|
||||
if "processes" not in _config:
|
||||
return False, ("Configuration for ps beacon requires processes.")
|
||||
else:
|
||||
if not isinstance(_config['processes'], dict):
|
||||
return False, ('Processes for ps beacon must be a dictionary.')
|
||||
if not isinstance(_config["processes"], dict):
|
||||
return False, ("Processes for ps beacon must be a dictionary.")
|
||||
|
||||
return True, 'Valid beacon configuration'
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Scan for processes and fire events
|
||||
|
||||
Example Config
|
||||
|
@ -66,7 +69,7 @@ def beacon(config):
|
|||
|
||||
The config above sets up beacons to check that
|
||||
processes are running or stopped.
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
procs = []
|
||||
for proc in psutil.process_iter():
|
||||
|
@ -77,15 +80,15 @@ def beacon(config):
|
|||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
for process in _config.get('processes', {}):
|
||||
for process in _config.get("processes", {}):
|
||||
ret_dict = {}
|
||||
if _config['processes'][process] == 'running':
|
||||
if _config["processes"][process] == "running":
|
||||
if process in procs:
|
||||
ret_dict[process] = 'Running'
|
||||
ret_dict[process] = "Running"
|
||||
ret.append(ret_dict)
|
||||
elif _config['processes'][process] == 'stopped':
|
||||
elif _config["processes"][process] == "stopped":
|
||||
if process not in procs:
|
||||
ret_dict[process] = 'Stopped'
|
||||
ret_dict[process] = "Stopped"
|
||||
ret.append(ret_dict)
|
||||
else:
|
||||
if process not in procs:
|
||||
|
|
|
@ -1,33 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to manage and report the status of
|
||||
one or more salt proxy processes
|
||||
|
||||
.. versionadded:: 2015.8.3
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _run_proxy_processes(proxies):
|
||||
'''
|
||||
"""
|
||||
Iterate over a list of proxy
|
||||
names and restart any that
|
||||
aren't running
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
for proxy in proxies:
|
||||
result = {}
|
||||
if not __salt__['salt_proxy.is_running'](proxy)['result']:
|
||||
__salt__['salt_proxy.configure_proxy'](proxy, start=True)
|
||||
result[proxy] = 'Proxy {0} was started'.format(proxy)
|
||||
if not __salt__["salt_proxy.is_running"](proxy)["result"]:
|
||||
__salt__["salt_proxy.configure_proxy"](proxy, start=True)
|
||||
result[proxy] = "Proxy {0} was started".format(proxy)
|
||||
else:
|
||||
msg = 'Proxy {0} is already running'.format(proxy)
|
||||
msg = "Proxy {0} is already running".format(proxy)
|
||||
result[proxy] = msg
|
||||
log.debug(msg)
|
||||
ret.append(result)
|
||||
|
@ -35,30 +37,28 @@ def _run_proxy_processes(proxies):
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for adb beacon should be a dictionary with states array
|
||||
if not isinstance(config, list):
|
||||
log.info('Configuration for salt_proxy beacon must be a list.')
|
||||
return False, ('Configuration for salt_proxy beacon must be a list.')
|
||||
log.info("Configuration for salt_proxy beacon must be a list.")
|
||||
return False, ("Configuration for salt_proxy beacon must be a list.")
|
||||
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'proxies' not in _config:
|
||||
return False, ('Configuration for salt_proxy'
|
||||
' beacon requires proxies.')
|
||||
if "proxies" not in _config:
|
||||
return False, ("Configuration for salt_proxy beacon requires proxies.")
|
||||
else:
|
||||
if not isinstance(_config['proxies'], dict):
|
||||
return False, ('Proxies for salt_proxy '
|
||||
'beacon must be a dictionary.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if not isinstance(_config["proxies"], dict):
|
||||
return False, ("Proxies for salt_proxy beacon must be a dictionary.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Handle configured proxies
|
||||
|
||||
.. code-block:: yaml
|
||||
|
@ -68,10 +68,10 @@ def beacon(config):
|
|||
- proxies:
|
||||
p8000: {}
|
||||
p8001: {}
|
||||
'''
|
||||
log.trace('salt proxy beacon called')
|
||||
"""
|
||||
log.trace("salt proxy beacon called")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
return _run_proxy_processes(_config['proxies'])
|
||||
return _run_proxy_processes(_config["proxies"])
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Monitor temperature, humidity and pressure using the SenseHat of a Raspberry Pi
|
||||
===============================================================================
|
||||
|
||||
|
@ -8,7 +8,7 @@ Monitor temperature, humidity and pressure using the SenseHat of a Raspberry Pi
|
|||
:maintainer: Benedikt Werner <1benediktwerner@gmail.com>
|
||||
:maturity: new
|
||||
:depends: sense_hat Python module
|
||||
'''
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
|
@ -22,30 +22,28 @@ log = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def __virtual__():
|
||||
return 'sensehat.get_pressure' in __salt__
|
||||
return "sensehat.get_pressure" in __salt__
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for sensehat beacon should be a list
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for sensehat beacon '
|
||||
'must be a list.')
|
||||
return False, ("Configuration for sensehat beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'sensors' not in _config:
|
||||
return False, ('Configuration for sensehat'
|
||||
' beacon requires sensors.')
|
||||
if "sensors" not in _config:
|
||||
return False, ("Configuration for sensehat beacon requires sensors.")
|
||||
|
||||
return True, 'Valid beacon configuration'
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Monitor the temperature, humidity and pressure using the SenseHat sensors.
|
||||
|
||||
You can either specify a threshold for each value and only emit a beacon
|
||||
|
@ -67,43 +65,36 @@ def beacon(config):
|
|||
temperature: [20, 40]
|
||||
temperature_from_pressure: 40
|
||||
pressure: 1500
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
min_default = {
|
||||
'humidity': '0',
|
||||
'pressure': '0',
|
||||
'temperature': '-273.15'
|
||||
}
|
||||
min_default = {"humidity": "0", "pressure": "0", "temperature": "-273.15"}
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
for sensor in _config.get('sensors', {}):
|
||||
sensor_function = 'sensehat.get_{0}'.format(sensor)
|
||||
for sensor in _config.get("sensors", {}):
|
||||
sensor_function = "sensehat.get_{0}".format(sensor)
|
||||
if sensor_function not in __salt__:
|
||||
log.error('No sensor for meassuring %s. Skipping.', sensor)
|
||||
log.error("No sensor for meassuring %s. Skipping.", sensor)
|
||||
continue
|
||||
|
||||
sensor_config = _config['sensors'][sensor]
|
||||
sensor_config = _config["sensors"][sensor]
|
||||
if isinstance(sensor_config, list):
|
||||
sensor_min = six.text_type(sensor_config[0])
|
||||
sensor_max = six.text_type(sensor_config[1])
|
||||
else:
|
||||
sensor_min = min_default.get(sensor, '0')
|
||||
sensor_min = min_default.get(sensor, "0")
|
||||
sensor_max = six.text_type(sensor_config)
|
||||
|
||||
if '%' in sensor_min:
|
||||
sensor_min = re.sub('%', '', sensor_min)
|
||||
if '%' in sensor_max:
|
||||
sensor_max = re.sub('%', '', sensor_max)
|
||||
if "%" in sensor_min:
|
||||
sensor_min = re.sub("%", "", sensor_min)
|
||||
if "%" in sensor_max:
|
||||
sensor_max = re.sub("%", "", sensor_max)
|
||||
sensor_min = float(sensor_min)
|
||||
sensor_max = float(sensor_max)
|
||||
|
||||
current_value = __salt__[sensor_function]()
|
||||
if not sensor_min <= current_value <= sensor_max:
|
||||
ret.append({
|
||||
'tag': 'sensehat/{0}'.format(sensor),
|
||||
sensor: current_value
|
||||
})
|
||||
ret.append({"tag": "sensehat/{0}".format(sensor), sensor: current_value})
|
||||
|
||||
return ret
|
||||
|
|
|
@ -1,48 +1,53 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Send events covering service status
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
log = logging.getLogger(__name__) # pylint: disable=invalid-name
|
||||
|
||||
LAST_STATUS = {}
|
||||
|
||||
__virtualname__ = 'service'
|
||||
__virtualname__ = "service"
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for service beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for service beacon must be a list.')
|
||||
return False, ("Configuration for service beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'services' not in _config:
|
||||
return False, ('Configuration for service beacon'
|
||||
' requires services.')
|
||||
if "services" not in _config:
|
||||
return False, ("Configuration for service beacon requires services.")
|
||||
else:
|
||||
for config_item in _config['services']:
|
||||
if not isinstance(_config['services'][config_item], dict):
|
||||
return False, ('Configuration for service beacon must '
|
||||
'be a list of dictionaries.')
|
||||
for config_item in _config["services"]:
|
||||
if not isinstance(_config["services"][config_item], dict):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for service beacon must "
|
||||
"be a list of dictionaries."
|
||||
),
|
||||
)
|
||||
|
||||
return True, 'Valid beacon configuration'
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Scan for the configured services and fire events
|
||||
|
||||
Example Config
|
||||
|
@ -101,19 +106,19 @@ def beacon(config):
|
|||
onchangeonly: True
|
||||
delay: 30
|
||||
uncleanshutdown: /run/nginx.pid
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
for service in _config.get('services', {}):
|
||||
for service in _config.get("services", {}):
|
||||
ret_dict = {}
|
||||
|
||||
service_config = _config['services'][service]
|
||||
service_config = _config["services"][service]
|
||||
|
||||
ret_dict[service] = {'running': __salt__['service.status'](service)}
|
||||
ret_dict['service_name'] = service
|
||||
ret_dict['tag'] = service
|
||||
ret_dict[service] = {"running": __salt__["service.status"](service)}
|
||||
ret_dict["service_name"] = service
|
||||
ret_dict["tag"] = service
|
||||
currtime = time.time()
|
||||
|
||||
# If no options is given to the service, we fall back to the defaults
|
||||
|
@ -121,42 +126,44 @@ def beacon(config):
|
|||
# key:values are then added to the service dictionary.
|
||||
if not service_config:
|
||||
service_config = {}
|
||||
if 'oncleanshutdown' not in service_config:
|
||||
service_config['oncleanshutdown'] = False
|
||||
if 'emitatstartup' not in service_config:
|
||||
service_config['emitatstartup'] = True
|
||||
if 'onchangeonly' not in service_config:
|
||||
service_config['onchangeonly'] = False
|
||||
if 'delay' not in service_config:
|
||||
service_config['delay'] = 0
|
||||
if "oncleanshutdown" not in service_config:
|
||||
service_config["oncleanshutdown"] = False
|
||||
if "emitatstartup" not in service_config:
|
||||
service_config["emitatstartup"] = True
|
||||
if "onchangeonly" not in service_config:
|
||||
service_config["onchangeonly"] = False
|
||||
if "delay" not in service_config:
|
||||
service_config["delay"] = 0
|
||||
|
||||
# We only want to report the nature of the shutdown
|
||||
# if the current running status is False
|
||||
# as well as if the config for the beacon asks for it
|
||||
if 'uncleanshutdown' in service_config and not ret_dict[service]['running']:
|
||||
filename = service_config['uncleanshutdown']
|
||||
ret_dict[service]['uncleanshutdown'] = True if os.path.exists(filename) else False
|
||||
if 'onchangeonly' in service_config and service_config['onchangeonly'] is True:
|
||||
if "uncleanshutdown" in service_config and not ret_dict[service]["running"]:
|
||||
filename = service_config["uncleanshutdown"]
|
||||
ret_dict[service]["uncleanshutdown"] = (
|
||||
True if os.path.exists(filename) else False
|
||||
)
|
||||
if "onchangeonly" in service_config and service_config["onchangeonly"] is True:
|
||||
if service not in LAST_STATUS:
|
||||
LAST_STATUS[service] = ret_dict[service]
|
||||
if service_config['delay'] > 0:
|
||||
LAST_STATUS[service]['time'] = currtime
|
||||
elif not service_config['emitatstartup']:
|
||||
if service_config["delay"] > 0:
|
||||
LAST_STATUS[service]["time"] = currtime
|
||||
elif not service_config["emitatstartup"]:
|
||||
continue
|
||||
else:
|
||||
ret.append(ret_dict)
|
||||
|
||||
if LAST_STATUS[service]['running'] != ret_dict[service]['running']:
|
||||
if LAST_STATUS[service]["running"] != ret_dict[service]["running"]:
|
||||
LAST_STATUS[service] = ret_dict[service]
|
||||
if service_config['delay'] > 0:
|
||||
LAST_STATUS[service]['time'] = currtime
|
||||
if service_config["delay"] > 0:
|
||||
LAST_STATUS[service]["time"] = currtime
|
||||
else:
|
||||
ret.append(ret_dict)
|
||||
|
||||
if 'time' in LAST_STATUS[service]:
|
||||
elapsedtime = int(round(currtime - LAST_STATUS[service]['time']))
|
||||
if elapsedtime > service_config['delay']:
|
||||
del LAST_STATUS[service]['time']
|
||||
if "time" in LAST_STATUS[service]:
|
||||
elapsedtime = int(round(currtime - LAST_STATUS[service]["time"]))
|
||||
if elapsedtime > service_config["delay"]:
|
||||
del LAST_STATUS[service]["time"]
|
||||
ret.append(ret_dict)
|
||||
else:
|
||||
ret.append(ret_dict)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Watch the shell commands being executed actively. This beacon requires strace.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
@ -14,97 +14,97 @@ import salt.utils.path
|
|||
import salt.utils.stringutils
|
||||
import salt.utils.vt
|
||||
|
||||
__virtualname__ = 'sh'
|
||||
__virtualname__ = "sh"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Only load if strace is installed
|
||||
'''
|
||||
return __virtualname__ if salt.utils.path.which('strace') else False
|
||||
"""
|
||||
return __virtualname__ if salt.utils.path.which("strace") else False
|
||||
|
||||
|
||||
def _get_shells():
|
||||
'''
|
||||
"""
|
||||
Return the valid shells on this system
|
||||
'''
|
||||
"""
|
||||
start = time.time()
|
||||
if 'sh.last_shells' in __context__:
|
||||
if start - __context__['sh.last_shells'] > 5:
|
||||
__context__['sh.last_shells'] = start
|
||||
if "sh.last_shells" in __context__:
|
||||
if start - __context__["sh.last_shells"] > 5:
|
||||
__context__["sh.last_shells"] = start
|
||||
else:
|
||||
__context__['sh.shells'] = __salt__['cmd.shells']()
|
||||
__context__["sh.shells"] = __salt__["cmd.shells"]()
|
||||
else:
|
||||
__context__['sh.last_shells'] = start
|
||||
__context__['sh.shells'] = __salt__['cmd.shells']()
|
||||
return __context__['sh.shells']
|
||||
__context__["sh.last_shells"] = start
|
||||
__context__["sh.shells"] = __salt__["cmd.shells"]()
|
||||
return __context__["sh.shells"]
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for sh beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for sh beacon must be a list.')
|
||||
return True, 'Valid beacon configuration'
|
||||
return False, ("Configuration for sh beacon must be a list.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Scan the shell execve routines. This beacon will convert all login shells
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
beacons:
|
||||
sh: []
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
pkey = 'sh.vt'
|
||||
pkey = "sh.vt"
|
||||
shells = _get_shells()
|
||||
ps_out = __salt__['status.procs']()
|
||||
ps_out = __salt__["status.procs"]()
|
||||
track_pids = []
|
||||
for pid in ps_out:
|
||||
if any(ps_out[pid].get('cmd', '').lstrip('-') in shell for shell in shells):
|
||||
if any(ps_out[pid].get("cmd", "").lstrip("-") in shell for shell in shells):
|
||||
track_pids.append(pid)
|
||||
if pkey not in __context__:
|
||||
__context__[pkey] = {}
|
||||
for pid in track_pids:
|
||||
if pid not in __context__[pkey]:
|
||||
cmd = ['strace', '-f', '-e', 'execve', '-p', '{0}'.format(pid)]
|
||||
cmd = ["strace", "-f", "-e", "execve", "-p", "{0}".format(pid)]
|
||||
__context__[pkey][pid] = {}
|
||||
__context__[pkey][pid]['vt'] = salt.utils.vt.Terminal(
|
||||
cmd,
|
||||
log_stdout=True,
|
||||
log_stderr=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False)
|
||||
__context__[pkey][pid]['user'] = ps_out[pid].get('user')
|
||||
__context__[pkey][pid]["vt"] = salt.utils.vt.Terminal(
|
||||
cmd,
|
||||
log_stdout=True,
|
||||
log_stderr=True,
|
||||
stream_stdout=False,
|
||||
stream_stderr=False,
|
||||
)
|
||||
__context__[pkey][pid]["user"] = ps_out[pid].get("user")
|
||||
for pid in list(__context__[pkey]):
|
||||
out = ''
|
||||
err = ''
|
||||
while __context__[pkey][pid]['vt'].has_unread_data:
|
||||
tout, terr = __context__[pkey][pid]['vt'].recv()
|
||||
out = ""
|
||||
err = ""
|
||||
while __context__[pkey][pid]["vt"].has_unread_data:
|
||||
tout, terr = __context__[pkey][pid]["vt"].recv()
|
||||
if not terr:
|
||||
break
|
||||
out += salt.utils.stringutils.to_unicode(tout or '')
|
||||
out += salt.utils.stringutils.to_unicode(tout or "")
|
||||
err += terr
|
||||
for line in err.split('\n'):
|
||||
event = {'args': [],
|
||||
'tag': pid}
|
||||
if 'execve' in line:
|
||||
comps = line.split('execve')[1].split('"')
|
||||
for line in err.split("\n"):
|
||||
event = {"args": [], "tag": pid}
|
||||
if "execve" in line:
|
||||
comps = line.split("execve")[1].split('"')
|
||||
for ind, field in enumerate(comps):
|
||||
if ind == 1:
|
||||
event['cmd'] = field
|
||||
event["cmd"] = field
|
||||
continue
|
||||
if ind % 2 != 0:
|
||||
event['args'].append(field)
|
||||
event['user'] = __context__[pkey][pid]['user']
|
||||
event["args"].append(field)
|
||||
event["user"] = __context__[pkey][pid]["user"]
|
||||
ret.append(event)
|
||||
if not __context__[pkey][pid]['vt'].isalive():
|
||||
__context__[pkey][pid]['vt'].close()
|
||||
if not __context__[pkey][pid]["vt"].isalive():
|
||||
__context__[pkey][pid]["vt"].close()
|
||||
__context__[pkey].pop(pid)
|
||||
return ret
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon that fires events on image import/delete.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
@ -17,79 +17,81 @@ Beacon that fires events on image import/delete.
|
|||
imgadm:
|
||||
- interval: 60
|
||||
- startup_import_event: True
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
__virtualname__ = 'imgadm'
|
||||
__virtualname__ = "imgadm"
|
||||
IMGADM_STATE = {
|
||||
'first_run': True,
|
||||
'images': [],
|
||||
"first_run": True,
|
||||
"images": [],
|
||||
}
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Provides imgadm beacon on SmartOS
|
||||
'''
|
||||
if 'imgadm.list' in __salt__:
|
||||
"""
|
||||
if "imgadm.list" in __salt__:
|
||||
return True
|
||||
else:
|
||||
return (
|
||||
False,
|
||||
'{0} beacon can only be loaded on SmartOS compute nodes'.format(
|
||||
"{0} beacon can only be loaded on SmartOS compute nodes".format(
|
||||
__virtualname__
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
vcfg_ret = True
|
||||
vcfg_msg = 'Valid beacon configuration'
|
||||
vcfg_msg = "Valid beacon configuration"
|
||||
|
||||
if not isinstance(config, list):
|
||||
vcfg_ret = False
|
||||
vcfg_msg = 'Configuration for imgadm beacon must be a list!'
|
||||
vcfg_msg = "Configuration for imgadm beacon must be a list!"
|
||||
|
||||
return vcfg_ret, vcfg_msg
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Poll imgadm and compare available images
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
# NOTE: lookup current images
|
||||
current_images = __salt__['imgadm.list'](verbose=True)
|
||||
current_images = __salt__["imgadm.list"](verbose=True)
|
||||
|
||||
# NOTE: apply configuration
|
||||
if IMGADM_STATE['first_run']:
|
||||
log.info('Applying configuration for imgadm beacon')
|
||||
if IMGADM_STATE["first_run"]:
|
||||
log.info("Applying configuration for imgadm beacon")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'startup_import_event' not in _config or not _config['startup_import_event']:
|
||||
IMGADM_STATE['images'] = current_images
|
||||
if "startup_import_event" not in _config or not _config["startup_import_event"]:
|
||||
IMGADM_STATE["images"] = current_images
|
||||
|
||||
# NOTE: import events
|
||||
for uuid in current_images:
|
||||
event = {}
|
||||
if uuid not in IMGADM_STATE['images']:
|
||||
event['tag'] = "imported/{}".format(uuid)
|
||||
if uuid not in IMGADM_STATE["images"]:
|
||||
event["tag"] = "imported/{}".format(uuid)
|
||||
for label in current_images[uuid]:
|
||||
event[label] = current_images[uuid][label]
|
||||
|
||||
|
@ -97,23 +99,24 @@ def beacon(config):
|
|||
ret.append(event)
|
||||
|
||||
# NOTE: delete events
|
||||
for uuid in IMGADM_STATE['images']:
|
||||
for uuid in IMGADM_STATE["images"]:
|
||||
event = {}
|
||||
if uuid not in current_images:
|
||||
event['tag'] = "deleted/{}".format(uuid)
|
||||
for label in IMGADM_STATE['images'][uuid]:
|
||||
event[label] = IMGADM_STATE['images'][uuid][label]
|
||||
event["tag"] = "deleted/{}".format(uuid)
|
||||
for label in IMGADM_STATE["images"][uuid]:
|
||||
event[label] = IMGADM_STATE["images"][uuid][label]
|
||||
|
||||
if event:
|
||||
ret.append(event)
|
||||
|
||||
# NOTE: update stored state
|
||||
IMGADM_STATE['images'] = current_images
|
||||
IMGADM_STATE["images"] = current_images
|
||||
|
||||
# NOTE: disable first_run
|
||||
if IMGADM_STATE['first_run']:
|
||||
IMGADM_STATE['first_run'] = False
|
||||
if IMGADM_STATE["first_run"]:
|
||||
IMGADM_STATE["first_run"] = False
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon that fires events on vm state changes
|
||||
|
||||
.. code-block:: yaml
|
||||
|
@ -17,84 +17,85 @@ Beacon that fires events on vm state changes
|
|||
vmadm:
|
||||
- interval: 60
|
||||
- startup_create_event: True
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
__virtualname__ = 'vmadm'
|
||||
__virtualname__ = "vmadm"
|
||||
VMADM_STATE = {
|
||||
'first_run': True,
|
||||
'vms': [],
|
||||
"first_run": True,
|
||||
"vms": [],
|
||||
}
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Provides vmadm beacon on SmartOS
|
||||
'''
|
||||
if 'vmadm.list' in __salt__:
|
||||
"""
|
||||
if "vmadm.list" in __salt__:
|
||||
return True
|
||||
else:
|
||||
return (
|
||||
False,
|
||||
'{0} beacon can only be loaded on SmartOS compute nodes'.format(
|
||||
"{0} beacon can only be loaded on SmartOS compute nodes".format(
|
||||
__virtualname__
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
vcfg_ret = True
|
||||
vcfg_msg = 'Valid beacon configuration'
|
||||
vcfg_msg = "Valid beacon configuration"
|
||||
|
||||
if not isinstance(config, list):
|
||||
vcfg_ret = False
|
||||
vcfg_msg = 'Configuration for vmadm beacon must be a list!'
|
||||
vcfg_msg = "Configuration for vmadm beacon must be a list!"
|
||||
|
||||
return vcfg_ret, vcfg_msg
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Poll vmadm for changes
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
# NOTE: lookup current images
|
||||
current_vms = __salt__['vmadm.list'](
|
||||
keyed=True,
|
||||
order='uuid,state,alias,hostname,dns_domain',
|
||||
current_vms = __salt__["vmadm.list"](
|
||||
keyed=True, order="uuid,state,alias,hostname,dns_domain",
|
||||
)
|
||||
|
||||
# NOTE: apply configuration
|
||||
if VMADM_STATE['first_run']:
|
||||
log.info('Applying configuration for vmadm beacon')
|
||||
if VMADM_STATE["first_run"]:
|
||||
log.info("Applying configuration for vmadm beacon")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'startup_create_event' not in _config or not _config['startup_create_event']:
|
||||
VMADM_STATE['vms'] = current_vms
|
||||
if "startup_create_event" not in _config or not _config["startup_create_event"]:
|
||||
VMADM_STATE["vms"] = current_vms
|
||||
|
||||
# NOTE: create events
|
||||
for uuid in current_vms:
|
||||
event = {}
|
||||
if uuid not in VMADM_STATE['vms']:
|
||||
event['tag'] = "created/{}".format(uuid)
|
||||
if uuid not in VMADM_STATE["vms"]:
|
||||
event["tag"] = "created/{}".format(uuid)
|
||||
for label in current_vms[uuid]:
|
||||
if label == 'state':
|
||||
if label == "state":
|
||||
continue
|
||||
event[label] = current_vms[uuid][label]
|
||||
|
||||
|
@ -102,14 +103,14 @@ def beacon(config):
|
|||
ret.append(event)
|
||||
|
||||
# NOTE: deleted events
|
||||
for uuid in VMADM_STATE['vms']:
|
||||
for uuid in VMADM_STATE["vms"]:
|
||||
event = {}
|
||||
if uuid not in current_vms:
|
||||
event['tag'] = "deleted/{}".format(uuid)
|
||||
for label in VMADM_STATE['vms'][uuid]:
|
||||
if label == 'state':
|
||||
event["tag"] = "deleted/{}".format(uuid)
|
||||
for label in VMADM_STATE["vms"][uuid]:
|
||||
if label == "state":
|
||||
continue
|
||||
event[label] = VMADM_STATE['vms'][uuid][label]
|
||||
event[label] = VMADM_STATE["vms"][uuid][label]
|
||||
|
||||
if event:
|
||||
ret.append(event)
|
||||
|
@ -117,12 +118,17 @@ def beacon(config):
|
|||
# NOTE: state change events
|
||||
for uuid in current_vms:
|
||||
event = {}
|
||||
if VMADM_STATE['first_run'] or \
|
||||
uuid not in VMADM_STATE['vms'] or \
|
||||
current_vms[uuid].get('state', 'unknown') != VMADM_STATE['vms'][uuid].get('state', 'unknown'):
|
||||
event['tag'] = "{}/{}".format(current_vms[uuid].get('state', 'unknown'), uuid)
|
||||
if (
|
||||
VMADM_STATE["first_run"]
|
||||
or uuid not in VMADM_STATE["vms"]
|
||||
or current_vms[uuid].get("state", "unknown")
|
||||
!= VMADM_STATE["vms"][uuid].get("state", "unknown")
|
||||
):
|
||||
event["tag"] = "{}/{}".format(
|
||||
current_vms[uuid].get("state", "unknown"), uuid
|
||||
)
|
||||
for label in current_vms[uuid]:
|
||||
if label == 'state':
|
||||
if label == "state":
|
||||
continue
|
||||
event[label] = current_vms[uuid][label]
|
||||
|
||||
|
@ -130,12 +136,13 @@ def beacon(config):
|
|||
ret.append(event)
|
||||
|
||||
# NOTE: update stored state
|
||||
VMADM_STATE['vms'] = current_vms
|
||||
VMADM_STATE["vms"] = current_vms
|
||||
|
||||
# NOTE: disable first_run
|
||||
if VMADM_STATE['first_run']:
|
||||
VMADM_STATE['first_run'] = False
|
||||
if VMADM_STATE["first_run"]:
|
||||
VMADM_STATE["first_run"] = False
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The status beacon is intended to send a basic health check event up to the
|
||||
master, this allows for event driven routines based on presence to be set up.
|
||||
|
||||
|
@ -87,12 +87,14 @@ markers for specific list items:
|
|||
Not all status functions are supported for every operating system. Be certain
|
||||
to check the minion log for errors after configuring this beacon.
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import logging
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
|
||||
import salt.exceptions
|
||||
|
||||
# Import salt libs
|
||||
|
@ -100,16 +102,16 @@ import salt.utils.platform
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'status'
|
||||
__virtualname__ = "status"
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the the config is a dict
|
||||
'''
|
||||
"""
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for status beacon must be a list.')
|
||||
return True, 'Valid beacon configuration'
|
||||
return False, ("Configuration for status beacon must be a list.")
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -117,20 +119,22 @@ def __virtual__():
|
|||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Return status for requested information
|
||||
'''
|
||||
"""
|
||||
log.debug(config)
|
||||
ctime = datetime.datetime.utcnow().isoformat()
|
||||
|
||||
if len(config) < 1:
|
||||
config = [{
|
||||
'loadavg': ['all'],
|
||||
'cpustats': ['all'],
|
||||
'meminfo': ['all'],
|
||||
'vmstats': ['all'],
|
||||
'time': ['all'],
|
||||
}]
|
||||
config = [
|
||||
{
|
||||
"loadavg": ["all"],
|
||||
"cpustats": ["all"],
|
||||
"meminfo": ["all"],
|
||||
"vmstats": ["all"],
|
||||
"time": ["all"],
|
||||
}
|
||||
]
|
||||
|
||||
if not isinstance(config, list):
|
||||
# To support the old dictionary config format
|
||||
|
@ -141,17 +145,21 @@ def beacon(config):
|
|||
for func in entry:
|
||||
ret[func] = {}
|
||||
try:
|
||||
data = __salt__['status.{0}'.format(func)]()
|
||||
data = __salt__["status.{0}".format(func)]()
|
||||
except salt.exceptions.CommandExecutionError as exc:
|
||||
log.debug('Status beacon attempted to process function %s '
|
||||
'but encountered error: %s', func, exc)
|
||||
log.debug(
|
||||
"Status beacon attempted to process function %s "
|
||||
"but encountered error: %s",
|
||||
func,
|
||||
exc,
|
||||
)
|
||||
continue
|
||||
if not isinstance(entry[func], list):
|
||||
func_items = [entry[func]]
|
||||
else:
|
||||
func_items = entry[func]
|
||||
for item in func_items:
|
||||
if item == 'all':
|
||||
if item == "all":
|
||||
ret[func] = data
|
||||
else:
|
||||
try:
|
||||
|
@ -160,9 +168,8 @@ def beacon(config):
|
|||
except TypeError:
|
||||
ret[func][item] = data[int(item)]
|
||||
except KeyError as exc:
|
||||
ret[func] = 'Status beacon is incorrectly configured: {0}'.format(exc)
|
||||
ret[
|
||||
func
|
||||
] = "Status beacon is incorrectly configured: {0}".format(exc)
|
||||
|
||||
return [{
|
||||
'tag': ctime,
|
||||
'data': ret,
|
||||
}]
|
||||
return [{"tag": ctime, "data": ret}]
|
||||
|
|
|
@ -1,20 +1,23 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to emit Telegram messages
|
||||
|
||||
Requires the python-telegram-bot library
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# Import 3rd Party libs
|
||||
try:
|
||||
import telegram
|
||||
logging.getLogger('telegram').setLevel(logging.CRITICAL)
|
||||
|
||||
logging.getLogger("telegram").setLevel(logging.CRITICAL)
|
||||
HAS_TELEGRAM = True
|
||||
except ImportError:
|
||||
HAS_TELEGRAM = False
|
||||
|
@ -22,7 +25,7 @@ except ImportError:
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
__virtualname__ = 'telegram_bot_msg'
|
||||
__virtualname__ = "telegram_bot_msg"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -33,30 +36,37 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for telegram_bot_msg '
|
||||
'beacon must be a list.')
|
||||
return False, ("Configuration for telegram_bot_msg beacon must be a list.")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if not all(_config.get(required_config)
|
||||
for required_config in ['token', 'accept_from']):
|
||||
return False, ('Not all required configuration for '
|
||||
'telegram_bot_msg are set.')
|
||||
if not all(
|
||||
_config.get(required_config) for required_config in ["token", "accept_from"]
|
||||
):
|
||||
return (
|
||||
False,
|
||||
("Not all required configuration for telegram_bot_msg are set."),
|
||||
)
|
||||
|
||||
if not isinstance(_config.get('accept_from'), list):
|
||||
return False, ('Configuration for telegram_bot_msg, '
|
||||
'accept_from must be a list of usernames.')
|
||||
if not isinstance(_config.get("accept_from"), list):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for telegram_bot_msg, "
|
||||
"accept_from must be a list of usernames."
|
||||
),
|
||||
)
|
||||
|
||||
return True, 'Valid beacon configuration.'
|
||||
return True, "Valid beacon configuration."
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Emit a dict with a key "msgs" whose value is a list of messages
|
||||
sent to the configured bot by one of the allowed usernames.
|
||||
|
||||
|
@ -69,22 +79,22 @@ def beacon(config):
|
|||
- "<valid username>"
|
||||
- interval: 10
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
log.debug('telegram_bot_msg beacon starting')
|
||||
log.debug("telegram_bot_msg beacon starting")
|
||||
ret = []
|
||||
output = {}
|
||||
output['msgs'] = []
|
||||
output["msgs"] = []
|
||||
|
||||
bot = telegram.Bot(_config['token'])
|
||||
bot = telegram.Bot(_config["token"])
|
||||
updates = bot.get_updates(limit=100, timeout=0, network_delay=10)
|
||||
|
||||
log.debug('Num updates: %d', len(updates))
|
||||
log.debug("Num updates: %d", len(updates))
|
||||
if not updates:
|
||||
log.debug('Telegram Bot beacon has no new messages')
|
||||
log.debug("Telegram Bot beacon has no new messages")
|
||||
return ret
|
||||
|
||||
latest_update_id = 0
|
||||
|
@ -94,13 +104,13 @@ def beacon(config):
|
|||
if update.update_id > latest_update_id:
|
||||
latest_update_id = update.update_id
|
||||
|
||||
if message.chat.username in _config['accept_from']:
|
||||
output['msgs'].append(message.to_dict())
|
||||
if message.chat.username in _config["accept_from"]:
|
||||
output["msgs"].append(message.to_dict())
|
||||
|
||||
# mark in the server that previous messages are processed
|
||||
bot.get_updates(offset=latest_update_id + 1)
|
||||
|
||||
log.debug('Emitting %d messages.', len(output['msgs']))
|
||||
if output['msgs']:
|
||||
log.debug("Emitting %d messages.", len(output["msgs"]))
|
||||
if output["msgs"]:
|
||||
ret.append(output)
|
||||
return ret
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to emit Twilio text messages
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
@ -14,9 +14,10 @@ from salt.ext.six.moves import map
|
|||
# Import 3rd Party libs
|
||||
try:
|
||||
import twilio
|
||||
|
||||
# Grab version, ensure elements are ints
|
||||
twilio_version = tuple([int(x) for x in twilio.__version_info__])
|
||||
if twilio_version > (5, ):
|
||||
if twilio_version > (5,):
|
||||
from twilio.rest import Client as TwilioRestClient
|
||||
else:
|
||||
from twilio.rest import TwilioRestClient
|
||||
|
@ -26,7 +27,7 @@ except ImportError:
|
|||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'twilio_txt_msg'
|
||||
__virtualname__ = "twilio_txt_msg"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
|
@ -37,28 +38,32 @@ def __virtual__():
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
# Configuration for twilio_txt_msg beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
return False, ('Configuration for twilio_txt_msg beacon '
|
||||
'must be a list.')
|
||||
return False, ("Configuration for twilio_txt_msg beacon must be a list.")
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if not all(x in _config for x in ('account_sid',
|
||||
'auth_token',
|
||||
'twilio_number')):
|
||||
return False, ('Configuration for twilio_txt_msg beacon '
|
||||
'must contain account_sid, auth_token '
|
||||
'and twilio_number items.')
|
||||
return True, 'Valid beacon configuration'
|
||||
if not all(
|
||||
x in _config for x in ("account_sid", "auth_token", "twilio_number")
|
||||
):
|
||||
return (
|
||||
False,
|
||||
(
|
||||
"Configuration for twilio_txt_msg beacon "
|
||||
"must contain account_sid, auth_token "
|
||||
"and twilio_number items."
|
||||
),
|
||||
)
|
||||
return True, "Valid beacon configuration"
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Emit a dict name "texts" whose value is a list
|
||||
of texts.
|
||||
|
||||
|
@ -71,40 +76,40 @@ def beacon(config):
|
|||
- twilio_number: "+15555555555"
|
||||
- interval: 10
|
||||
|
||||
'''
|
||||
log.trace('twilio_txt_msg beacon starting')
|
||||
"""
|
||||
log.trace("twilio_txt_msg beacon starting")
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
ret = []
|
||||
if not all([_config['account_sid'],
|
||||
_config['auth_token'],
|
||||
_config['twilio_number']]):
|
||||
if not all(
|
||||
[_config["account_sid"], _config["auth_token"], _config["twilio_number"]]
|
||||
):
|
||||
return ret
|
||||
output = {}
|
||||
output['texts'] = []
|
||||
client = TwilioRestClient(_config['account_sid'], _config['auth_token'])
|
||||
messages = client.messages.list(to=_config['twilio_number'])
|
||||
log.trace('Num messages: %d', len(messages))
|
||||
output["texts"] = []
|
||||
client = TwilioRestClient(_config["account_sid"], _config["auth_token"])
|
||||
messages = client.messages.list(to=_config["twilio_number"])
|
||||
log.trace("Num messages: %d", len(messages))
|
||||
if len(messages) < 1:
|
||||
log.trace('Twilio beacon has no texts')
|
||||
log.trace("Twilio beacon has no texts")
|
||||
return ret
|
||||
|
||||
for message in messages:
|
||||
item = {}
|
||||
item['id'] = six.text_type(message.sid)
|
||||
item['body'] = six.text_type(message.body)
|
||||
item['from'] = six.text_type(message.from_)
|
||||
item['sent'] = six.text_type(message.date_sent)
|
||||
item['images'] = []
|
||||
item["id"] = six.text_type(message.sid)
|
||||
item["body"] = six.text_type(message.body)
|
||||
item["from"] = six.text_type(message.from_)
|
||||
item["sent"] = six.text_type(message.date_sent)
|
||||
item["images"] = []
|
||||
|
||||
if int(message.num_media):
|
||||
media = client.media(message.sid).list()
|
||||
if media:
|
||||
for pic in media:
|
||||
item['images'].append(six.text_type(pic.uri))
|
||||
output['texts'].append(item)
|
||||
item["images"].append(six.text_type(pic.uri))
|
||||
output["texts"].append(item)
|
||||
message.delete()
|
||||
ret.append(output)
|
||||
return ret
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
watchdog beacon
|
||||
|
||||
.. versionadded:: 2019.2.0
|
||||
|
@ -8,9 +8,10 @@ Watch files and translate the changes into salt events
|
|||
|
||||
:depends: - watchdog Python module >= 0.8.3
|
||||
|
||||
'''
|
||||
"""
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import collections
|
||||
import logging
|
||||
|
||||
|
@ -20,24 +21,27 @@ from salt.ext.six.moves import map
|
|||
try:
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
|
||||
HAS_WATCHDOG = True
|
||||
except ImportError:
|
||||
HAS_WATCHDOG = False
|
||||
|
||||
class FileSystemEventHandler(object):
|
||||
""" A dummy class to make the import work """
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
__virtualname__ = 'watchdog'
|
||||
|
||||
__virtualname__ = "watchdog"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_MASK = [
|
||||
'create',
|
||||
'delete',
|
||||
'modify',
|
||||
'move',
|
||||
"create",
|
||||
"delete",
|
||||
"modify",
|
||||
"move",
|
||||
]
|
||||
|
||||
|
||||
|
@ -48,16 +52,16 @@ class Handler(FileSystemEventHandler):
|
|||
self.queue = queue
|
||||
|
||||
def on_created(self, event):
|
||||
self._append_if_mask(event, 'create')
|
||||
self._append_if_mask(event, "create")
|
||||
|
||||
def on_modified(self, event):
|
||||
self._append_if_mask(event, 'modify')
|
||||
self._append_if_mask(event, "modify")
|
||||
|
||||
def on_deleted(self, event):
|
||||
self._append_if_mask(event, 'delete')
|
||||
self._append_if_mask(event, "delete")
|
||||
|
||||
def on_moved(self, event):
|
||||
self._append_if_mask(event, 'move')
|
||||
self._append_if_mask(event, "move")
|
||||
|
||||
def _append_if_mask(self, event, mask):
|
||||
logging.debug(event)
|
||||
|
@ -76,25 +80,25 @@ def __virtual__():
|
|||
|
||||
|
||||
def _get_queue(config):
|
||||
'''
|
||||
"""
|
||||
Check the context for the notifier and construct it if not present
|
||||
'''
|
||||
"""
|
||||
|
||||
if 'watchdog.observer' not in __context__:
|
||||
if "watchdog.observer" not in __context__:
|
||||
queue = collections.deque()
|
||||
observer = Observer()
|
||||
for path in config.get('directories', {}):
|
||||
path_params = config.get('directories').get(path)
|
||||
masks = path_params.get('mask', DEFAULT_MASK)
|
||||
for path in config.get("directories", {}):
|
||||
path_params = config.get("directories").get(path)
|
||||
masks = path_params.get("mask", DEFAULT_MASK)
|
||||
event_handler = Handler(queue, masks)
|
||||
observer.schedule(event_handler, path)
|
||||
|
||||
observer.start()
|
||||
|
||||
__context__['watchdog.observer'] = observer
|
||||
__context__['watchdog.queue'] = queue
|
||||
__context__["watchdog.observer"] = observer
|
||||
__context__["watchdog.queue"] = queue
|
||||
|
||||
return __context__['watchdog.queue']
|
||||
return __context__["watchdog.queue"]
|
||||
|
||||
|
||||
class ValidationError(Exception):
|
||||
|
@ -102,76 +106,74 @@ class ValidationError(Exception):
|
|||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
|
||||
try:
|
||||
_validate(config)
|
||||
return True, 'Valid beacon configuration'
|
||||
return True, "Valid beacon configuration"
|
||||
except ValidationError as error:
|
||||
return False, str(error)
|
||||
|
||||
|
||||
def _validate(config):
|
||||
if not isinstance(config, list):
|
||||
raise ValidationError(
|
||||
'Configuration for watchdog beacon must be a list.')
|
||||
raise ValidationError("Configuration for watchdog beacon must be a list.")
|
||||
|
||||
_config = {}
|
||||
for part in config:
|
||||
_config.update(part)
|
||||
|
||||
if 'directories' not in _config:
|
||||
if "directories" not in _config:
|
||||
raise ValidationError(
|
||||
'Configuration for watchdog beacon must include directories.')
|
||||
"Configuration for watchdog beacon must include directories."
|
||||
)
|
||||
|
||||
if not isinstance(_config['directories'], dict):
|
||||
if not isinstance(_config["directories"], dict):
|
||||
raise ValidationError(
|
||||
'Configuration for watchdog beacon directories must be a '
|
||||
'dictionary.')
|
||||
"Configuration for watchdog beacon directories must be a " "dictionary."
|
||||
)
|
||||
|
||||
for path in _config['directories']:
|
||||
_validate_path(_config['directories'][path])
|
||||
for path in _config["directories"]:
|
||||
_validate_path(_config["directories"][path])
|
||||
|
||||
|
||||
def _validate_path(path_config):
|
||||
if not isinstance(path_config, dict):
|
||||
raise ValidationError(
|
||||
'Configuration for watchdog beacon directory path must be '
|
||||
'a dictionary.')
|
||||
"Configuration for watchdog beacon directory path must be " "a dictionary."
|
||||
)
|
||||
|
||||
if 'mask' in path_config:
|
||||
_validate_mask(path_config['mask'])
|
||||
if "mask" in path_config:
|
||||
_validate_mask(path_config["mask"])
|
||||
|
||||
|
||||
def _validate_mask(mask_config):
|
||||
valid_mask = [
|
||||
'create',
|
||||
'modify',
|
||||
'delete',
|
||||
'move',
|
||||
"create",
|
||||
"modify",
|
||||
"delete",
|
||||
"move",
|
||||
]
|
||||
|
||||
if not isinstance(mask_config, list):
|
||||
raise ValidationError(
|
||||
'Configuration for watchdog beacon mask must be list.')
|
||||
raise ValidationError("Configuration for watchdog beacon mask must be list.")
|
||||
|
||||
if any(mask not in valid_mask for mask in mask_config):
|
||||
raise ValidationError(
|
||||
'Configuration for watchdog beacon contains invalid mask')
|
||||
raise ValidationError("Configuration for watchdog beacon contains invalid mask")
|
||||
|
||||
|
||||
def to_salt_event(event):
|
||||
return {
|
||||
'tag': __virtualname__,
|
||||
'path': event.src_path,
|
||||
'change': event.event_type,
|
||||
"tag": __virtualname__,
|
||||
"path": event.src_path,
|
||||
"change": event.event_type,
|
||||
}
|
||||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Watch the configured directories
|
||||
|
||||
Example Config
|
||||
|
@ -194,7 +196,7 @@ def beacon(config):
|
|||
* modify - The watched directory is modified
|
||||
* delete - File or directory is deleted from watched directory
|
||||
* move - File or directory is moved or renamed in the watched directory
|
||||
'''
|
||||
"""
|
||||
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
@ -209,7 +211,7 @@ def beacon(config):
|
|||
|
||||
|
||||
def close(config):
|
||||
observer = __context__.pop('watchdog.observer', None)
|
||||
observer = __context__.pop("watchdog.observer", None)
|
||||
|
||||
if observer:
|
||||
observer.stop()
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Beacon to fire events at login of users as registered in the wtmp file
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
|
@ -115,43 +115,47 @@ Match the event like so in the master config file:
|
|||
API key to post to Slack, a bot user is likely better suited for this. The
|
||||
:py:mod:`slack engine <salt.engines.slack>` documentation has information
|
||||
on how to set up a bot user.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import struct
|
||||
|
||||
import salt.utils.files
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.stringutils
|
||||
import salt.utils.files
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
# pylint: disable=import-error
|
||||
from salt.ext.six.moves import map
|
||||
|
||||
# pylint: enable=import-error
|
||||
|
||||
__virtualname__ = 'wtmp'
|
||||
WTMP = '/var/log/wtmp'
|
||||
FMT = b'hi32s4s32s256shhiii4i20x'
|
||||
__virtualname__ = "wtmp"
|
||||
WTMP = "/var/log/wtmp"
|
||||
FMT = b"hi32s4s32s256shhiii4i20x"
|
||||
FIELDS = [
|
||||
'type',
|
||||
'PID',
|
||||
'line',
|
||||
'inittab',
|
||||
'user',
|
||||
'hostname',
|
||||
'exit_status',
|
||||
'session',
|
||||
'time',
|
||||
'addr'
|
||||
"type",
|
||||
"PID",
|
||||
"line",
|
||||
"inittab",
|
||||
"user",
|
||||
"hostname",
|
||||
"exit_status",
|
||||
"session",
|
||||
"time",
|
||||
"addr",
|
||||
]
|
||||
SIZE = struct.calcsize(FMT)
|
||||
LOC_KEY = 'wtmp.loc'
|
||||
TTY_KEY_PREFIX = 'wtmp.tty.'
|
||||
LOC_KEY = "wtmp.loc"
|
||||
TTY_KEY_PREFIX = "wtmp.tty."
|
||||
LOGIN_TYPE = 7
|
||||
LOGOUT_TYPE = 8
|
||||
|
||||
|
@ -160,6 +164,7 @@ log = logging.getLogger(__name__)
|
|||
# pylint: disable=import-error
|
||||
try:
|
||||
import dateutil.parser as dateutil_parser
|
||||
|
||||
_TIME_SUPPORTED = True
|
||||
except ImportError:
|
||||
_TIME_SUPPORTED = False
|
||||
|
@ -172,119 +177,110 @@ def __virtual__():
|
|||
|
||||
|
||||
def _validate_time_range(trange, status, msg):
|
||||
'''
|
||||
"""
|
||||
Check time range
|
||||
'''
|
||||
"""
|
||||
# If trange is empty, just return the current status & msg
|
||||
if not trange:
|
||||
return status, msg
|
||||
|
||||
if not isinstance(trange, dict):
|
||||
status = False
|
||||
msg = ('The time_range parameter for '
|
||||
'wtmp beacon must '
|
||||
'be a dictionary.')
|
||||
msg = "The time_range parameter for " "wtmp beacon must " "be a dictionary."
|
||||
|
||||
if not all(k in trange for k in ('start', 'end')):
|
||||
if not all(k in trange for k in ("start", "end")):
|
||||
status = False
|
||||
msg = ('The time_range parameter for '
|
||||
'wtmp beacon must contain '
|
||||
'start & end options.')
|
||||
msg = (
|
||||
"The time_range parameter for "
|
||||
"wtmp beacon must contain "
|
||||
"start & end options."
|
||||
)
|
||||
|
||||
return status, msg
|
||||
|
||||
|
||||
def _gather_group_members(group, groups, users):
|
||||
'''
|
||||
"""
|
||||
Gather group members
|
||||
'''
|
||||
_group = __salt__['group.info'](group)
|
||||
"""
|
||||
_group = __salt__["group.info"](group)
|
||||
|
||||
if not _group:
|
||||
log.warning('Group %s does not exist, ignoring.', group)
|
||||
log.warning("Group %s does not exist, ignoring.", group)
|
||||
return
|
||||
|
||||
for member in _group['members']:
|
||||
for member in _group["members"]:
|
||||
if member not in users:
|
||||
users[member] = groups[group]
|
||||
|
||||
|
||||
def _check_time_range(time_range, now):
|
||||
'''
|
||||
"""
|
||||
Check time range
|
||||
'''
|
||||
"""
|
||||
if _TIME_SUPPORTED:
|
||||
_start = dateutil_parser.parse(time_range['start'])
|
||||
_end = dateutil_parser.parse(time_range['end'])
|
||||
_start = dateutil_parser.parse(time_range["start"])
|
||||
_end = dateutil_parser.parse(time_range["end"])
|
||||
|
||||
return bool(_start <= now <= _end)
|
||||
else:
|
||||
log.error('Dateutil is required.')
|
||||
log.error("Dateutil is required.")
|
||||
return False
|
||||
|
||||
|
||||
def _get_loc():
|
||||
'''
|
||||
"""
|
||||
return the active file location
|
||||
'''
|
||||
"""
|
||||
if LOC_KEY in __context__:
|
||||
return __context__[LOC_KEY]
|
||||
|
||||
|
||||
def validate(config):
|
||||
'''
|
||||
"""
|
||||
Validate the beacon configuration
|
||||
'''
|
||||
"""
|
||||
vstatus = True
|
||||
vmsg = 'Valid beacon configuration'
|
||||
vmsg = "Valid beacon configuration"
|
||||
|
||||
# Configuration for wtmp beacon should be a list of dicts
|
||||
if not isinstance(config, list):
|
||||
vstatus = False
|
||||
vmsg = ('Configuration for wtmp beacon must be a list.')
|
||||
vmsg = "Configuration for wtmp beacon must be a list."
|
||||
else:
|
||||
_config = {}
|
||||
list(map(_config.update, config))
|
||||
|
||||
if 'users' in _config:
|
||||
if not isinstance(_config['users'], dict):
|
||||
if "users" in _config:
|
||||
if not isinstance(_config["users"], dict):
|
||||
vstatus = False
|
||||
vmsg = ('User configuration for wtmp beacon must '
|
||||
'be a dictionary.')
|
||||
vmsg = "User configuration for wtmp beacon must " "be a dictionary."
|
||||
else:
|
||||
for user in _config['users']:
|
||||
_time_range = _config['users'][user].get('time_range', {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range,
|
||||
vstatus,
|
||||
vmsg)
|
||||
for user in _config["users"]:
|
||||
_time_range = _config["users"][user].get("time_range", {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range, vstatus, vmsg)
|
||||
|
||||
if not vstatus:
|
||||
return vstatus, vmsg
|
||||
|
||||
if 'groups' in _config:
|
||||
if not isinstance(_config['groups'], dict):
|
||||
if "groups" in _config:
|
||||
if not isinstance(_config["groups"], dict):
|
||||
vstatus = False
|
||||
vmsg = ('Group configuration for wtmp beacon must '
|
||||
'be a dictionary.')
|
||||
vmsg = "Group configuration for wtmp beacon must " "be a dictionary."
|
||||
else:
|
||||
for group in _config['groups']:
|
||||
_time_range = _config['groups'][group].get('time_range', {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range,
|
||||
vstatus,
|
||||
vmsg)
|
||||
for group in _config["groups"]:
|
||||
_time_range = _config["groups"][group].get("time_range", {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range, vstatus, vmsg)
|
||||
if not vstatus:
|
||||
return vstatus, vmsg
|
||||
|
||||
if 'defaults' in _config:
|
||||
if not isinstance(_config['defaults'], dict):
|
||||
if "defaults" in _config:
|
||||
if not isinstance(_config["defaults"], dict):
|
||||
vstatus = False
|
||||
vmsg = ('Defaults configuration for wtmp beacon must '
|
||||
'be a dictionary.')
|
||||
vmsg = "Defaults configuration for wtmp beacon must " "be a dictionary."
|
||||
else:
|
||||
_time_range = _config['defaults'].get('time_range', {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range,
|
||||
vstatus,
|
||||
vmsg)
|
||||
_time_range = _config["defaults"].get("time_range", {})
|
||||
vstatus, vmsg = _validate_time_range(_time_range, vstatus, vmsg)
|
||||
if not vstatus:
|
||||
return vstatus, vmsg
|
||||
|
||||
|
@ -292,9 +288,9 @@ def validate(config):
|
|||
|
||||
|
||||
def beacon(config):
|
||||
'''
|
||||
"""
|
||||
Read the last wtmp file and return information on the logins
|
||||
'''
|
||||
"""
|
||||
ret = []
|
||||
|
||||
users = {}
|
||||
|
@ -305,26 +301,26 @@ def beacon(config):
|
|||
logout_type = LOGOUT_TYPE
|
||||
|
||||
for config_item in config:
|
||||
if 'users' in config_item:
|
||||
users = config_item['users']
|
||||
if "users" in config_item:
|
||||
users = config_item["users"]
|
||||
|
||||
if 'groups' in config_item:
|
||||
groups = config_item['groups']
|
||||
if "groups" in config_item:
|
||||
groups = config_item["groups"]
|
||||
|
||||
if 'defaults' in config_item:
|
||||
defaults = config_item['defaults']
|
||||
if "defaults" in config_item:
|
||||
defaults = config_item["defaults"]
|
||||
|
||||
if config_item == 'ut_type':
|
||||
if config_item == "ut_type":
|
||||
try:
|
||||
login_type = config_item['ut_type']['login']
|
||||
login_type = config_item["ut_type"]["login"]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
logout_type = config_item['ut_type']['logout']
|
||||
logout_type = config_item["ut_type"]["logout"]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
with salt.utils.files.fopen(WTMP, 'rb') as fp_:
|
||||
with salt.utils.files.fopen(WTMP, "rb") as fp_:
|
||||
loc = __context__.get(LOC_KEY, 0)
|
||||
if loc == 0:
|
||||
fp_.seek(0, 2)
|
||||
|
@ -345,16 +341,20 @@ def beacon(config):
|
|||
if isinstance(event[field], six.string_types):
|
||||
if isinstance(event[field], bytes):
|
||||
event[field] = salt.utils.stringutils.to_unicode(event[field])
|
||||
event[field] = event[field].strip('\x00')
|
||||
event[field] = event[field].strip("\x00")
|
||||
|
||||
if event['type'] == login_type:
|
||||
event['action'] = 'login'
|
||||
if event["type"] == login_type:
|
||||
event["action"] = "login"
|
||||
# Store the tty to identify the logout event
|
||||
__context__['{0}{1}'.format(TTY_KEY_PREFIX, event['line'])] = event['user']
|
||||
elif event['type'] == logout_type:
|
||||
event['action'] = 'logout'
|
||||
__context__["{0}{1}".format(TTY_KEY_PREFIX, event["line"])] = event[
|
||||
"user"
|
||||
]
|
||||
elif event["type"] == logout_type:
|
||||
event["action"] = "logout"
|
||||
try:
|
||||
event['user'] = __context__.pop('{0}{1}'.format(TTY_KEY_PREFIX, event['line']))
|
||||
event["user"] = __context__.pop(
|
||||
"{0}{1}".format(TTY_KEY_PREFIX, event["line"])
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
@ -362,21 +362,20 @@ def beacon(config):
|
|||
_gather_group_members(group, groups, users)
|
||||
|
||||
if users:
|
||||
if event['user'] in users:
|
||||
_user = users[event['user']]
|
||||
if isinstance(_user, dict) and 'time_range' in _user:
|
||||
if _check_time_range(_user['time_range'], now):
|
||||
if event["user"] in users:
|
||||
_user = users[event["user"]]
|
||||
if isinstance(_user, dict) and "time_range" in _user:
|
||||
if _check_time_range(_user["time_range"], now):
|
||||
ret.append(event)
|
||||
else:
|
||||
if defaults and 'time_range' in defaults:
|
||||
if _check_time_range(defaults['time_range'],
|
||||
now):
|
||||
if defaults and "time_range" in defaults:
|
||||
if _check_time_range(defaults["time_range"], now):
|
||||
ret.append(event)
|
||||
else:
|
||||
ret.append(event)
|
||||
else:
|
||||
if defaults and 'time_range' in defaults:
|
||||
if _check_time_range(defaults['time_range'], now):
|
||||
if defaults and "time_range" in defaults:
|
||||
if _check_time_range(defaults["time_range"], now):
|
||||
ret.append(event)
|
||||
else:
|
||||
ret.append(event)
|
||||
|
|
91
salt/cache/__init__.py
vendored
91
salt/cache/__init__.py
vendored
|
@ -1,33 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Loader mechanism for caching data, with data expiration, etc.
|
||||
|
||||
.. versionadded:: 2016.11.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
# Import Salt libs
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.syspaths
|
||||
from salt.ext import six
|
||||
from salt.payload import Serial
|
||||
from salt.utils.odict import OrderedDict
|
||||
import salt.loader
|
||||
import salt.syspaths
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def factory(opts, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Creates and returns the cache class.
|
||||
If memory caching is enabled by opts MemCache class will be instantiated.
|
||||
If not Cache class will be returned.
|
||||
'''
|
||||
if opts.get('memcache_expire_seconds', 0):
|
||||
"""
|
||||
if opts.get("memcache_expire_seconds", 0):
|
||||
cls = MemCache
|
||||
else:
|
||||
cls = Cache
|
||||
|
@ -35,7 +36,7 @@ def factory(opts, **kwargs):
|
|||
|
||||
|
||||
class Cache(object):
|
||||
'''
|
||||
"""
|
||||
Base caching object providing access to the modular cache subsystem.
|
||||
|
||||
Related configuration options:
|
||||
|
@ -66,22 +67,23 @@ class Cache(object):
|
|||
|
||||
Key name is a string identifier of a data container (like a file inside a
|
||||
directory) which will hold the data.
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts, cachedir=None, **kwargs):
|
||||
self.opts = opts
|
||||
if cachedir is None:
|
||||
self.cachedir = opts.get('cachedir', salt.syspaths.CACHE_DIR)
|
||||
self.cachedir = opts.get("cachedir", salt.syspaths.CACHE_DIR)
|
||||
else:
|
||||
self.cachedir = cachedir
|
||||
self.driver = opts.get('cache', salt.config.DEFAULT_MASTER_OPTS['cache'])
|
||||
self.driver = opts.get("cache", salt.config.DEFAULT_MASTER_OPTS["cache"])
|
||||
self.serial = Serial(opts)
|
||||
self._modules = None
|
||||
self._kwargs = kwargs
|
||||
self._kwargs['cachedir'] = self.cachedir
|
||||
self._kwargs["cachedir"] = self.cachedir
|
||||
|
||||
def __lazy_init(self):
|
||||
self._modules = salt.loader.cache(self.opts, self.serial)
|
||||
fun = '{0}.init_kwargs'.format(self.driver)
|
||||
fun = "{0}.init_kwargs".format(self.driver)
|
||||
if fun in self.modules:
|
||||
self._kwargs = self.modules[fun](self._kwargs)
|
||||
else:
|
||||
|
@ -94,7 +96,7 @@ class Cache(object):
|
|||
return self._modules
|
||||
|
||||
def cache(self, bank, key, fun, loop_fun=None, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Check cache for the data. If it is there, check to see if it needs to
|
||||
be refreshed.
|
||||
|
||||
|
@ -106,8 +108,8 @@ class Cache(object):
|
|||
the second function is passed in as ``loop_fun``. Each item in the
|
||||
return list from the first function will be the only argument for the
|
||||
second function.
|
||||
'''
|
||||
expire_seconds = kwargs.get('expire', 86400) # 1 day
|
||||
"""
|
||||
expire_seconds = kwargs.get("expire", 86400) # 1 day
|
||||
|
||||
updated = self.updated(bank, key)
|
||||
update_cache = False
|
||||
|
@ -132,7 +134,7 @@ class Cache(object):
|
|||
return data
|
||||
|
||||
def store(self, bank, key, data):
|
||||
'''
|
||||
"""
|
||||
Store data using the specified module
|
||||
|
||||
:param bank:
|
||||
|
@ -151,12 +153,12 @@ class Cache(object):
|
|||
:raises SaltCacheError:
|
||||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
'''
|
||||
fun = '{0}.store'.format(self.driver)
|
||||
"""
|
||||
fun = "{0}.store".format(self.driver)
|
||||
return self.modules[fun](bank, key, data, **self._kwargs)
|
||||
|
||||
def fetch(self, bank, key):
|
||||
'''
|
||||
"""
|
||||
Fetch data using the specified module
|
||||
|
||||
:param bank:
|
||||
|
@ -175,12 +177,12 @@ class Cache(object):
|
|||
:raises SaltCacheError:
|
||||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
'''
|
||||
fun = '{0}.fetch'.format(self.driver)
|
||||
"""
|
||||
fun = "{0}.fetch".format(self.driver)
|
||||
return self.modules[fun](bank, key, **self._kwargs)
|
||||
|
||||
def updated(self, bank, key):
|
||||
'''
|
||||
"""
|
||||
Get the last updated epoch for the specified key
|
||||
|
||||
:param bank:
|
||||
|
@ -199,12 +201,12 @@ class Cache(object):
|
|||
:raises SaltCacheError:
|
||||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
'''
|
||||
fun = '{0}.updated'.format(self.driver)
|
||||
"""
|
||||
fun = "{0}.updated".format(self.driver)
|
||||
return self.modules[fun](bank, key, **self._kwargs)
|
||||
|
||||
def flush(self, bank, key=None):
|
||||
'''
|
||||
"""
|
||||
Remove the key from the cache bank with all the key content. If no key is specified remove
|
||||
the entire bank with all keys and sub-banks inside.
|
||||
|
||||
|
@ -220,12 +222,12 @@ class Cache(object):
|
|||
:raises SaltCacheError:
|
||||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
'''
|
||||
fun = '{0}.flush'.format(self.driver)
|
||||
"""
|
||||
fun = "{0}.flush".format(self.driver)
|
||||
return self.modules[fun](bank, key=key, **self._kwargs)
|
||||
|
||||
def list(self, bank):
|
||||
'''
|
||||
"""
|
||||
Lists entries stored in the specified bank.
|
||||
|
||||
:param bank:
|
||||
|
@ -239,12 +241,12 @@ class Cache(object):
|
|||
:raises SaltCacheError:
|
||||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
'''
|
||||
fun = '{0}.list'.format(self.driver)
|
||||
"""
|
||||
fun = "{0}.list".format(self.driver)
|
||||
return self.modules[fun](bank, **self._kwargs)
|
||||
|
||||
def contains(self, bank, key=None):
|
||||
'''
|
||||
"""
|
||||
Checks if the specified bank contains the specified key.
|
||||
|
||||
:param bank:
|
||||
|
@ -264,25 +266,26 @@ class Cache(object):
|
|||
:raises SaltCacheError:
|
||||
Raises an exception if cache driver detected an error accessing data
|
||||
in the cache backend (auth, permissions, etc).
|
||||
'''
|
||||
fun = '{0}.contains'.format(self.driver)
|
||||
"""
|
||||
fun = "{0}.contains".format(self.driver)
|
||||
return self.modules[fun](bank, key, **self._kwargs)
|
||||
|
||||
|
||||
class MemCache(Cache):
|
||||
'''
|
||||
"""
|
||||
Short-lived in-memory cache store keeping values on time and/or size (count)
|
||||
basis.
|
||||
'''
|
||||
"""
|
||||
|
||||
# {<storage_id>: odict({<key>: [atime, data], ...}), ...}
|
||||
data = {}
|
||||
|
||||
def __init__(self, opts, **kwargs):
|
||||
super(MemCache, self).__init__(opts, **kwargs)
|
||||
self.expire = opts.get('memcache_expire_seconds', 10)
|
||||
self.max = opts.get('memcache_max_items', 1024)
|
||||
self.cleanup = opts.get('memcache_full_cleanup', False)
|
||||
self.debug = opts.get('memcache_debug', False)
|
||||
self.expire = opts.get("memcache_expire_seconds", 10)
|
||||
self.max = opts.get("memcache_max_items", 1024)
|
||||
self.cleanup = opts.get("memcache_full_cleanup", False)
|
||||
self.debug = opts.get("memcache_debug", False)
|
||||
if self.debug:
|
||||
self.call = 0
|
||||
self.hit = 0
|
||||
|
@ -299,7 +302,7 @@ class MemCache(Cache):
|
|||
break
|
||||
|
||||
def _get_storage_id(self):
|
||||
fun = '{0}.storage_id'.format(self.driver)
|
||||
fun = "{0}.storage_id".format(self.driver)
|
||||
if fun in self.modules:
|
||||
return self.modules[fun](self.kwargs)
|
||||
else:
|
||||
|
@ -324,8 +327,10 @@ class MemCache(Cache):
|
|||
if self.debug:
|
||||
self.hit += 1
|
||||
log.debug(
|
||||
'MemCache stats (call/hit/rate): %s/%s/%s',
|
||||
self.call, self.hit, float(self.hit) / self.call
|
||||
"MemCache stats (call/hit/rate): %s/%s/%s",
|
||||
self.call,
|
||||
self.hit,
|
||||
float(self.hit) / self.call,
|
||||
)
|
||||
# update atime and return
|
||||
record[0] = now
|
||||
|
|
100
salt/cache/consul.py
vendored
100
salt/cache/consul.py
vendored
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Minion data cache plugin for Consul key/value data store.
|
||||
|
||||
.. versionadded:: 2016.11.2
|
||||
|
@ -45,116 +45,118 @@ value to ``consul``:
|
|||
.. _`Consul documentation`: https://www.consul.io/docs/index.html
|
||||
.. _`python-consul documentation`: https://python-consul.readthedocs.io/en/latest/#consul
|
||||
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
try:
|
||||
import consul
|
||||
|
||||
HAS_CONSUL = True
|
||||
except ImportError:
|
||||
HAS_CONSUL = False
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
api = None
|
||||
|
||||
|
||||
# Define the module's virtual name
|
||||
__virtualname__ = 'consul'
|
||||
__virtualname__ = "consul"
|
||||
|
||||
__func_alias__ = {'list_': 'list'}
|
||||
__func_alias__ = {"list_": "list"}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Confirm this python-consul package is installed
|
||||
'''
|
||||
"""
|
||||
if not HAS_CONSUL:
|
||||
return (False, "Please install python-consul package to use consul data cache driver")
|
||||
return (
|
||||
False,
|
||||
"Please install python-consul package to use consul data cache driver",
|
||||
)
|
||||
|
||||
consul_kwargs = {
|
||||
'host': __opts__.get('consul.host', '127.0.0.1'),
|
||||
'port': __opts__.get('consul.port', 8500),
|
||||
'token': __opts__.get('consul.token', None),
|
||||
'scheme': __opts__.get('consul.scheme', 'http'),
|
||||
'consistency': __opts__.get('consul.consistency', 'default'),
|
||||
'dc': __opts__.get('consul.dc', None),
|
||||
'verify': __opts__.get('consul.verify', True),
|
||||
}
|
||||
"host": __opts__.get("consul.host", "127.0.0.1"),
|
||||
"port": __opts__.get("consul.port", 8500),
|
||||
"token": __opts__.get("consul.token", None),
|
||||
"scheme": __opts__.get("consul.scheme", "http"),
|
||||
"consistency": __opts__.get("consul.consistency", "default"),
|
||||
"dc": __opts__.get("consul.dc", None),
|
||||
"verify": __opts__.get("consul.verify", True),
|
||||
}
|
||||
|
||||
try:
|
||||
global api
|
||||
api = consul.Consul(**consul_kwargs)
|
||||
except AttributeError:
|
||||
return (False, "Failed to invoke consul.Consul, please make sure you have python-consul >= 0.2.0 installed")
|
||||
return (
|
||||
False,
|
||||
"Failed to invoke consul.Consul, please make sure you have python-consul >= 0.2.0 installed",
|
||||
)
|
||||
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def store(bank, key, data):
|
||||
'''
|
||||
"""
|
||||
Store a key value.
|
||||
'''
|
||||
c_key = '{0}/{1}'.format(bank, key)
|
||||
"""
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
try:
|
||||
c_data = __context__['serial'].dumps(data)
|
||||
c_data = __context__["serial"].dumps(data)
|
||||
api.kv.put(c_key, c_data)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error writing the key, {0}: {1}'.format(
|
||||
c_key, exc
|
||||
)
|
||||
"There was an error writing the key, {0}: {1}".format(c_key, exc)
|
||||
)
|
||||
|
||||
|
||||
def fetch(bank, key):
|
||||
'''
|
||||
"""
|
||||
Fetch a key value.
|
||||
'''
|
||||
c_key = '{0}/{1}'.format(bank, key)
|
||||
"""
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
try:
|
||||
_, value = api.kv.get(c_key)
|
||||
if value is None:
|
||||
return {}
|
||||
return __context__['serial'].loads(value['Value'])
|
||||
return __context__["serial"].loads(value["Value"])
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error reading the key, {0}: {1}'.format(
|
||||
c_key, exc
|
||||
)
|
||||
"There was an error reading the key, {0}: {1}".format(c_key, exc)
|
||||
)
|
||||
|
||||
|
||||
def flush(bank, key=None):
|
||||
'''
|
||||
"""
|
||||
Remove the key from the cache bank with all the key content.
|
||||
'''
|
||||
"""
|
||||
if key is None:
|
||||
c_key = bank
|
||||
else:
|
||||
c_key = '{0}/{1}'.format(bank, key)
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
try:
|
||||
return api.kv.delete(c_key, recurse=key is None)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error removing the key, {0}: {1}'.format(
|
||||
c_key, exc
|
||||
)
|
||||
"There was an error removing the key, {0}: {1}".format(c_key, exc)
|
||||
)
|
||||
|
||||
|
||||
def list_(bank):
|
||||
'''
|
||||
"""
|
||||
Return an iterable object containing all entries stored in the specified bank.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
_, keys = api.kv.get(bank + '/', keys=True, separator='/')
|
||||
_, keys = api.kv.get(bank + "/", keys=True, separator="/")
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error getting the key "{0}": {1}'.format(
|
||||
bank, exc
|
||||
)
|
||||
'There was an error getting the key "{0}": {1}'.format(bank, exc)
|
||||
)
|
||||
if keys is None:
|
||||
keys = []
|
||||
|
@ -163,25 +165,23 @@ def list_(bank):
|
|||
# so we have to return a list of unique names only.
|
||||
out = set()
|
||||
for key in keys:
|
||||
out.add(key[len(bank) + 1:].rstrip('/'))
|
||||
out.add(key[len(bank) + 1 :].rstrip("/"))
|
||||
keys = list(out)
|
||||
return keys
|
||||
|
||||
|
||||
def contains(bank, key):
|
||||
'''
|
||||
"""
|
||||
Checks if the specified bank contains the specified key.
|
||||
'''
|
||||
"""
|
||||
if key is None:
|
||||
return True # any key could be a branch and a leaf at the same time in Consul
|
||||
else:
|
||||
try:
|
||||
c_key = '{0}/{1}'.format(bank, key)
|
||||
c_key = "{0}/{1}".format(bank, key)
|
||||
_, value = api.kv.get(c_key)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error getting the key, {0}: {1}'.format(
|
||||
c_key, exc
|
||||
)
|
||||
"There was an error getting the key, {0}: {1}".format(c_key, exc)
|
||||
)
|
||||
return value is not None
|
||||
|
|
116
salt/cache/etcd_cache.py
vendored
116
salt/cache/etcd_cache.py
vendored
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Minion data cache plugin for Etcd key/value data store.
|
||||
|
||||
.. versionadded:: develop
|
||||
|
@ -47,17 +47,21 @@ value to ``etcd``:
|
|||
.. _`Etcd documentation`: https://github.com/coreos/etcd
|
||||
.. _`python-etcd documentation`: http://python-etcd.readthedocs.io/en/latest/
|
||||
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import logging
|
||||
|
||||
import base64
|
||||
import logging
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
try:
|
||||
import etcd
|
||||
|
||||
HAS_ETCD = True
|
||||
except ImportError:
|
||||
HAS_ETCD = False
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
_DEFAULT_PATH_PREFIX = "/salt_cache"
|
||||
|
||||
|
@ -72,44 +76,46 @@ path_prefix = None
|
|||
|
||||
# Module properties
|
||||
|
||||
__virtualname__ = 'etcd'
|
||||
__func_alias__ = {'ls': 'list'}
|
||||
__virtualname__ = "etcd"
|
||||
__func_alias__ = {"ls": "list"}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Confirm that python-etcd package is installed.
|
||||
'''
|
||||
"""
|
||||
if not HAS_ETCD:
|
||||
return (False, "Please install python-etcd package to use etcd data "
|
||||
"cache driver")
|
||||
return (
|
||||
False,
|
||||
"Please install python-etcd package to use etcd data cache driver",
|
||||
)
|
||||
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def _init_client():
|
||||
'''Setup client and init datastore.
|
||||
'''
|
||||
"""Setup client and init datastore.
|
||||
"""
|
||||
global client, path_prefix
|
||||
if client is not None:
|
||||
return
|
||||
|
||||
etcd_kwargs = {
|
||||
'host': __opts__.get('etcd.host', '127.0.0.1'),
|
||||
'port': __opts__.get('etcd.port', 2379),
|
||||
'protocol': __opts__.get('etcd.protocol', 'http'),
|
||||
'allow_reconnect': __opts__.get('etcd.allow_reconnect', True),
|
||||
'allow_redirect': __opts__.get('etcd.allow_redirect', False),
|
||||
'srv_domain': __opts__.get('etcd.srv_domain', None),
|
||||
'read_timeout': __opts__.get('etcd.read_timeout', 60),
|
||||
'username': __opts__.get('etcd.username', None),
|
||||
'password': __opts__.get('etcd.password', None),
|
||||
'cert': __opts__.get('etcd.cert', None),
|
||||
'ca_cert': __opts__.get('etcd.ca_cert', None),
|
||||
"host": __opts__.get("etcd.host", "127.0.0.1"),
|
||||
"port": __opts__.get("etcd.port", 2379),
|
||||
"protocol": __opts__.get("etcd.protocol", "http"),
|
||||
"allow_reconnect": __opts__.get("etcd.allow_reconnect", True),
|
||||
"allow_redirect": __opts__.get("etcd.allow_redirect", False),
|
||||
"srv_domain": __opts__.get("etcd.srv_domain", None),
|
||||
"read_timeout": __opts__.get("etcd.read_timeout", 60),
|
||||
"username": __opts__.get("etcd.username", None),
|
||||
"password": __opts__.get("etcd.password", None),
|
||||
"cert": __opts__.get("etcd.cert", None),
|
||||
"ca_cert": __opts__.get("etcd.ca_cert", None),
|
||||
}
|
||||
path_prefix = __opts__.get('etcd.path_prefix', _DEFAULT_PATH_PREFIX)
|
||||
path_prefix = __opts__.get("etcd.path_prefix", _DEFAULT_PATH_PREFIX)
|
||||
if path_prefix != "":
|
||||
path_prefix = '/{0}'.format(path_prefix.strip('/'))
|
||||
path_prefix = "/{0}".format(path_prefix.strip("/"))
|
||||
log.info("etcd: Setting up client with params: %r", etcd_kwargs)
|
||||
client = etcd.Client(**etcd_kwargs)
|
||||
try:
|
||||
|
@ -120,48 +126,46 @@ def _init_client():
|
|||
|
||||
|
||||
def store(bank, key, data):
|
||||
'''
|
||||
"""
|
||||
Store a key value.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
etcd_key = '{0}/{1}/{2}'.format(path_prefix, bank, key)
|
||||
etcd_key = "{0}/{1}/{2}".format(path_prefix, bank, key)
|
||||
try:
|
||||
value = __context__['serial'].dumps(data)
|
||||
value = __context__["serial"].dumps(data)
|
||||
client.write(etcd_key, base64.b64encode(value))
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error writing the key, {0}: {1}'.format(etcd_key, exc)
|
||||
"There was an error writing the key, {0}: {1}".format(etcd_key, exc)
|
||||
)
|
||||
|
||||
|
||||
def fetch(bank, key):
|
||||
'''
|
||||
"""
|
||||
Fetch a key value.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
etcd_key = '{0}/{1}/{2}'.format(path_prefix, bank, key)
|
||||
etcd_key = "{0}/{1}/{2}".format(path_prefix, bank, key)
|
||||
try:
|
||||
value = client.read(etcd_key).value
|
||||
return __context__['serial'].loads(base64.b64decode(value))
|
||||
return __context__["serial"].loads(base64.b64decode(value))
|
||||
except etcd.EtcdKeyNotFound:
|
||||
return {}
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error reading the key, {0}: {1}'.format(
|
||||
etcd_key, exc
|
||||
)
|
||||
"There was an error reading the key, {0}: {1}".format(etcd_key, exc)
|
||||
)
|
||||
|
||||
|
||||
def flush(bank, key=None):
|
||||
'''
|
||||
"""
|
||||
Remove the key from the cache bank with all the key content.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
if key is None:
|
||||
etcd_key = '{0}/{1}'.format(path_prefix, bank)
|
||||
etcd_key = "{0}/{1}".format(path_prefix, bank)
|
||||
else:
|
||||
etcd_key = '{0}/{1}/{2}'.format(path_prefix, bank, key)
|
||||
etcd_key = "{0}/{1}/{2}".format(path_prefix, bank, key)
|
||||
try:
|
||||
client.read(etcd_key)
|
||||
except etcd.EtcdKeyNotFound:
|
||||
|
@ -170,19 +174,17 @@ def flush(bank, key=None):
|
|||
client.delete(etcd_key, recursive=True)
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error removing the key, {0}: {1}'.format(
|
||||
etcd_key, exc
|
||||
)
|
||||
"There was an error removing the key, {0}: {1}".format(etcd_key, exc)
|
||||
)
|
||||
|
||||
|
||||
def _walk(r):
|
||||
'''
|
||||
"""
|
||||
Recursively walk dirs. Return flattened list of keys.
|
||||
r: etcd.EtcdResult
|
||||
'''
|
||||
"""
|
||||
if not r.dir:
|
||||
return [r.key.split('/', 3)[3]]
|
||||
return [r.key.split("/", 3)[3]]
|
||||
|
||||
keys = []
|
||||
for c in client.read(r.key).children:
|
||||
|
@ -191,28 +193,26 @@ def _walk(r):
|
|||
|
||||
|
||||
def ls(bank):
|
||||
'''
|
||||
"""
|
||||
Return an iterable object containing all entries stored in the specified
|
||||
bank.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
path = '{0}/{1}'.format(path_prefix, bank)
|
||||
path = "{0}/{1}".format(path_prefix, bank)
|
||||
try:
|
||||
return _walk(client.read(path))
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error getting the key "{0}": {1}'.format(
|
||||
bank, exc
|
||||
)
|
||||
'There was an error getting the key "{0}": {1}'.format(bank, exc)
|
||||
)
|
||||
|
||||
|
||||
def contains(bank, key):
|
||||
'''
|
||||
"""
|
||||
Checks if the specified bank contains the specified key.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
etcd_key = '{0}/{1}/{2}'.format(path_prefix, bank, key)
|
||||
etcd_key = "{0}/{1}/{2}".format(path_prefix, bank, key)
|
||||
try:
|
||||
r = client.read(etcd_key)
|
||||
# return True for keys, not dirs
|
||||
|
@ -221,7 +221,5 @@ def contains(bank, key):
|
|||
return False
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
raise SaltCacheError(
|
||||
'There was an error getting the key, {0}: {1}'.format(
|
||||
etcd_key, exc
|
||||
)
|
||||
"There was an error getting the key, {0}: {1}".format(etcd_key, exc)
|
||||
)
|
||||
|
|
93
salt/cache/localfs.py
vendored
93
salt/cache/localfs.py
vendored
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Cache data in filesystem.
|
||||
|
||||
.. versionadded:: 2016.11.0
|
||||
|
@ -9,102 +9,97 @@ require any configuration.
|
|||
|
||||
Expiration values can be set in the relevant config file (``/etc/salt/master`` for
|
||||
the master, ``/etc/salt/cloud`` for Salt Cloud, etc).
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import os.path
|
||||
import errno
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
import salt.utils.atomicfile
|
||||
import salt.utils.files
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__func_alias__ = {'list_': 'list'}
|
||||
__func_alias__ = {"list_": "list"}
|
||||
|
||||
|
||||
def __cachedir(kwargs=None):
|
||||
if kwargs and 'cachedir' in kwargs:
|
||||
return kwargs['cachedir']
|
||||
return __opts__.get('cachedir', salt.syspaths.CACHE_DIR)
|
||||
if kwargs and "cachedir" in kwargs:
|
||||
return kwargs["cachedir"]
|
||||
return __opts__.get("cachedir", salt.syspaths.CACHE_DIR)
|
||||
|
||||
|
||||
def init_kwargs(kwargs):
|
||||
return {'cachedir': __cachedir(kwargs)}
|
||||
return {"cachedir": __cachedir(kwargs)}
|
||||
|
||||
|
||||
def get_storage_id(kwargs):
|
||||
return ('localfs', __cachedir(kwargs))
|
||||
return ("localfs", __cachedir(kwargs))
|
||||
|
||||
|
||||
def store(bank, key, data, cachedir):
|
||||
'''
|
||||
"""
|
||||
Store information in a file.
|
||||
'''
|
||||
"""
|
||||
base = os.path.join(cachedir, os.path.normpath(bank))
|
||||
try:
|
||||
os.makedirs(base)
|
||||
except OSError as exc:
|
||||
if exc.errno != errno.EEXIST:
|
||||
raise SaltCacheError(
|
||||
'The cache directory, {0}, could not be created: {1}'.format(
|
||||
base, exc
|
||||
)
|
||||
"The cache directory, {0}, could not be created: {1}".format(base, exc)
|
||||
)
|
||||
|
||||
outfile = os.path.join(base, '{0}.p'.format(key))
|
||||
outfile = os.path.join(base, "{0}.p".format(key))
|
||||
tmpfh, tmpfname = tempfile.mkstemp(dir=base)
|
||||
os.close(tmpfh)
|
||||
try:
|
||||
with salt.utils.files.fopen(tmpfname, 'w+b') as fh_:
|
||||
fh_.write(__context__['serial'].dumps(data))
|
||||
with salt.utils.files.fopen(tmpfname, "w+b") as fh_:
|
||||
fh_.write(__context__["serial"].dumps(data))
|
||||
# On Windows, os.rename will fail if the destination file exists.
|
||||
salt.utils.atomicfile.atomic_rename(tmpfname, outfile)
|
||||
except IOError as exc:
|
||||
raise SaltCacheError(
|
||||
'There was an error writing the cache file, {0}: {1}'.format(
|
||||
base, exc
|
||||
)
|
||||
"There was an error writing the cache file, {0}: {1}".format(base, exc)
|
||||
)
|
||||
|
||||
|
||||
def fetch(bank, key, cachedir):
|
||||
'''
|
||||
"""
|
||||
Fetch information from a file.
|
||||
'''
|
||||
"""
|
||||
inkey = False
|
||||
key_file = os.path.join(cachedir, os.path.normpath(bank), '{0}.p'.format(key))
|
||||
key_file = os.path.join(cachedir, os.path.normpath(bank), "{0}.p".format(key))
|
||||
if not os.path.isfile(key_file):
|
||||
# The bank includes the full filename, and the key is inside the file
|
||||
key_file = os.path.join(cachedir, os.path.normpath(bank) + '.p')
|
||||
key_file = os.path.join(cachedir, os.path.normpath(bank) + ".p")
|
||||
inkey = True
|
||||
|
||||
if not os.path.isfile(key_file):
|
||||
log.debug('Cache file "%s" does not exist', key_file)
|
||||
return {}
|
||||
try:
|
||||
with salt.utils.files.fopen(key_file, 'rb') as fh_:
|
||||
with salt.utils.files.fopen(key_file, "rb") as fh_:
|
||||
if inkey:
|
||||
return __context__['serial'].load(fh_)[key]
|
||||
return __context__["serial"].load(fh_)[key]
|
||||
else:
|
||||
return __context__['serial'].load(fh_)
|
||||
return __context__["serial"].load(fh_)
|
||||
except IOError as exc:
|
||||
raise SaltCacheError(
|
||||
'There was an error reading the cache file "{0}": {1}'.format(
|
||||
key_file, exc
|
||||
)
|
||||
'There was an error reading the cache file "{0}": {1}'.format(key_file, exc)
|
||||
)
|
||||
|
||||
|
||||
def updated(bank, key, cachedir):
|
||||
'''
|
||||
"""
|
||||
Return the epoch of the mtime for this cache file
|
||||
'''
|
||||
key_file = os.path.join(cachedir, os.path.normpath(bank), '{0}.p'.format(key))
|
||||
"""
|
||||
key_file = os.path.join(cachedir, os.path.normpath(bank), "{0}.p".format(key))
|
||||
if not os.path.isfile(key_file):
|
||||
log.warning('Cache file "%s" does not exist', key_file)
|
||||
return None
|
||||
|
@ -112,16 +107,14 @@ def updated(bank, key, cachedir):
|
|||
return int(os.path.getmtime(key_file))
|
||||
except IOError as exc:
|
||||
raise SaltCacheError(
|
||||
'There was an error reading the mtime for "{0}": {1}'.format(
|
||||
key_file, exc
|
||||
)
|
||||
'There was an error reading the mtime for "{0}": {1}'.format(key_file, exc)
|
||||
)
|
||||
|
||||
|
||||
def flush(bank, key=None, cachedir=None):
|
||||
'''
|
||||
"""
|
||||
Remove the key from the cache bank with all the key content.
|
||||
'''
|
||||
"""
|
||||
if cachedir is None:
|
||||
cachedir = __cachedir()
|
||||
|
||||
|
@ -132,23 +125,21 @@ def flush(bank, key=None, cachedir=None):
|
|||
return False
|
||||
shutil.rmtree(target)
|
||||
else:
|
||||
target = os.path.join(cachedir, os.path.normpath(bank), '{0}.p'.format(key))
|
||||
target = os.path.join(cachedir, os.path.normpath(bank), "{0}.p".format(key))
|
||||
if not os.path.isfile(target):
|
||||
return False
|
||||
os.remove(target)
|
||||
except OSError as exc:
|
||||
raise SaltCacheError(
|
||||
'There was an error removing "{0}": {1}'.format(
|
||||
target, exc
|
||||
)
|
||||
'There was an error removing "{0}": {1}'.format(target, exc)
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
def list_(bank, cachedir):
|
||||
'''
|
||||
"""
|
||||
Return an iterable object containing all entries stored in the specified bank.
|
||||
'''
|
||||
"""
|
||||
base = os.path.join(cachedir, os.path.normpath(bank))
|
||||
if not os.path.isdir(base):
|
||||
return []
|
||||
|
@ -156,13 +147,11 @@ def list_(bank, cachedir):
|
|||
items = os.listdir(base)
|
||||
except OSError as exc:
|
||||
raise SaltCacheError(
|
||||
'There was an error accessing directory "{0}": {1}'.format(
|
||||
base, exc
|
||||
)
|
||||
'There was an error accessing directory "{0}": {1}'.format(base, exc)
|
||||
)
|
||||
ret = []
|
||||
for item in items:
|
||||
if item.endswith('.p'):
|
||||
if item.endswith(".p"):
|
||||
ret.append(item.rstrip(item[-2:]))
|
||||
else:
|
||||
ret.append(item)
|
||||
|
@ -170,12 +159,12 @@ def list_(bank, cachedir):
|
|||
|
||||
|
||||
def contains(bank, key, cachedir):
|
||||
'''
|
||||
"""
|
||||
Checks if the specified bank contains the specified key.
|
||||
'''
|
||||
"""
|
||||
if key is None:
|
||||
base = os.path.join(cachedir, os.path.normpath(bank))
|
||||
return os.path.isdir(base)
|
||||
else:
|
||||
keyfile = os.path.join(cachedir, os.path.normpath(bank), '{0}.p'.format(key))
|
||||
keyfile = os.path.join(cachedir, os.path.normpath(bank), "{0}.p".format(key))
|
||||
return os.path.isfile(keyfile)
|
||||
|
|
104
salt/cache/mysql_cache.py
vendored
104
salt/cache/mysql_cache.py
vendored
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Minion data cache plugin for MySQL database.
|
||||
|
||||
.. versionadded:: develop
|
||||
|
@ -42,10 +42,13 @@ value to ``mysql``:
|
|||
.. _`MySQL documentation`: https://github.com/coreos/mysql
|
||||
.. _`python-mysql documentation`: http://python-mysql.readthedocs.io/en/latest/
|
||||
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
from time import sleep
|
||||
|
||||
import logging
|
||||
from time import sleep
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
try:
|
||||
# Trying to import MySQLdb
|
||||
|
@ -57,6 +60,7 @@ except ImportError:
|
|||
try:
|
||||
# MySQLdb import failed, try to import PyMySQL
|
||||
import pymysql
|
||||
|
||||
pymysql.install_as_MySQLdb()
|
||||
import MySQLdb
|
||||
import MySQLdb.cursors
|
||||
|
@ -65,7 +69,6 @@ except ImportError:
|
|||
except ImportError:
|
||||
MySQLdb = None
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
_DEFAULT_DATABASE_NAME = "salt_cache"
|
||||
_DEFAULT_CACHE_TABLE_NAME = "cache"
|
||||
|
@ -78,24 +81,24 @@ _table_name = None
|
|||
|
||||
# Module properties
|
||||
|
||||
__virtualname__ = 'mysql'
|
||||
__func_alias__ = {'ls': 'list'}
|
||||
__virtualname__ = "mysql"
|
||||
__func_alias__ = {"ls": "list"}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
Confirm that a python mysql client is installed.
|
||||
'''
|
||||
return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''
|
||||
"""
|
||||
return bool(MySQLdb), "No python mysql client installed." if MySQLdb is None else ""
|
||||
|
||||
|
||||
def run_query(conn, query, retries=3):
|
||||
'''
|
||||
"""
|
||||
Get a cursor and run a query. Reconnect up to `retries` times if
|
||||
needed.
|
||||
Returns: cursor, affected rows counter
|
||||
Raises: SaltCacheError, AttributeError, OperationalError
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
cur = conn.cursor()
|
||||
out = cur.execute(query)
|
||||
|
@ -119,16 +122,15 @@ def run_query(conn, query, retries=3):
|
|||
|
||||
|
||||
def _create_table():
|
||||
'''
|
||||
"""
|
||||
Create table if needed
|
||||
'''
|
||||
"""
|
||||
# Explicitely check if the table already exists as the library logs a
|
||||
# warning on CREATE TABLE
|
||||
query = """SELECT COUNT(TABLE_NAME) FROM information_schema.tables
|
||||
WHERE table_schema = '{0}' AND table_name = '{1}'""".format(
|
||||
_mysql_kwargs['db'],
|
||||
_table_name,
|
||||
)
|
||||
_mysql_kwargs["db"], _table_name,
|
||||
)
|
||||
cur, _ = run_query(client, query)
|
||||
r = cur.fetchone()
|
||||
cur.close()
|
||||
|
@ -140,7 +142,9 @@ def _create_table():
|
|||
etcd_key CHAR(255),
|
||||
data MEDIUMBLOB,
|
||||
PRIMARY KEY(bank, etcd_key)
|
||||
);""".format(_table_name)
|
||||
);""".format(
|
||||
_table_name
|
||||
)
|
||||
log.info("mysql_cache: creating table %s", _table_name)
|
||||
cur, _ = run_query(client, query)
|
||||
cur.close()
|
||||
|
@ -154,67 +158,67 @@ def _init_client():
|
|||
|
||||
global _mysql_kwargs, _table_name
|
||||
_mysql_kwargs = {
|
||||
'host': __opts__.get('mysql.host', '127.0.0.1'),
|
||||
'user': __opts__.get('mysql.user', None),
|
||||
'passwd': __opts__.get('mysql.password', None),
|
||||
'db': __opts__.get('mysql.database', _DEFAULT_DATABASE_NAME),
|
||||
'port': __opts__.get('mysql.port', 3306),
|
||||
'unix_socket': __opts__.get('mysql.unix_socket', None),
|
||||
'connect_timeout': __opts__.get('mysql.connect_timeout', None),
|
||||
'autocommit': True,
|
||||
"host": __opts__.get("mysql.host", "127.0.0.1"),
|
||||
"user": __opts__.get("mysql.user", None),
|
||||
"passwd": __opts__.get("mysql.password", None),
|
||||
"db": __opts__.get("mysql.database", _DEFAULT_DATABASE_NAME),
|
||||
"port": __opts__.get("mysql.port", 3306),
|
||||
"unix_socket": __opts__.get("mysql.unix_socket", None),
|
||||
"connect_timeout": __opts__.get("mysql.connect_timeout", None),
|
||||
"autocommit": True,
|
||||
}
|
||||
_table_name = __opts__.get('mysql.table_name', _table_name)
|
||||
_table_name = __opts__.get("mysql.table_name", _table_name)
|
||||
# TODO: handle SSL connection parameters
|
||||
|
||||
for k, v in _mysql_kwargs.items():
|
||||
if v is None:
|
||||
_mysql_kwargs.pop(k)
|
||||
kwargs_copy = _mysql_kwargs.copy()
|
||||
kwargs_copy['passwd'] = "<hidden>"
|
||||
kwargs_copy["passwd"] = "<hidden>"
|
||||
log.info("mysql_cache: Setting up client with params: %r", kwargs_copy)
|
||||
# The MySQL client is created later on by run_query
|
||||
_create_table()
|
||||
|
||||
|
||||
def store(bank, key, data):
|
||||
'''
|
||||
"""
|
||||
Store a key value.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
data = __context__['serial'].dumps(data)
|
||||
query = b"REPLACE INTO {0} (bank, etcd_key, data) values('{1}', '{2}', " \
|
||||
b"'{3}')".format(_table_name,
|
||||
bank,
|
||||
key,
|
||||
data)
|
||||
data = __context__["serial"].dumps(data)
|
||||
query = (
|
||||
b"REPLACE INTO {0} (bank, etcd_key, data) values('{1}', '{2}', "
|
||||
b"'{3}')".format(_table_name, bank, key, data)
|
||||
)
|
||||
|
||||
cur, cnt = run_query(client, query)
|
||||
cur.close()
|
||||
if cnt not in (1, 2):
|
||||
raise SaltCacheError(
|
||||
'Error storing {0} {1} returned {2}'.format(bank, key, cnt)
|
||||
"Error storing {0} {1} returned {2}".format(bank, key, cnt)
|
||||
)
|
||||
|
||||
|
||||
def fetch(bank, key):
|
||||
'''
|
||||
"""
|
||||
Fetch a key value.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
query = "SELECT data FROM {0} WHERE bank='{1}' AND etcd_key='{2}'".format(
|
||||
_table_name, bank, key)
|
||||
_table_name, bank, key
|
||||
)
|
||||
cur, _ = run_query(client, query)
|
||||
r = cur.fetchone()
|
||||
cur.close()
|
||||
if r is None:
|
||||
return {}
|
||||
return __context__['serial'].loads(r[0])
|
||||
return __context__["serial"].loads(r[0])
|
||||
|
||||
|
||||
def flush(bank, key=None):
|
||||
'''
|
||||
"""
|
||||
Remove the key from the cache bank with all the key content.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
query = "DELETE FROM {0} WHERE bank='{1}'".format(_table_name, bank)
|
||||
if key is not None:
|
||||
|
@ -225,13 +229,12 @@ def flush(bank, key=None):
|
|||
|
||||
|
||||
def ls(bank):
|
||||
'''
|
||||
"""
|
||||
Return an iterable object containing all entries stored in the specified
|
||||
bank.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
query = "SELECT etcd_key FROM {0} WHERE bank='{1}'".format(
|
||||
_table_name, bank)
|
||||
query = "SELECT etcd_key FROM {0} WHERE bank='{1}'".format(_table_name, bank)
|
||||
cur, _ = run_query(client, query)
|
||||
out = [row[0] for row in cur.fetchall()]
|
||||
cur.close()
|
||||
|
@ -239,12 +242,13 @@ def ls(bank):
|
|||
|
||||
|
||||
def contains(bank, key):
|
||||
'''
|
||||
"""
|
||||
Checks if the specified bank contains the specified key.
|
||||
'''
|
||||
"""
|
||||
_init_client()
|
||||
query = "SELECT COUNT(data) FROM {0} WHERE bank='{1}' " \
|
||||
"AND etcd_key='{2}'".format(_table_name, bank, key)
|
||||
query = "SELECT COUNT(data) FROM {0} WHERE bank='{1}' " "AND etcd_key='{2}'".format(
|
||||
_table_name, bank, key
|
||||
)
|
||||
cur, _ = run_query(client, query)
|
||||
r = cur.fetchone()
|
||||
cur.close()
|
||||
|
|
246
salt/cache/redis_cache.py
vendored
246
salt/cache/redis_cache.py
vendored
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Redis
|
||||
=====
|
||||
|
||||
|
@ -134,45 +134,52 @@ Cluster Configuration Example:
|
|||
cache.redis.bank_keys_prefix: #BANKEYS
|
||||
cache.redis.key_prefix: #KEY
|
||||
cache.redis.separator: '@'
|
||||
'''
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import stdlib
|
||||
import logging
|
||||
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
# Import salt
|
||||
from salt.ext.six.moves import range
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import redis
|
||||
from redis.exceptions import ConnectionError as RedisConnectionError
|
||||
from redis.exceptions import ResponseError as RedisResponseError
|
||||
|
||||
HAS_REDIS = True
|
||||
except ImportError:
|
||||
HAS_REDIS = False
|
||||
|
||||
try:
|
||||
# pylint: disable=no-name-in-module
|
||||
from rediscluster import StrictRedisCluster
|
||||
|
||||
# pylint: enable=no-name-in-module
|
||||
|
||||
HAS_REDIS_CLUSTER = True
|
||||
except ImportError:
|
||||
HAS_REDIS_CLUSTER = False
|
||||
|
||||
# Import salt
|
||||
from salt.ext.six.moves import range
|
||||
from salt.exceptions import SaltCacheError
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# module properties
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
__virtualname__ = 'redis'
|
||||
__func_alias__ = {'list_': 'list'}
|
||||
__virtualname__ = "redis"
|
||||
__func_alias__ = {"list_": "list"}
|
||||
|
||||
log = logging.getLogger(__file__)
|
||||
|
||||
_BANK_PREFIX = '$BANK'
|
||||
_KEY_PREFIX = '$KEY'
|
||||
_BANK_KEYS_PREFIX = '$BANKEYS'
|
||||
_SEPARATOR = '_'
|
||||
_BANK_PREFIX = "$BANK"
|
||||
_KEY_PREFIX = "$KEY"
|
||||
_BANK_KEYS_PREFIX = "$BANKEYS"
|
||||
_SEPARATOR = "_"
|
||||
|
||||
REDIS_SERVER = None
|
||||
|
||||
|
@ -182,14 +189,14 @@ REDIS_SERVER = None
|
|||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
"""
|
||||
The redis library must be installed for this module to work.
|
||||
|
||||
The redis redis cluster library must be installed if cluster_mode is True
|
||||
'''
|
||||
"""
|
||||
if not HAS_REDIS:
|
||||
return (False, "Please install the python-redis package.")
|
||||
if not HAS_REDIS_CLUSTER and _get_redis_cache_opts()['cluster_mode']:
|
||||
if not HAS_REDIS_CLUSTER and _get_redis_cache_opts()["cluster_mode"]:
|
||||
return (False, "Please install the redis-py-cluster package.")
|
||||
return __virtualname__
|
||||
|
||||
|
@ -198,124 +205,125 @@ def __virtual__():
|
|||
# helper functions -- will not be exported
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def init_kwargs(kwargs):
|
||||
return {}
|
||||
|
||||
|
||||
def _get_redis_cache_opts():
|
||||
'''
|
||||
"""
|
||||
Return the Redis server connection details from the __opts__.
|
||||
'''
|
||||
"""
|
||||
return {
|
||||
'host': __opts__.get('cache.redis.host', 'localhost'),
|
||||
'port': __opts__.get('cache.redis.port', 6379),
|
||||
'unix_socket_path': __opts__.get('cache.redis.unix_socket_path', None),
|
||||
'db': __opts__.get('cache.redis.db', '0'),
|
||||
'password': __opts__.get('cache.redis.password', ''),
|
||||
'cluster_mode': __opts__.get('cache.redis.cluster_mode', False),
|
||||
'startup_nodes': __opts__.get('cache.redis.cluster.startup_nodes', {}),
|
||||
'skip_full_coverage_check': __opts__.get('cache.redis.cluster.skip_full_coverage_check', False),
|
||||
"host": __opts__.get("cache.redis.host", "localhost"),
|
||||
"port": __opts__.get("cache.redis.port", 6379),
|
||||
"unix_socket_path": __opts__.get("cache.redis.unix_socket_path", None),
|
||||
"db": __opts__.get("cache.redis.db", "0"),
|
||||
"password": __opts__.get("cache.redis.password", ""),
|
||||
"cluster_mode": __opts__.get("cache.redis.cluster_mode", False),
|
||||
"startup_nodes": __opts__.get("cache.redis.cluster.startup_nodes", {}),
|
||||
"skip_full_coverage_check": __opts__.get(
|
||||
"cache.redis.cluster.skip_full_coverage_check", False
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def _get_redis_server(opts=None):
|
||||
'''
|
||||
"""
|
||||
Return the Redis server instance.
|
||||
Caching the object instance.
|
||||
'''
|
||||
"""
|
||||
global REDIS_SERVER
|
||||
if REDIS_SERVER:
|
||||
return REDIS_SERVER
|
||||
if not opts:
|
||||
opts = _get_redis_cache_opts()
|
||||
|
||||
if opts['cluster_mode']:
|
||||
REDIS_SERVER = StrictRedisCluster(startup_nodes=opts['startup_nodes'],
|
||||
skip_full_coverage_check=opts['skip_full_coverage_check'])
|
||||
if opts["cluster_mode"]:
|
||||
REDIS_SERVER = StrictRedisCluster(
|
||||
startup_nodes=opts["startup_nodes"],
|
||||
skip_full_coverage_check=opts["skip_full_coverage_check"],
|
||||
)
|
||||
else:
|
||||
REDIS_SERVER = redis.StrictRedis(opts['host'],
|
||||
opts['port'],
|
||||
unix_socket_path=opts['unix_socket_path'],
|
||||
db=opts['db'],
|
||||
password=opts['password'])
|
||||
REDIS_SERVER = redis.StrictRedis(
|
||||
opts["host"],
|
||||
opts["port"],
|
||||
unix_socket_path=opts["unix_socket_path"],
|
||||
db=opts["db"],
|
||||
password=opts["password"],
|
||||
)
|
||||
return REDIS_SERVER
|
||||
|
||||
|
||||
def _get_redis_keys_opts():
|
||||
'''
|
||||
"""
|
||||
Build the key opts based on the user options.
|
||||
'''
|
||||
"""
|
||||
return {
|
||||
'bank_prefix': __opts__.get('cache.redis.bank_prefix', _BANK_PREFIX),
|
||||
'bank_keys_prefix': __opts__.get('cache.redis.bank_keys_prefix', _BANK_KEYS_PREFIX),
|
||||
'key_prefix': __opts__.get('cache.redis.key_prefix', _KEY_PREFIX),
|
||||
'separator': __opts__.get('cache.redis.separator', _SEPARATOR)
|
||||
"bank_prefix": __opts__.get("cache.redis.bank_prefix", _BANK_PREFIX),
|
||||
"bank_keys_prefix": __opts__.get(
|
||||
"cache.redis.bank_keys_prefix", _BANK_KEYS_PREFIX
|
||||
),
|
||||
"key_prefix": __opts__.get("cache.redis.key_prefix", _KEY_PREFIX),
|
||||
"separator": __opts__.get("cache.redis.separator", _SEPARATOR),
|
||||
}
|
||||
|
||||
|
||||
def _get_bank_redis_key(bank):
|
||||
'''
|
||||
"""
|
||||
Return the Redis key for the bank given the name.
|
||||
'''
|
||||
"""
|
||||
opts = _get_redis_keys_opts()
|
||||
return '{prefix}{separator}{bank}'.format(
|
||||
prefix=opts['bank_prefix'],
|
||||
separator=opts['separator'],
|
||||
bank=bank
|
||||
return "{prefix}{separator}{bank}".format(
|
||||
prefix=opts["bank_prefix"], separator=opts["separator"], bank=bank
|
||||
)
|
||||
|
||||
|
||||
def _get_key_redis_key(bank, key):
|
||||
'''
|
||||
"""
|
||||
Return the Redis key given the bank name and the key name.
|
||||
'''
|
||||
"""
|
||||
opts = _get_redis_keys_opts()
|
||||
return '{prefix}{separator}{bank}/{key}'.format(
|
||||
prefix=opts['key_prefix'],
|
||||
separator=opts['separator'],
|
||||
bank=bank,
|
||||
key=key
|
||||
return "{prefix}{separator}{bank}/{key}".format(
|
||||
prefix=opts["key_prefix"], separator=opts["separator"], bank=bank, key=key
|
||||
)
|
||||
|
||||
|
||||
def _get_bank_keys_redis_key(bank):
|
||||
'''
|
||||
"""
|
||||
Return the Redis key for the SET of keys under a certain bank, given the bank name.
|
||||
'''
|
||||
"""
|
||||
opts = _get_redis_keys_opts()
|
||||
return '{prefix}{separator}{bank}'.format(
|
||||
prefix=opts['bank_keys_prefix'],
|
||||
separator=opts['separator'],
|
||||
bank=bank
|
||||
return "{prefix}{separator}{bank}".format(
|
||||
prefix=opts["bank_keys_prefix"], separator=opts["separator"], bank=bank
|
||||
)
|
||||
|
||||
|
||||
def _build_bank_hier(bank, redis_pipe):
|
||||
'''
|
||||
"""
|
||||
Build the bank hierarchy from the root of the tree.
|
||||
If already exists, it won't rewrite.
|
||||
It's using the Redis pipeline,
|
||||
so there will be only one interaction with the remote server.
|
||||
'''
|
||||
bank_list = bank.split('/')
|
||||
"""
|
||||
bank_list = bank.split("/")
|
||||
parent_bank_path = bank_list[0]
|
||||
for bank_name in bank_list[1:]:
|
||||
prev_bank_redis_key = _get_bank_redis_key(parent_bank_path)
|
||||
redis_pipe.sadd(prev_bank_redis_key, bank_name)
|
||||
log.debug('Adding %s to %s', bank_name, prev_bank_redis_key)
|
||||
parent_bank_path = '{curr_path}/{bank_name}'.format(
|
||||
curr_path=parent_bank_path,
|
||||
bank_name=bank_name
|
||||
log.debug("Adding %s to %s", bank_name, prev_bank_redis_key)
|
||||
parent_bank_path = "{curr_path}/{bank_name}".format(
|
||||
curr_path=parent_bank_path, bank_name=bank_name
|
||||
) # this becomes the parent of the next
|
||||
return True
|
||||
|
||||
|
||||
def _get_banks_to_remove(redis_server, bank, path=''):
|
||||
'''
|
||||
def _get_banks_to_remove(redis_server, bank, path=""):
|
||||
"""
|
||||
A simple tree tarversal algorithm that builds the list of banks to remove,
|
||||
starting from an arbitrary node in the tree.
|
||||
'''
|
||||
current_path = bank if not path else '{path}/{bank}'.format(path=path, bank=bank)
|
||||
"""
|
||||
current_path = bank if not path else "{path}/{bank}".format(path=path, bank=bank)
|
||||
bank_paths_to_remove = [current_path]
|
||||
# as you got here, you'll be removed
|
||||
|
||||
|
@ -324,60 +332,65 @@ def _get_banks_to_remove(redis_server, bank, path=''):
|
|||
if not child_banks:
|
||||
return bank_paths_to_remove # this bank does not have any child banks so we stop here
|
||||
for child_bank in child_banks:
|
||||
bank_paths_to_remove.extend(_get_banks_to_remove(redis_server, child_bank, path=current_path))
|
||||
bank_paths_to_remove.extend(
|
||||
_get_banks_to_remove(redis_server, child_bank, path=current_path)
|
||||
)
|
||||
# go one more level deeper
|
||||
# and also remove the children of this child bank (if any)
|
||||
return bank_paths_to_remove
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# cache subsystem functions
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def store(bank, key, data):
|
||||
'''
|
||||
"""
|
||||
Store the data in a Redis key.
|
||||
'''
|
||||
"""
|
||||
redis_server = _get_redis_server()
|
||||
redis_pipe = redis_server.pipeline()
|
||||
redis_key = _get_key_redis_key(bank, key)
|
||||
redis_bank_keys = _get_bank_keys_redis_key(bank)
|
||||
try:
|
||||
_build_bank_hier(bank, redis_pipe)
|
||||
value = __context__['serial'].dumps(data)
|
||||
value = __context__["serial"].dumps(data)
|
||||
redis_pipe.set(redis_key, value)
|
||||
log.debug('Setting the value for %s under %s (%s)', key, bank, redis_key)
|
||||
log.debug("Setting the value for %s under %s (%s)", key, bank, redis_key)
|
||||
redis_pipe.sadd(redis_bank_keys, key)
|
||||
log.debug('Adding %s to %s', key, redis_bank_keys)
|
||||
log.debug("Adding %s to %s", key, redis_bank_keys)
|
||||
redis_pipe.execute()
|
||||
except (RedisConnectionError, RedisResponseError) as rerr:
|
||||
mesg = 'Cannot set the Redis cache key {rkey}: {rerr}'.format(rkey=redis_key,
|
||||
rerr=rerr)
|
||||
mesg = "Cannot set the Redis cache key {rkey}: {rerr}".format(
|
||||
rkey=redis_key, rerr=rerr
|
||||
)
|
||||
log.error(mesg)
|
||||
raise SaltCacheError(mesg)
|
||||
|
||||
|
||||
def fetch(bank, key):
|
||||
'''
|
||||
"""
|
||||
Fetch data from the Redis cache.
|
||||
'''
|
||||
"""
|
||||
redis_server = _get_redis_server()
|
||||
redis_key = _get_key_redis_key(bank, key)
|
||||
redis_value = None
|
||||
try:
|
||||
redis_value = redis_server.get(redis_key)
|
||||
except (RedisConnectionError, RedisResponseError) as rerr:
|
||||
mesg = 'Cannot fetch the Redis cache key {rkey}: {rerr}'.format(rkey=redis_key,
|
||||
rerr=rerr)
|
||||
mesg = "Cannot fetch the Redis cache key {rkey}: {rerr}".format(
|
||||
rkey=redis_key, rerr=rerr
|
||||
)
|
||||
log.error(mesg)
|
||||
raise SaltCacheError(mesg)
|
||||
if redis_value is None:
|
||||
return {}
|
||||
return __context__['serial'].loads(redis_value)
|
||||
return __context__["serial"].loads(redis_value)
|
||||
|
||||
|
||||
def flush(bank, key=None):
|
||||
'''
|
||||
"""
|
||||
Remove the key from the cache bank with all the key content. If no key is specified, remove
|
||||
the entire bank with all keys and sub-banks inside.
|
||||
This function is using the Redis pipelining for best performance.
|
||||
|
@ -395,7 +408,7 @@ def flush(bank, key=None):
|
|||
(using the ``register_script`` feature) and call it whenever we flush.
|
||||
This script would only need to build this sub-tree causing problems. It can be added later and the behaviour
|
||||
should not change as the user needs to explicitly allow Salt inject scripts in their Redis instance.
|
||||
'''
|
||||
"""
|
||||
redis_server = _get_redis_server()
|
||||
redis_pipe = redis_server.pipeline()
|
||||
if key is None:
|
||||
|
@ -407,19 +420,21 @@ def flush(bank, key=None):
|
|||
# Redis key of the SET that stores the bank keys
|
||||
redis_pipe.smembers(bank_keys_redis_key) # fetch these keys
|
||||
log.debug(
|
||||
'Fetching the keys of the %s bank (%s)',
|
||||
bank_to_remove, bank_keys_redis_key
|
||||
"Fetching the keys of the %s bank (%s)",
|
||||
bank_to_remove,
|
||||
bank_keys_redis_key,
|
||||
)
|
||||
try:
|
||||
log.debug('Executing the pipe...')
|
||||
subtree_keys = redis_pipe.execute() # here are the keys under these banks to be removed
|
||||
log.debug("Executing the pipe...")
|
||||
subtree_keys = (
|
||||
redis_pipe.execute()
|
||||
) # here are the keys under these banks to be removed
|
||||
# this retunrs a list of sets, e.g.:
|
||||
# [set([]), set(['my-key']), set(['my-other-key', 'yet-another-key'])]
|
||||
# one set corresponding to a bank
|
||||
except (RedisConnectionError, RedisResponseError) as rerr:
|
||||
mesg = 'Cannot retrieve the keys under these cache banks: {rbanks}: {rerr}'.format(
|
||||
rbanks=', '.join(bank_paths_to_remove),
|
||||
rerr=rerr
|
||||
mesg = "Cannot retrieve the keys under these cache banks: {rbanks}: {rerr}".format(
|
||||
rbanks=", ".join(bank_paths_to_remove), rerr=rerr
|
||||
)
|
||||
log.error(mesg)
|
||||
raise SaltCacheError(mesg)
|
||||
|
@ -432,56 +447,60 @@ def flush(bank, key=None):
|
|||
redis_key = _get_key_redis_key(bank_path, key)
|
||||
redis_pipe.delete(redis_key) # kill 'em all!
|
||||
log.debug(
|
||||
'Removing the key %s under the %s bank (%s)',
|
||||
key, bank_path, redis_key
|
||||
"Removing the key %s under the %s bank (%s)",
|
||||
key,
|
||||
bank_path,
|
||||
redis_key,
|
||||
)
|
||||
bank_keys_redis_key = _get_bank_keys_redis_key(bank_path)
|
||||
redis_pipe.delete(bank_keys_redis_key)
|
||||
log.debug(
|
||||
'Removing the bank-keys key for the %s bank (%s)',
|
||||
bank_path, bank_keys_redis_key
|
||||
"Removing the bank-keys key for the %s bank (%s)",
|
||||
bank_path,
|
||||
bank_keys_redis_key,
|
||||
)
|
||||
# delete the Redis key where are stored
|
||||
# the list of keys under this bank
|
||||
bank_key = _get_bank_redis_key(bank_path)
|
||||
redis_pipe.delete(bank_key)
|
||||
log.debug('Removing the %s bank (%s)', bank_path, bank_key)
|
||||
log.debug("Removing the %s bank (%s)", bank_path, bank_key)
|
||||
# delete the bank key itself
|
||||
else:
|
||||
redis_key = _get_key_redis_key(bank, key)
|
||||
redis_pipe.delete(redis_key) # delete the key cached
|
||||
log.debug(
|
||||
'Removing the key %s under the %s bank (%s)',
|
||||
key, bank, redis_key
|
||||
)
|
||||
log.debug("Removing the key %s under the %s bank (%s)", key, bank, redis_key)
|
||||
bank_keys_redis_key = _get_bank_keys_redis_key(bank)
|
||||
redis_pipe.srem(bank_keys_redis_key, key)
|
||||
log.debug(
|
||||
'De-referencing the key %s from the bank-keys of the %s bank (%s)',
|
||||
key, bank, bank_keys_redis_key
|
||||
"De-referencing the key %s from the bank-keys of the %s bank (%s)",
|
||||
key,
|
||||
bank,
|
||||
bank_keys_redis_key,
|
||||
)
|
||||
# but also its reference from $BANKEYS list
|
||||
try:
|
||||
redis_pipe.execute() # Fluuuush
|
||||
except (RedisConnectionError, RedisResponseError) as rerr:
|
||||
mesg = 'Cannot flush the Redis cache bank {rbank}: {rerr}'.format(rbank=bank,
|
||||
rerr=rerr)
|
||||
mesg = "Cannot flush the Redis cache bank {rbank}: {rerr}".format(
|
||||
rbank=bank, rerr=rerr
|
||||
)
|
||||
log.error(mesg)
|
||||
raise SaltCacheError(mesg)
|
||||
return True
|
||||
|
||||
|
||||
def list_(bank):
|
||||
'''
|
||||
"""
|
||||
Lists entries stored in the specified bank.
|
||||
'''
|
||||
"""
|
||||
redis_server = _get_redis_server()
|
||||
bank_redis_key = _get_bank_redis_key(bank)
|
||||
try:
|
||||
banks = redis_server.smembers(bank_redis_key)
|
||||
except (RedisConnectionError, RedisResponseError) as rerr:
|
||||
mesg = 'Cannot list the Redis cache key {rkey}: {rerr}'.format(rkey=bank_redis_key,
|
||||
rerr=rerr)
|
||||
mesg = "Cannot list the Redis cache key {rkey}: {rerr}".format(
|
||||
rkey=bank_redis_key, rerr=rerr
|
||||
)
|
||||
log.error(mesg)
|
||||
raise SaltCacheError(mesg)
|
||||
if not banks:
|
||||
|
@ -490,15 +509,16 @@ def list_(bank):
|
|||
|
||||
|
||||
def contains(bank, key):
|
||||
'''
|
||||
"""
|
||||
Checks if the specified bank contains the specified key.
|
||||
'''
|
||||
"""
|
||||
redis_server = _get_redis_server()
|
||||
bank_redis_key = _get_bank_redis_key(bank)
|
||||
try:
|
||||
return redis_server.sismember(bank_redis_key, key)
|
||||
except (RedisConnectionError, RedisResponseError) as rerr:
|
||||
mesg = 'Cannot retrieve the Redis cache key {rkey}: {rerr}'.format(rkey=bank_redis_key,
|
||||
rerr=rerr)
|
||||
mesg = "Cannot retrieve the Redis cache key {rkey}: {rerr}".format(
|
||||
rkey=bank_redis_key, rerr=rerr
|
||||
)
|
||||
log.error(mesg)
|
||||
raise SaltCacheError(mesg)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The management of salt command line utilities are stored in here
|
||||
'''
|
||||
"""
|
||||
|
|
|
@ -1,60 +1,61 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
salt.cli.api
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Salt's api cli parser.
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
|
||||
# Import Salt libs
|
||||
import salt.client.netapi
|
||||
import salt.utils.files
|
||||
import salt.utils.parsers as parsers
|
||||
from salt.utils.verify import check_user, verify_log_files, verify_log
|
||||
from salt.utils.verify import check_user, verify_log, verify_log_files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SaltAPI(parsers.SaltAPIParser):
|
||||
'''
|
||||
"""
|
||||
The cli parser object used to fire up the salt api system.
|
||||
'''
|
||||
"""
|
||||
|
||||
def prepare(self):
|
||||
'''
|
||||
"""
|
||||
Run the preparation sequence required to start a salt-api daemon.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
||||
super(YourSubClass, self).prepare()
|
||||
'''
|
||||
"""
|
||||
super(SaltAPI, self).prepare()
|
||||
|
||||
try:
|
||||
if self.config['verify_env']:
|
||||
logfile = self.config['log_file']
|
||||
if self.config["verify_env"]:
|
||||
logfile = self.config["log_file"]
|
||||
if logfile is not None:
|
||||
# Logfile is not using Syslog, verify
|
||||
with salt.utils.files.set_umask(0o027):
|
||||
verify_log_files([logfile], self.config['user'])
|
||||
verify_log_files([logfile], self.config["user"])
|
||||
except OSError as err:
|
||||
log.exception('Failed to prepare salt environment')
|
||||
log.exception("Failed to prepare salt environment")
|
||||
self.shutdown(err.errno)
|
||||
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
log.info('Setting up the Salt API')
|
||||
log.info("Setting up the Salt API")
|
||||
self.api = salt.client.netapi.NetapiClient(self.config)
|
||||
self.daemonize_if_required()
|
||||
self.set_pidfile()
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
Start the actual master.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
@ -62,18 +63,18 @@ class SaltAPI(parsers.SaltAPIParser):
|
|||
super(YourSubClass, self).start()
|
||||
|
||||
NOTE: Run any required code before calling `super()`.
|
||||
'''
|
||||
"""
|
||||
super(SaltAPI, self).start()
|
||||
if check_user(self.config['user']):
|
||||
log.info('The salt-api is starting up')
|
||||
if check_user(self.config["user"]):
|
||||
log.info("The salt-api is starting up")
|
||||
self.api.run()
|
||||
|
||||
def shutdown(self, exitcode=0, exitmsg=None):
|
||||
'''
|
||||
"""
|
||||
If sub-classed, run any shutdown operations on this method.
|
||||
'''
|
||||
log.info('The salt-api is shutting down..')
|
||||
msg = 'The salt-api is shutdown. '
|
||||
"""
|
||||
log.info("The salt-api is shutting down..")
|
||||
msg = "The salt-api is shutdown. "
|
||||
if exitmsg is not None:
|
||||
exitmsg = msg + exitmsg
|
||||
else:
|
||||
|
|
|
@ -1,71 +1,76 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Execute batch runs
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import copy
|
||||
|
||||
# pylint: enable=import-error,no-name-in-module,redefined-builtin
|
||||
import logging
|
||||
import math
|
||||
import time
|
||||
import copy
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import salt.client
|
||||
import salt.exceptions
|
||||
import salt.output
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.stringutils
|
||||
import salt.client
|
||||
import salt.output
|
||||
import salt.exceptions
|
||||
|
||||
# Import 3rd-party libs
|
||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import range
|
||||
# pylint: enable=import-error,no-name-in-module,redefined-builtin
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Batch(object):
|
||||
'''
|
||||
"""
|
||||
Manage the execution of batch runs
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts, eauth=None, quiet=False, parser=None):
|
||||
self.opts = opts
|
||||
self.eauth = eauth if eauth else {}
|
||||
self.pub_kwargs = eauth if eauth else {}
|
||||
self.quiet = quiet
|
||||
self.local = salt.client.get_local_client(opts['conf_file'])
|
||||
self.local = salt.client.get_local_client(opts["conf_file"])
|
||||
self.minions, self.ping_gen, self.down_minions = self.__gather_minions()
|
||||
self.options = parser
|
||||
|
||||
def __gather_minions(self):
|
||||
'''
|
||||
"""
|
||||
Return a list of minions to use for the batch run
|
||||
'''
|
||||
args = [self.opts['tgt'],
|
||||
'test.ping',
|
||||
[],
|
||||
self.opts['timeout'],
|
||||
]
|
||||
"""
|
||||
args = [
|
||||
self.opts["tgt"],
|
||||
"test.ping",
|
||||
[],
|
||||
self.opts["timeout"],
|
||||
]
|
||||
|
||||
selected_target_option = self.opts.get('selected_target_option', None)
|
||||
selected_target_option = self.opts.get("selected_target_option", None)
|
||||
if selected_target_option is not None:
|
||||
args.append(selected_target_option)
|
||||
else:
|
||||
args.append(self.opts.get('tgt_type', 'glob'))
|
||||
args.append(self.opts.get("tgt_type", "glob"))
|
||||
|
||||
self.pub_kwargs['yield_pub_data'] = True
|
||||
ping_gen = self.local.cmd_iter(*args,
|
||||
gather_job_timeout=self.opts['gather_job_timeout'],
|
||||
**self.pub_kwargs)
|
||||
self.pub_kwargs["yield_pub_data"] = True
|
||||
ping_gen = self.local.cmd_iter(
|
||||
*args, gather_job_timeout=self.opts["gather_job_timeout"], **self.pub_kwargs
|
||||
)
|
||||
|
||||
# Broadcast to targets
|
||||
fret = set()
|
||||
nret = set()
|
||||
for ret in ping_gen:
|
||||
if ('minions' and 'jid') in ret:
|
||||
for minion in ret['minions']:
|
||||
if ("minions" and "jid") in ret:
|
||||
for minion in ret["minions"]:
|
||||
nret.add(minion)
|
||||
continue
|
||||
else:
|
||||
|
@ -73,30 +78,37 @@ class Batch(object):
|
|||
m = next(six.iterkeys(ret))
|
||||
except StopIteration:
|
||||
if not self.quiet:
|
||||
salt.utils.stringutils.print_cli('No minions matched the target.')
|
||||
salt.utils.stringutils.print_cli(
|
||||
"No minions matched the target."
|
||||
)
|
||||
break
|
||||
if m is not None:
|
||||
fret.add(m)
|
||||
return (list(fret), ping_gen, nret.difference(fret))
|
||||
|
||||
def get_bnum(self):
|
||||
'''
|
||||
"""
|
||||
Return the active number of minions to maintain
|
||||
'''
|
||||
"""
|
||||
partition = lambda x: float(x) / 100.0 * len(self.minions)
|
||||
try:
|
||||
if isinstance(self.opts['batch'], six.string_types) and '%' in self.opts['batch']:
|
||||
res = partition(float(self.opts['batch'].strip('%')))
|
||||
if (
|
||||
isinstance(self.opts["batch"], six.string_types)
|
||||
and "%" in self.opts["batch"]
|
||||
):
|
||||
res = partition(float(self.opts["batch"].strip("%")))
|
||||
if res < 1:
|
||||
return int(math.ceil(res))
|
||||
else:
|
||||
return int(res)
|
||||
else:
|
||||
return int(self.opts['batch'])
|
||||
return int(self.opts["batch"])
|
||||
except ValueError:
|
||||
if not self.quiet:
|
||||
salt.utils.stringutils.print_cli('Invalid batch data sent: {0}\nData must be in the '
|
||||
'form of %10, 10% or 3'.format(self.opts['batch']))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"Invalid batch data sent: {0}\nData must be in the "
|
||||
"form of %10, 10% or 3".format(self.opts["batch"])
|
||||
)
|
||||
|
||||
def __update_wait(self, wait):
|
||||
now = datetime.now()
|
||||
|
@ -107,15 +119,16 @@ class Batch(object):
|
|||
del wait[:i]
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute the batch run
|
||||
'''
|
||||
args = [[],
|
||||
self.opts['fun'],
|
||||
self.opts['arg'],
|
||||
self.opts['timeout'],
|
||||
'list',
|
||||
]
|
||||
"""
|
||||
args = [
|
||||
[],
|
||||
self.opts["fun"],
|
||||
self.opts["arg"],
|
||||
self.opts["timeout"],
|
||||
"list",
|
||||
]
|
||||
bnum = self.get_bnum()
|
||||
# No targets to run
|
||||
if not self.minions:
|
||||
|
@ -125,7 +138,7 @@ class Batch(object):
|
|||
ret = {}
|
||||
iters = []
|
||||
# wait the specified time before decide a job is actually done
|
||||
bwait = self.opts.get('batch_wait', 0)
|
||||
bwait = self.opts.get("batch_wait", 0)
|
||||
wait = []
|
||||
|
||||
if self.options:
|
||||
|
@ -147,7 +160,11 @@ class Batch(object):
|
|||
# We already know some minions didn't respond to the ping, so inform
|
||||
# the user we won't be attempting to run a job on them
|
||||
for down_minion in self.down_minions:
|
||||
salt.utils.stringutils.print_cli('Minion {0} did not respond. No job will be sent.'.format(down_minion))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"Minion {0} did not respond. No job will be sent.".format(
|
||||
down_minion
|
||||
)
|
||||
)
|
||||
|
||||
# Iterate while we still have things to execute
|
||||
while len(ret) < len(self.minions):
|
||||
|
@ -172,22 +189,25 @@ class Batch(object):
|
|||
|
||||
if next_:
|
||||
if not self.quiet:
|
||||
salt.utils.stringutils.print_cli('\nExecuting run on {0}\n'.format(sorted(next_)))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"\nExecuting run on {0}\n".format(sorted(next_))
|
||||
)
|
||||
# create a new iterator for this batch of minions
|
||||
new_iter = self.local.cmd_iter_no_block(
|
||||
*args,
|
||||
raw=self.opts.get('raw', False),
|
||||
ret=self.opts.get('return', ''),
|
||||
show_jid=show_jid,
|
||||
verbose=show_verbose,
|
||||
gather_job_timeout=self.opts['gather_job_timeout'],
|
||||
**self.eauth)
|
||||
*args,
|
||||
raw=self.opts.get("raw", False),
|
||||
ret=self.opts.get("return", ""),
|
||||
show_jid=show_jid,
|
||||
verbose=show_verbose,
|
||||
gather_job_timeout=self.opts["gather_job_timeout"],
|
||||
**self.eauth
|
||||
)
|
||||
# add it to our iterators and to the minion_tracker
|
||||
iters.append(new_iter)
|
||||
minion_tracker[new_iter] = {}
|
||||
# every iterator added is 'active' and has its set of minions
|
||||
minion_tracker[new_iter]['minions'] = next_
|
||||
minion_tracker[new_iter]['active'] = True
|
||||
minion_tracker[new_iter]["minions"] = next_
|
||||
minion_tracker[new_iter]["active"] = True
|
||||
|
||||
else:
|
||||
time.sleep(0.02)
|
||||
|
@ -214,19 +234,29 @@ class Batch(object):
|
|||
if ncnt > 5:
|
||||
break
|
||||
continue
|
||||
if self.opts.get('raw'):
|
||||
parts.update({part['data']['id']: part})
|
||||
if part['data']['id'] in minion_tracker[queue]['minions']:
|
||||
minion_tracker[queue]['minions'].remove(part['data']['id'])
|
||||
if self.opts.get("raw"):
|
||||
parts.update({part["data"]["id"]: part})
|
||||
if part["data"]["id"] in minion_tracker[queue]["minions"]:
|
||||
minion_tracker[queue]["minions"].remove(
|
||||
part["data"]["id"]
|
||||
)
|
||||
else:
|
||||
salt.utils.stringutils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(part['id']))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"minion {0} was already deleted from tracker, probably a duplicate key".format(
|
||||
part["id"]
|
||||
)
|
||||
)
|
||||
else:
|
||||
parts.update(part)
|
||||
for id in part:
|
||||
if id in minion_tracker[queue]['minions']:
|
||||
minion_tracker[queue]['minions'].remove(id)
|
||||
if id in minion_tracker[queue]["minions"]:
|
||||
minion_tracker[queue]["minions"].remove(id)
|
||||
else:
|
||||
salt.utils.stringutils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(id))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"minion {0} was already deleted from tracker, probably a duplicate key".format(
|
||||
id
|
||||
)
|
||||
)
|
||||
except StopIteration:
|
||||
# if a iterator is done:
|
||||
# - set it to inactive
|
||||
|
@ -234,14 +264,14 @@ class Batch(object):
|
|||
|
||||
# check if the tracker contains the iterator
|
||||
if queue in minion_tracker:
|
||||
minion_tracker[queue]['active'] = False
|
||||
minion_tracker[queue]["active"] = False
|
||||
|
||||
# add all minions that belong to this iterator and
|
||||
# that have not responded to parts{} with an empty response
|
||||
for minion in minion_tracker[queue]['minions']:
|
||||
for minion in minion_tracker[queue]["minions"]:
|
||||
if minion not in parts:
|
||||
parts[minion] = {}
|
||||
parts[minion]['ret'] = {}
|
||||
parts[minion]["ret"] = {}
|
||||
|
||||
for minion, data in six.iteritems(parts):
|
||||
if minion in active:
|
||||
|
@ -250,45 +280,47 @@ class Batch(object):
|
|||
wait.append(datetime.now() + timedelta(seconds=bwait))
|
||||
# Munge retcode into return data
|
||||
failhard = False
|
||||
if 'retcode' in data and isinstance(data['ret'], dict) and 'retcode' not in data['ret']:
|
||||
data['ret']['retcode'] = data['retcode']
|
||||
if self.opts.get('failhard') and data['ret']['retcode'] > 0:
|
||||
if (
|
||||
"retcode" in data
|
||||
and isinstance(data["ret"], dict)
|
||||
and "retcode" not in data["ret"]
|
||||
):
|
||||
data["ret"]["retcode"] = data["retcode"]
|
||||
if self.opts.get("failhard") and data["ret"]["retcode"] > 0:
|
||||
failhard = True
|
||||
else:
|
||||
if self.opts.get('failhard') and data['retcode'] > 0:
|
||||
if self.opts.get("failhard") and data["retcode"] > 0:
|
||||
failhard = True
|
||||
|
||||
if self.opts.get('raw'):
|
||||
if self.opts.get("raw"):
|
||||
ret[minion] = data
|
||||
yield data
|
||||
else:
|
||||
ret[minion] = data['ret']
|
||||
yield {minion: data['ret']}
|
||||
ret[minion] = data["ret"]
|
||||
yield {minion: data["ret"]}
|
||||
if not self.quiet:
|
||||
ret[minion] = data['ret']
|
||||
data[minion] = data.pop('ret')
|
||||
if 'out' in data:
|
||||
out = data.pop('out')
|
||||
ret[minion] = data["ret"]
|
||||
data[minion] = data.pop("ret")
|
||||
if "out" in data:
|
||||
out = data.pop("out")
|
||||
else:
|
||||
out = None
|
||||
salt.output.display_output(
|
||||
data,
|
||||
out,
|
||||
self.opts)
|
||||
salt.output.display_output(data, out, self.opts)
|
||||
if failhard:
|
||||
log.error(
|
||||
'Minion %s returned with non-zero exit code. '
|
||||
'Batch run stopped due to failhard', minion
|
||||
"Minion %s returned with non-zero exit code. "
|
||||
"Batch run stopped due to failhard",
|
||||
minion,
|
||||
)
|
||||
raise StopIteration
|
||||
|
||||
# remove inactive iterators from the iters list
|
||||
for queue in minion_tracker:
|
||||
# only remove inactive queues
|
||||
if not minion_tracker[queue]['active'] and queue in iters:
|
||||
if not minion_tracker[queue]["active"] and queue in iters:
|
||||
iters.remove(queue)
|
||||
# also remove the iterator's minions from the active list
|
||||
for minion in minion_tracker[queue]['minions']:
|
||||
for minion in minion_tracker[queue]["minions"]:
|
||||
if minion in active:
|
||||
active.remove(minion)
|
||||
if bwait:
|
||||
|
|
|
@ -1,44 +1,45 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
import salt.utils.parsers
|
||||
from salt.utils.verify import verify_log
|
||||
from salt.config import _expand_glob_path
|
||||
import salt.cli.caller
|
||||
import salt.defaults.exitcodes
|
||||
import salt.utils.parsers
|
||||
from salt.config import _expand_glob_path
|
||||
from salt.utils.verify import verify_log
|
||||
|
||||
|
||||
class SaltCall(salt.utils.parsers.SaltCallOptionParser):
|
||||
'''
|
||||
"""
|
||||
Used to locally execute a salt command
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute the salt call!
|
||||
'''
|
||||
"""
|
||||
self.parse_args()
|
||||
|
||||
if self.options.file_root:
|
||||
# check if the argument is pointing to a file on disk
|
||||
file_root = os.path.abspath(self.options.file_root)
|
||||
self.config['file_roots'] = {'base': _expand_glob_path([file_root])}
|
||||
self.config["file_roots"] = {"base": _expand_glob_path([file_root])}
|
||||
|
||||
if self.options.pillar_root:
|
||||
# check if the argument is pointing to a file on disk
|
||||
pillar_root = os.path.abspath(self.options.pillar_root)
|
||||
self.config['pillar_roots'] = {'base': _expand_glob_path([pillar_root])}
|
||||
self.config["pillar_roots"] = {"base": _expand_glob_path([pillar_root])}
|
||||
|
||||
if self.options.states_dir:
|
||||
# check if the argument is pointing to a file on disk
|
||||
states_dir = os.path.abspath(self.options.states_dir)
|
||||
self.config['states_dirs'] = [states_dir]
|
||||
self.config["states_dirs"] = [states_dir]
|
||||
|
||||
if self.options.local:
|
||||
self.config['file_client'] = 'local'
|
||||
self.config["file_client"] = "local"
|
||||
if self.options.master:
|
||||
self.config['master'] = self.options.master
|
||||
self.config["master"] = self.options.master
|
||||
|
||||
# Setup file logging!
|
||||
self.setup_logfile_logger()
|
||||
|
|
|
@ -1,19 +1,20 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The caller module is used as a front-end to manage direct calls to the salt
|
||||
minion modules.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
# Import salt libs
|
||||
import salt
|
||||
import salt.defaults.exitcodes
|
||||
import salt.loader
|
||||
import salt.minion
|
||||
import salt.output
|
||||
|
@ -26,62 +27,63 @@ import salt.utils.jid
|
|||
import salt.utils.minion
|
||||
import salt.utils.profile
|
||||
import salt.utils.stringutils
|
||||
import salt.defaults.exitcodes
|
||||
from salt.log import LOG_LEVELS
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
# Custom exceptions
|
||||
from salt.exceptions import (
|
||||
SaltClientError,
|
||||
CommandNotFoundError,
|
||||
CommandExecutionError,
|
||||
CommandNotFoundError,
|
||||
SaltClientError,
|
||||
SaltInvocationError,
|
||||
)
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
from salt.log import LOG_LEVELS
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Caller(object):
|
||||
'''
|
||||
"""
|
||||
Factory class to create salt-call callers for different transport
|
||||
'''
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def factory(opts, **kwargs):
|
||||
# Default to ZeroMQ for now
|
||||
ttype = 'zeromq'
|
||||
ttype = "zeromq"
|
||||
|
||||
# determine the ttype
|
||||
if 'transport' in opts:
|
||||
ttype = opts['transport']
|
||||
elif 'transport' in opts.get('pillar', {}).get('master', {}):
|
||||
ttype = opts['pillar']['master']['transport']
|
||||
if "transport" in opts:
|
||||
ttype = opts["transport"]
|
||||
elif "transport" in opts.get("pillar", {}).get("master", {}):
|
||||
ttype = opts["pillar"]["master"]["transport"]
|
||||
|
||||
# switch on available ttypes
|
||||
if ttype in ('zeromq', 'tcp', 'detect'):
|
||||
if ttype in ("zeromq", "tcp", "detect"):
|
||||
return ZeroMQCaller(opts, **kwargs)
|
||||
else:
|
||||
raise Exception('Callers are only defined for ZeroMQ and TCP')
|
||||
raise Exception("Callers are only defined for ZeroMQ and TCP")
|
||||
# return NewKindOfCaller(opts, **kwargs)
|
||||
|
||||
|
||||
class BaseCaller(object):
|
||||
'''
|
||||
"""
|
||||
Base class for caller transports
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts):
|
||||
'''
|
||||
"""
|
||||
Pass in command line opts
|
||||
'''
|
||||
"""
|
||||
self.opts = opts
|
||||
self.opts['caller'] = True
|
||||
self.opts["caller"] = True
|
||||
self.serial = salt.payload.Serial(self.opts)
|
||||
# Handle this here so other deeper code which might
|
||||
# be imported as part of the salt api doesn't do a
|
||||
# nasty sys.exit() and tick off our developer users
|
||||
try:
|
||||
if self.opts.get('proxyid'):
|
||||
if self.opts.get("proxyid"):
|
||||
self.minion = salt.minion.SProxyMinion(opts)
|
||||
else:
|
||||
self.minion = salt.minion.SMinion(opts)
|
||||
|
@ -89,30 +91,30 @@ class BaseCaller(object):
|
|||
raise SystemExit(six.text_type(exc))
|
||||
|
||||
def print_docs(self):
|
||||
'''
|
||||
"""
|
||||
Pick up the documentation for all of the modules and print it out.
|
||||
'''
|
||||
"""
|
||||
docs = {}
|
||||
for name, func in six.iteritems(self.minion.functions):
|
||||
if name not in docs:
|
||||
if func.__doc__:
|
||||
docs[name] = func.__doc__
|
||||
for name in sorted(docs):
|
||||
if name.startswith(self.opts.get('fun', '')):
|
||||
salt.utils.stringutils.print_cli('{0}:\n{1}\n'.format(name, docs[name]))
|
||||
if name.startswith(self.opts.get("fun", "")):
|
||||
salt.utils.stringutils.print_cli("{0}:\n{1}\n".format(name, docs[name]))
|
||||
|
||||
def print_grains(self):
|
||||
'''
|
||||
"""
|
||||
Print out the grains
|
||||
'''
|
||||
grains = self.minion.opts.get('grains') or salt.loader.grains(self.opts)
|
||||
salt.output.display_output({'local': grains}, 'grains', self.opts)
|
||||
"""
|
||||
grains = self.minion.opts.get("grains") or salt.loader.grains(self.opts)
|
||||
salt.output.display_output({"local": grains}, "grains", self.opts)
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute the salt call logic
|
||||
'''
|
||||
profiling_enabled = self.opts.get('profiling_enabled', False)
|
||||
"""
|
||||
profiling_enabled = self.opts.get("profiling_enabled", False)
|
||||
try:
|
||||
pr = salt.utils.profile.activate_profile(profiling_enabled)
|
||||
try:
|
||||
|
@ -120,116 +122,128 @@ class BaseCaller(object):
|
|||
finally:
|
||||
salt.utils.profile.output_profile(
|
||||
pr,
|
||||
stats_path=self.opts.get('profiling_path', '/tmp/stats'),
|
||||
stop=True)
|
||||
out = ret.get('out', 'nested')
|
||||
if self.opts['print_metadata']:
|
||||
stats_path=self.opts.get("profiling_path", "/tmp/stats"),
|
||||
stop=True,
|
||||
)
|
||||
out = ret.get("out", "nested")
|
||||
if self.opts["print_metadata"]:
|
||||
print_ret = ret
|
||||
out = 'nested'
|
||||
out = "nested"
|
||||
else:
|
||||
print_ret = ret.get('return', {})
|
||||
print_ret = ret.get("return", {})
|
||||
salt.output.display_output(
|
||||
{'local': print_ret},
|
||||
out=out,
|
||||
opts=self.opts,
|
||||
_retcode=ret.get('retcode', 0))
|
||||
{"local": print_ret},
|
||||
out=out,
|
||||
opts=self.opts,
|
||||
_retcode=ret.get("retcode", 0),
|
||||
)
|
||||
# _retcode will be available in the kwargs of the outputter function
|
||||
if self.opts.get('retcode_passthrough', False):
|
||||
sys.exit(ret['retcode'])
|
||||
elif ret['retcode'] != salt.defaults.exitcodes.EX_OK:
|
||||
if self.opts.get("retcode_passthrough", False):
|
||||
sys.exit(ret["retcode"])
|
||||
elif ret["retcode"] != salt.defaults.exitcodes.EX_OK:
|
||||
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
except SaltInvocationError as err:
|
||||
raise SystemExit(err)
|
||||
|
||||
def call(self):
|
||||
'''
|
||||
"""
|
||||
Call the module
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
fun = self.opts['fun']
|
||||
ret['jid'] = salt.utils.jid.gen_jid(self.opts)
|
||||
fun = self.opts["fun"]
|
||||
ret["jid"] = salt.utils.jid.gen_jid(self.opts)
|
||||
proc_fn = os.path.join(
|
||||
salt.minion.get_proc_dir(self.opts['cachedir']),
|
||||
ret['jid']
|
||||
salt.minion.get_proc_dir(self.opts["cachedir"]), ret["jid"]
|
||||
)
|
||||
if fun not in self.minion.functions:
|
||||
docs = self.minion.functions['sys.doc']('{0}*'.format(fun))
|
||||
docs = self.minion.functions["sys.doc"]("{0}*".format(fun))
|
||||
if docs:
|
||||
docs[fun] = self.minion.functions.missing_fun_string(fun)
|
||||
ret['out'] = 'nested'
|
||||
ret['return'] = docs
|
||||
ret["out"] = "nested"
|
||||
ret["return"] = docs
|
||||
return ret
|
||||
sys.stderr.write(self.minion.functions.missing_fun_string(fun))
|
||||
mod_name = fun.split('.')[0]
|
||||
mod_name = fun.split(".")[0]
|
||||
if mod_name in self.minion.function_errors:
|
||||
sys.stderr.write(' Possible reasons: {0}\n'.format(self.minion.function_errors[mod_name]))
|
||||
sys.stderr.write(
|
||||
" Possible reasons: {0}\n".format(
|
||||
self.minion.function_errors[mod_name]
|
||||
)
|
||||
)
|
||||
else:
|
||||
sys.stderr.write('\n')
|
||||
sys.stderr.write("\n")
|
||||
sys.exit(-1)
|
||||
metadata = self.opts.get('metadata')
|
||||
metadata = self.opts.get("metadata")
|
||||
if metadata is not None:
|
||||
metadata = salt.utils.args.yamlify_arg(metadata)
|
||||
try:
|
||||
sdata = {
|
||||
'fun': fun,
|
||||
'pid': os.getpid(),
|
||||
'jid': ret['jid'],
|
||||
'tgt': 'salt-call'}
|
||||
"fun": fun,
|
||||
"pid": os.getpid(),
|
||||
"jid": ret["jid"],
|
||||
"tgt": "salt-call",
|
||||
}
|
||||
if metadata is not None:
|
||||
sdata['metadata'] = metadata
|
||||
sdata["metadata"] = metadata
|
||||
args, kwargs = salt.minion.load_args_and_kwargs(
|
||||
self.minion.functions[fun],
|
||||
salt.utils.args.parse_input(
|
||||
self.opts['arg'],
|
||||
no_parse=self.opts.get('no_parse', [])),
|
||||
data=sdata)
|
||||
self.opts["arg"], no_parse=self.opts.get("no_parse", [])
|
||||
),
|
||||
data=sdata,
|
||||
)
|
||||
try:
|
||||
with salt.utils.files.fopen(proc_fn, 'w+b') as fp_:
|
||||
with salt.utils.files.fopen(proc_fn, "w+b") as fp_:
|
||||
fp_.write(self.serial.dumps(sdata))
|
||||
except NameError:
|
||||
# Don't require msgpack with local
|
||||
pass
|
||||
except IOError:
|
||||
sys.stderr.write(
|
||||
'Cannot write to process directory. '
|
||||
'Do you have permissions to '
|
||||
'write to {0} ?\n'.format(proc_fn))
|
||||
"Cannot write to process directory. "
|
||||
"Do you have permissions to "
|
||||
"write to {0} ?\n".format(proc_fn)
|
||||
)
|
||||
func = self.minion.functions[fun]
|
||||
data = {
|
||||
'arg': args,
|
||||
'fun': fun
|
||||
}
|
||||
data = {"arg": args, "fun": fun}
|
||||
data.update(kwargs)
|
||||
executors = getattr(self.minion, 'module_executors', []) or \
|
||||
salt.utils.args.yamlify_arg(
|
||||
self.opts.get('module_executors', '[direct_call]')
|
||||
)
|
||||
if self.opts.get('executor_opts', None):
|
||||
data['executor_opts'] = salt.utils.args.yamlify_arg(
|
||||
self.opts['executor_opts']
|
||||
executors = getattr(
|
||||
self.minion, "module_executors", []
|
||||
) or salt.utils.args.yamlify_arg(
|
||||
self.opts.get("module_executors", "[direct_call]")
|
||||
)
|
||||
if self.opts.get("executor_opts", None):
|
||||
data["executor_opts"] = salt.utils.args.yamlify_arg(
|
||||
self.opts["executor_opts"]
|
||||
)
|
||||
if isinstance(executors, six.string_types):
|
||||
executors = [executors]
|
||||
try:
|
||||
for name in executors:
|
||||
fname = '{0}.execute'.format(name)
|
||||
fname = "{0}.execute".format(name)
|
||||
if fname not in self.minion.executors:
|
||||
raise SaltInvocationError("Executor '{0}' is not available".format(name))
|
||||
ret['return'] = self.minion.executors[fname](self.opts, data, func, args, kwargs)
|
||||
if ret['return'] is not None:
|
||||
raise SaltInvocationError(
|
||||
"Executor '{0}' is not available".format(name)
|
||||
)
|
||||
ret["return"] = self.minion.executors[fname](
|
||||
self.opts, data, func, args, kwargs
|
||||
)
|
||||
if ret["return"] is not None:
|
||||
break
|
||||
except TypeError as exc:
|
||||
sys.stderr.write('\nPassed invalid arguments: {0}.\n\nUsage:\n'.format(exc))
|
||||
sys.stderr.write(
|
||||
"\nPassed invalid arguments: {0}.\n\nUsage:\n".format(exc)
|
||||
)
|
||||
salt.utils.stringutils.print_cli(func.__doc__)
|
||||
active_level = LOG_LEVELS.get(
|
||||
self.opts['log_level'].lower(), logging.ERROR)
|
||||
self.opts["log_level"].lower(), logging.ERROR
|
||||
)
|
||||
if active_level <= logging.DEBUG:
|
||||
trace = traceback.format_exc()
|
||||
sys.stderr.write(trace)
|
||||
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
try:
|
||||
retcode = sys.modules[
|
||||
func.__module__].__context__.get('retcode', 0)
|
||||
retcode = sys.modules[func.__module__].__context__.get("retcode", 0)
|
||||
except AttributeError:
|
||||
retcode = salt.defaults.exitcodes.EX_GENERIC
|
||||
|
||||
|
@ -237,52 +251,54 @@ class BaseCaller(object):
|
|||
# No nonzero retcode in __context__ dunder. Check if return
|
||||
# is a dictionary with a "result" or "success" key.
|
||||
try:
|
||||
func_result = all(ret['return'].get(x, True)
|
||||
for x in ('result', 'success'))
|
||||
func_result = all(
|
||||
ret["return"].get(x, True) for x in ("result", "success")
|
||||
)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
# return data is not a dict
|
||||
func_result = True
|
||||
if not func_result:
|
||||
retcode = salt.defaults.exitcodes.EX_GENERIC
|
||||
|
||||
ret['retcode'] = retcode
|
||||
ret["retcode"] = retcode
|
||||
except (CommandExecutionError) as exc:
|
||||
msg = 'Error running \'{0}\': {1}\n'
|
||||
active_level = LOG_LEVELS.get(
|
||||
self.opts['log_level'].lower(), logging.ERROR)
|
||||
msg = "Error running '{0}': {1}\n"
|
||||
active_level = LOG_LEVELS.get(self.opts["log_level"].lower(), logging.ERROR)
|
||||
if active_level <= logging.DEBUG:
|
||||
sys.stderr.write(traceback.format_exc())
|
||||
sys.stderr.write(msg.format(fun, exc))
|
||||
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
except CommandNotFoundError as exc:
|
||||
msg = 'Command required for \'{0}\' not found: {1}\n'
|
||||
msg = "Command required for '{0}' not found: {1}\n"
|
||||
sys.stderr.write(msg.format(fun, exc))
|
||||
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
try:
|
||||
os.remove(proc_fn)
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
if hasattr(self.minion.functions[fun], '__outputter__'):
|
||||
if hasattr(self.minion.functions[fun], "__outputter__"):
|
||||
oput = self.minion.functions[fun].__outputter__
|
||||
if isinstance(oput, six.string_types):
|
||||
ret['out'] = oput
|
||||
is_local = self.opts['local'] or self.opts.get(
|
||||
'file_client', False) == 'local' or self.opts.get(
|
||||
'master_type') == 'disable'
|
||||
returners = self.opts.get('return', '').split(',')
|
||||
ret["out"] = oput
|
||||
is_local = (
|
||||
self.opts["local"]
|
||||
or self.opts.get("file_client", False) == "local"
|
||||
or self.opts.get("master_type") == "disable"
|
||||
)
|
||||
returners = self.opts.get("return", "").split(",")
|
||||
if (not is_local) or returners:
|
||||
ret['id'] = self.opts['id']
|
||||
ret['fun'] = fun
|
||||
ret['fun_args'] = self.opts['arg']
|
||||
ret["id"] = self.opts["id"]
|
||||
ret["fun"] = fun
|
||||
ret["fun_args"] = self.opts["arg"]
|
||||
if metadata is not None:
|
||||
ret['metadata'] = metadata
|
||||
ret["metadata"] = metadata
|
||||
|
||||
for returner in returners:
|
||||
if not returner: # if we got an empty returner somehow, skip
|
||||
continue
|
||||
try:
|
||||
ret['success'] = True
|
||||
self.minion.returners['{0}.returner'.format(returner)](ret)
|
||||
ret["success"] = True
|
||||
self.minion.returners["{0}.returner".format(returner)](ret)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
|
||||
|
@ -290,34 +306,36 @@ class BaseCaller(object):
|
|||
if not is_local:
|
||||
try:
|
||||
mret = ret.copy()
|
||||
mret['jid'] = 'req'
|
||||
mret["jid"] = "req"
|
||||
self.return_pub(mret)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
pass
|
||||
elif self.opts['cache_jobs']:
|
||||
elif self.opts["cache_jobs"]:
|
||||
# Local job cache has been enabled
|
||||
salt.utils.minion.cache_jobs(self.opts, ret['jid'], ret)
|
||||
salt.utils.minion.cache_jobs(self.opts, ret["jid"], ret)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class ZeroMQCaller(BaseCaller):
|
||||
'''
|
||||
"""
|
||||
Object to wrap the calling of local salt modules for the salt-call command
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts): # pylint: disable=useless-super-delegation
|
||||
'''
|
||||
"""
|
||||
Pass in the command line options
|
||||
'''
|
||||
"""
|
||||
super(ZeroMQCaller, self).__init__(opts)
|
||||
|
||||
def return_pub(self, ret):
|
||||
'''
|
||||
"""
|
||||
Return the data up to the master
|
||||
'''
|
||||
with salt.transport.client.ReqChannel.factory(self.opts,
|
||||
usage='salt_call') as channel:
|
||||
load = {'cmd': '_return', 'id': self.opts['id']}
|
||||
"""
|
||||
with salt.transport.client.ReqChannel.factory(
|
||||
self.opts, usage="salt_call"
|
||||
) as channel:
|
||||
load = {"cmd": "_return", "id": self.opts["id"]}
|
||||
for key, value in six.iteritems(ret):
|
||||
load[key] = value
|
||||
channel.send(load)
|
||||
|
|
163
salt/cli/cp.py
163
salt/cli/cp.py
|
@ -1,13 +1,14 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The cp module is used to execute the logic used by the salt-cp command
|
||||
line application, salt-cp is NOT intended to broadcast large files, it is
|
||||
intended to handle text files.
|
||||
Salt-cp can be used to distribute configuration files
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import base64
|
||||
import errno
|
||||
import logging
|
||||
|
@ -34,14 +35,14 @@ log = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class SaltCPCli(salt.utils.parsers.SaltCPOptionParser):
|
||||
'''
|
||||
"""
|
||||
Run the salt-cp command line client
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute salt-cp
|
||||
'''
|
||||
"""
|
||||
self.parse_args()
|
||||
|
||||
# Setup file logging!
|
||||
|
@ -53,9 +54,10 @@ class SaltCPCli(salt.utils.parsers.SaltCPOptionParser):
|
|||
|
||||
|
||||
class SaltCP(object):
|
||||
'''
|
||||
"""
|
||||
Create a salt cp object, used to distribute simple files with salt
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts):
|
||||
self.opts = opts
|
||||
self.is_windows = salt.utils.platform.is_windows()
|
||||
|
@ -69,9 +71,9 @@ class SaltCP(object):
|
|||
return None
|
||||
|
||||
def _recurse(self, path):
|
||||
'''
|
||||
"""
|
||||
Get a list of all specified files
|
||||
'''
|
||||
"""
|
||||
files = {}
|
||||
empty_dirs = []
|
||||
try:
|
||||
|
@ -79,7 +81,7 @@ class SaltCP(object):
|
|||
except OSError as exc:
|
||||
if exc.errno == errno.ENOENT:
|
||||
# Path does not exist
|
||||
sys.stderr.write('{0} does not exist\n'.format(path))
|
||||
sys.stderr.write("{0} does not exist\n".format(path))
|
||||
sys.exit(42)
|
||||
elif exc.errno in (errno.EINVAL, errno.ENOTDIR):
|
||||
# Path is a file (EINVAL on Windows, ENOTDIR otherwise)
|
||||
|
@ -97,107 +99,108 @@ class SaltCP(object):
|
|||
def _list_files(self):
|
||||
files = {}
|
||||
empty_dirs = set()
|
||||
for fn_ in self.opts['src']:
|
||||
for fn_ in self.opts["src"]:
|
||||
files_, empty_dirs_ = self._recurse(fn_)
|
||||
files.update(files_)
|
||||
empty_dirs.update(empty_dirs_)
|
||||
return files, sorted(empty_dirs)
|
||||
|
||||
def _file_dict(self, fn_):
|
||||
'''
|
||||
"""
|
||||
Take a path and return the contents of the file as a string
|
||||
'''
|
||||
"""
|
||||
if not os.path.isfile(fn_):
|
||||
err = 'The referenced file, {0} is not available.'.format(fn_)
|
||||
sys.stderr.write(err + '\n')
|
||||
err = "The referenced file, {0} is not available.".format(fn_)
|
||||
sys.stderr.write(err + "\n")
|
||||
sys.exit(42)
|
||||
with salt.utils.files.fopen(fn_, 'r') as fp_:
|
||||
with salt.utils.files.fopen(fn_, "r") as fp_:
|
||||
data = fp_.read()
|
||||
return {fn_: data}
|
||||
|
||||
def _load_files(self):
|
||||
'''
|
||||
"""
|
||||
Parse the files indicated in opts['src'] and load them into a python
|
||||
object for transport
|
||||
'''
|
||||
"""
|
||||
files = {}
|
||||
for fn_ in self.opts['src']:
|
||||
for fn_ in self.opts["src"]:
|
||||
if os.path.isfile(fn_):
|
||||
files.update(self._file_dict(fn_))
|
||||
elif os.path.isdir(fn_):
|
||||
salt.utils.stringutils.print_cli(
|
||||
fn_ + ' is a directory, only files are supported '
|
||||
fn_ + " is a directory, only files are supported "
|
||||
'in non-chunked mode. Use "--chunked" command '
|
||||
'line argument.')
|
||||
"line argument."
|
||||
)
|
||||
sys.exit(1)
|
||||
return files
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Make the salt client call
|
||||
'''
|
||||
if self.opts['chunked']:
|
||||
"""
|
||||
if self.opts["chunked"]:
|
||||
ret = self.run_chunked()
|
||||
else:
|
||||
ret = self.run_oldstyle()
|
||||
|
||||
salt.output.display_output(
|
||||
ret,
|
||||
self.opts.get('output', 'nested'),
|
||||
self.opts)
|
||||
salt.output.display_output(ret, self.opts.get("output", "nested"), self.opts)
|
||||
|
||||
def run_oldstyle(self):
|
||||
'''
|
||||
"""
|
||||
Make the salt client call in old-style all-in-one call method
|
||||
'''
|
||||
arg = [self._load_files(), self.opts['dest']]
|
||||
local = salt.client.get_local_client(self.opts['conf_file'])
|
||||
args = [self.opts['tgt'],
|
||||
'cp.recv',
|
||||
arg,
|
||||
self.opts['timeout'],
|
||||
]
|
||||
"""
|
||||
arg = [self._load_files(), self.opts["dest"]]
|
||||
local = salt.client.get_local_client(self.opts["conf_file"])
|
||||
args = [
|
||||
self.opts["tgt"],
|
||||
"cp.recv",
|
||||
arg,
|
||||
self.opts["timeout"],
|
||||
]
|
||||
|
||||
selected_target_option = self.opts.get('selected_target_option', None)
|
||||
selected_target_option = self.opts.get("selected_target_option", None)
|
||||
if selected_target_option is not None:
|
||||
args.append(selected_target_option)
|
||||
|
||||
return local.cmd(*args)
|
||||
|
||||
def run_chunked(self):
|
||||
'''
|
||||
"""
|
||||
Make the salt client call in the new fasion chunked multi-call way
|
||||
'''
|
||||
"""
|
||||
files, empty_dirs = self._list_files()
|
||||
dest = self.opts['dest']
|
||||
gzip = self.opts['gzip']
|
||||
tgt = self.opts['tgt']
|
||||
timeout = self.opts['timeout']
|
||||
selected_target_option = self.opts.get('selected_target_option')
|
||||
dest = self.opts["dest"]
|
||||
gzip = self.opts["gzip"]
|
||||
tgt = self.opts["tgt"]
|
||||
timeout = self.opts["timeout"]
|
||||
selected_target_option = self.opts.get("selected_target_option")
|
||||
|
||||
dest_is_dir = bool(empty_dirs) \
|
||||
or len(files) > 1 \
|
||||
or bool(re.search(r'[\\/]$', dest))
|
||||
dest_is_dir = (
|
||||
bool(empty_dirs) or len(files) > 1 or bool(re.search(r"[\\/]$", dest))
|
||||
)
|
||||
|
||||
reader = salt.utils.gzip_util.compress_file \
|
||||
if gzip \
|
||||
reader = (
|
||||
salt.utils.gzip_util.compress_file
|
||||
if gzip
|
||||
else salt.utils.itertools.read_file
|
||||
)
|
||||
|
||||
_res = salt.utils.minions.CkMinions(self.opts).check_minions(
|
||||
tgt,
|
||||
tgt_type=selected_target_option or 'glob')
|
||||
minions = _res['minions']
|
||||
tgt, tgt_type=selected_target_option or "glob"
|
||||
)
|
||||
minions = _res["minions"]
|
||||
|
||||
local = salt.client.get_local_client(self.opts['conf_file'])
|
||||
local = salt.client.get_local_client(self.opts["conf_file"])
|
||||
|
||||
def _get_remote_path(fn_):
|
||||
if fn_ in self.opts['src']:
|
||||
if fn_ in self.opts["src"]:
|
||||
# This was a filename explicitly passed on the CLI
|
||||
return os.path.join(dest, os.path.basename(fn_)) \
|
||||
if dest_is_dir \
|
||||
else dest
|
||||
return (
|
||||
os.path.join(dest, os.path.basename(fn_)) if dest_is_dir else dest
|
||||
)
|
||||
else:
|
||||
for path in self.opts['src']:
|
||||
for path in self.opts["src"]:
|
||||
relpath = os.path.relpath(fn_, path + os.sep)
|
||||
if relpath.startswith(parent):
|
||||
# File is not within this dir
|
||||
|
@ -205,32 +208,32 @@ class SaltCP(object):
|
|||
return os.path.join(dest, os.path.basename(path), relpath)
|
||||
else: # pylint: disable=useless-else-on-loop
|
||||
# Should not happen
|
||||
log.error('Failed to find remote path for %s', fn_)
|
||||
log.error("Failed to find remote path for %s", fn_)
|
||||
return None
|
||||
|
||||
ret = {}
|
||||
parent = '..' + os.sep
|
||||
parent = ".." + os.sep
|
||||
for fn_, mode in six.iteritems(files):
|
||||
remote_path = _get_remote_path(fn_)
|
||||
|
||||
index = 1
|
||||
failed = {}
|
||||
for chunk in reader(fn_, chunk_size=self.opts['salt_cp_chunk_size']):
|
||||
for chunk in reader(fn_, chunk_size=self.opts["salt_cp_chunk_size"]):
|
||||
chunk = base64.b64encode(salt.utils.stringutils.to_bytes(chunk))
|
||||
append = index > 1
|
||||
log.debug(
|
||||
'Copying %s to %starget \'%s\' as %s%s',
|
||||
"Copying %s to %starget '%s' as %s%s",
|
||||
fn_,
|
||||
'{0} '.format(selected_target_option)
|
||||
if selected_target_option
|
||||
else '',
|
||||
"{0} ".format(selected_target_option)
|
||||
if selected_target_option
|
||||
else "",
|
||||
tgt,
|
||||
remote_path,
|
||||
' (chunk #{0})'.format(index) if append else ''
|
||||
" (chunk #{0})".format(index) if append else "",
|
||||
)
|
||||
args = [
|
||||
tgt,
|
||||
'cp.recv_chunked',
|
||||
"cp.recv_chunked",
|
||||
[remote_path, chunk, append, gzip, mode],
|
||||
timeout,
|
||||
]
|
||||
|
@ -242,11 +245,11 @@ class SaltCP(object):
|
|||
if not result:
|
||||
# Publish failed
|
||||
msg = (
|
||||
'Publish failed.{0} It may be necessary to '
|
||||
'decrease salt_cp_chunk_size (current value: '
|
||||
'{1})'.format(
|
||||
' File partially transferred.' if index > 1 else '',
|
||||
self.opts['salt_cp_chunk_size'],
|
||||
"Publish failed.{0} It may be necessary to "
|
||||
"decrease salt_cp_chunk_size (current value: "
|
||||
"{1})".format(
|
||||
" File partially transferred." if index > 1 else "",
|
||||
self.opts["salt_cp_chunk_size"],
|
||||
)
|
||||
)
|
||||
for minion in minions:
|
||||
|
@ -269,14 +272,16 @@ class SaltCP(object):
|
|||
for dirname in empty_dirs:
|
||||
remote_path = _get_remote_path(dirname)
|
||||
log.debug(
|
||||
'Creating empty dir %s on %starget \'%s\'',
|
||||
"Creating empty dir %s on %starget '%s'",
|
||||
dirname,
|
||||
'{0} '.format(selected_target_option) # pylint: disable=str-format-in-logging
|
||||
if selected_target_option
|
||||
else '',
|
||||
"{0} ".format(
|
||||
selected_target_option
|
||||
) # pylint: disable=str-format-in-logging
|
||||
if selected_target_option
|
||||
else "",
|
||||
tgt,
|
||||
)
|
||||
args = [tgt, 'cp.recv_chunked', [remote_path, None], timeout]
|
||||
args = [tgt, "cp.recv_chunked", [remote_path, None], timeout]
|
||||
if selected_target_option is not None:
|
||||
args.append(selected_target_option)
|
||||
|
||||
|
|
|
@ -1,56 +1,56 @@
|
|||
# coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Make me some salt!
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import warnings
|
||||
from salt.utils.verify import verify_log
|
||||
|
||||
|
||||
# All salt related deprecation warnings should be shown once each!
|
||||
warnings.filterwarnings(
|
||||
'once', # Show once
|
||||
'', # No deprecation message match
|
||||
DeprecationWarning, # This filter is for DeprecationWarnings
|
||||
r'^(salt|salt\.(.*))$' # Match module(s) 'salt' and 'salt.<whatever>'
|
||||
)
|
||||
|
||||
# While we are supporting Python2.6, hide nested with-statements warnings
|
||||
warnings.filterwarnings(
|
||||
'ignore',
|
||||
'With-statements now directly support multiple context managers',
|
||||
DeprecationWarning
|
||||
)
|
||||
|
||||
# Filter the backports package UserWarning about being re-imported
|
||||
warnings.filterwarnings(
|
||||
'ignore',
|
||||
'^Module backports was already imported from (.*), but (.*) is being added to sys.path$',
|
||||
UserWarning
|
||||
)
|
||||
|
||||
# Import salt libs
|
||||
# We import log ASAP because we NEED to make sure that any logger instance salt
|
||||
# instantiates is using salt.log.setup.SaltLoggingClass
|
||||
import salt.log.setup
|
||||
|
||||
import salt.utils.kinds as kinds
|
||||
from salt.exceptions import SaltClientError, SaltSystemExit, get_error_message
|
||||
|
||||
# the try block below bypasses an issue at build time so that modules don't
|
||||
# cause the build to fail
|
||||
from salt.utils import migrations
|
||||
import salt.utils.kinds as kinds
|
||||
from salt.utils.verify import verify_log
|
||||
|
||||
# All salt related deprecation warnings should be shown once each!
|
||||
warnings.filterwarnings(
|
||||
"once", # Show once
|
||||
"", # No deprecation message match
|
||||
DeprecationWarning, # This filter is for DeprecationWarnings
|
||||
r"^(salt|salt\.(.*))$", # Match module(s) 'salt' and 'salt.<whatever>'
|
||||
)
|
||||
|
||||
# While we are supporting Python2.6, hide nested with-statements warnings
|
||||
warnings.filterwarnings(
|
||||
"ignore",
|
||||
"With-statements now directly support multiple context managers",
|
||||
DeprecationWarning,
|
||||
)
|
||||
|
||||
# Filter the backports package UserWarning about being re-imported
|
||||
warnings.filterwarnings(
|
||||
"ignore",
|
||||
"^Module backports was already imported from (.*), but (.*) is being added to sys.path$",
|
||||
UserWarning,
|
||||
)
|
||||
|
||||
|
||||
try:
|
||||
from salt.utils.zeromq import ip_bracket
|
||||
import salt.utils.parsers
|
||||
from salt.utils.verify import check_user, verify_env, verify_socket
|
||||
except ImportError as exc:
|
||||
if exc.args[0] != 'No module named _msgpack':
|
||||
if exc.args[0] != "No module named _msgpack":
|
||||
raise
|
||||
from salt.exceptions import SaltSystemExit, SaltClientError, get_error_message
|
||||
|
||||
|
||||
# Let's instantiate log using salt.log.setup.logging.getLogger() so pylint
|
||||
|
@ -59,67 +59,73 @@ log = salt.log.setup.logging.getLogger(__name__)
|
|||
|
||||
|
||||
class DaemonsMixin(object): # pylint: disable=no-init
|
||||
'''
|
||||
"""
|
||||
Uses the same functions for all daemons
|
||||
'''
|
||||
"""
|
||||
|
||||
def verify_hash_type(self):
|
||||
'''
|
||||
"""
|
||||
Verify and display a nag-messsage to the log if vulnerable hash-type is used.
|
||||
|
||||
:return:
|
||||
'''
|
||||
if self.config['hash_type'].lower() in ['md5', 'sha1']:
|
||||
"""
|
||||
if self.config["hash_type"].lower() in ["md5", "sha1"]:
|
||||
log.warning(
|
||||
'IMPORTANT: Do not use %s hashing algorithm! Please set '
|
||||
"IMPORTANT: Do not use %s hashing algorithm! Please set "
|
||||
'"hash_type" to sha256 in Salt %s config!',
|
||||
self.config['hash_type'], self.__class__.__name__
|
||||
self.config["hash_type"],
|
||||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
def action_log_info(self, action):
|
||||
'''
|
||||
"""
|
||||
Say daemon starting.
|
||||
|
||||
:param action
|
||||
:return:
|
||||
'''
|
||||
log.info('%s the Salt %s', action, self.__class__.__name__)
|
||||
"""
|
||||
log.info("%s the Salt %s", action, self.__class__.__name__)
|
||||
|
||||
def start_log_info(self):
|
||||
'''
|
||||
"""
|
||||
Say daemon starting.
|
||||
|
||||
:return:
|
||||
'''
|
||||
log.info('The Salt %s is starting up', self.__class__.__name__)
|
||||
"""
|
||||
log.info("The Salt %s is starting up", self.__class__.__name__)
|
||||
|
||||
def shutdown_log_info(self):
|
||||
'''
|
||||
"""
|
||||
Say daemon shutting down.
|
||||
|
||||
:return:
|
||||
'''
|
||||
log.info('The Salt %s is shut down', self.__class__.__name__)
|
||||
"""
|
||||
log.info("The Salt %s is shut down", self.__class__.__name__)
|
||||
|
||||
def environment_failure(self, error):
|
||||
'''
|
||||
"""
|
||||
Log environment failure for the daemon and exit with the error code.
|
||||
|
||||
:param error:
|
||||
:return:
|
||||
'''
|
||||
"""
|
||||
log.exception(
|
||||
'Failed to create environment for %s: %s',
|
||||
self.__class__.__name__, get_error_message(error)
|
||||
"Failed to create environment for %s: %s",
|
||||
self.__class__.__name__,
|
||||
get_error_message(error),
|
||||
)
|
||||
self.shutdown(error)
|
||||
|
||||
|
||||
class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: disable=no-init
|
||||
'''
|
||||
class Master(
|
||||
salt.utils.parsers.MasterOptionParser, DaemonsMixin
|
||||
): # pylint: disable=no-init
|
||||
"""
|
||||
Creates a master server
|
||||
'''
|
||||
"""
|
||||
|
||||
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
|
||||
if hasattr(self.master, 'process_manager'):
|
||||
if hasattr(self.master, "process_manager"):
|
||||
# escalate signal to the process manager processes
|
||||
self.master.process_manager.stop_restarting()
|
||||
self.master.process_manager.send_signal_to_processes(signum)
|
||||
|
@ -128,61 +134,62 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
|
|||
super(Master, self)._handle_signals(signum, sigframe)
|
||||
|
||||
def prepare(self):
|
||||
'''
|
||||
"""
|
||||
Run the preparation sequence required to start a salt master server.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
||||
super(YourSubClass, self).prepare()
|
||||
'''
|
||||
"""
|
||||
super(Master, self).prepare()
|
||||
|
||||
try:
|
||||
if self.config['verify_env']:
|
||||
if self.config["verify_env"]:
|
||||
v_dirs = [
|
||||
self.config['pki_dir'],
|
||||
os.path.join(self.config['pki_dir'], 'minions'),
|
||||
os.path.join(self.config['pki_dir'], 'minions_pre'),
|
||||
os.path.join(self.config['pki_dir'], 'minions_denied'),
|
||||
os.path.join(self.config['pki_dir'],
|
||||
'minions_autosign'),
|
||||
os.path.join(self.config['pki_dir'],
|
||||
'minions_rejected'),
|
||||
self.config['cachedir'],
|
||||
os.path.join(self.config['cachedir'], 'jobs'),
|
||||
os.path.join(self.config['cachedir'], 'proc'),
|
||||
self.config['sock_dir'],
|
||||
self.config['token_dir'],
|
||||
self.config['syndic_dir'],
|
||||
self.config['sqlite_queue_dir'],
|
||||
]
|
||||
self.config["pki_dir"],
|
||||
os.path.join(self.config["pki_dir"], "minions"),
|
||||
os.path.join(self.config["pki_dir"], "minions_pre"),
|
||||
os.path.join(self.config["pki_dir"], "minions_denied"),
|
||||
os.path.join(self.config["pki_dir"], "minions_autosign"),
|
||||
os.path.join(self.config["pki_dir"], "minions_rejected"),
|
||||
self.config["cachedir"],
|
||||
os.path.join(self.config["cachedir"], "jobs"),
|
||||
os.path.join(self.config["cachedir"], "proc"),
|
||||
self.config["sock_dir"],
|
||||
self.config["token_dir"],
|
||||
self.config["syndic_dir"],
|
||||
self.config["sqlite_queue_dir"],
|
||||
]
|
||||
verify_env(
|
||||
v_dirs,
|
||||
self.config['user'],
|
||||
permissive=self.config['permissive_pki_access'],
|
||||
root_dir=self.config['root_dir'],
|
||||
pki_dir=self.config['pki_dir'],
|
||||
self.config["user"],
|
||||
permissive=self.config["permissive_pki_access"],
|
||||
root_dir=self.config["root_dir"],
|
||||
pki_dir=self.config["pki_dir"],
|
||||
)
|
||||
# Clear out syndics from cachedir
|
||||
for syndic_file in os.listdir(self.config['syndic_dir']):
|
||||
os.remove(os.path.join(self.config['syndic_dir'], syndic_file))
|
||||
for syndic_file in os.listdir(self.config["syndic_dir"]):
|
||||
os.remove(os.path.join(self.config["syndic_dir"], syndic_file))
|
||||
except OSError as error:
|
||||
self.environment_failure(error)
|
||||
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
self.action_log_info('Setting up')
|
||||
self.action_log_info("Setting up")
|
||||
|
||||
# TODO: AIO core is separate from transport
|
||||
if not verify_socket(self.config['interface'],
|
||||
self.config['publish_port'],
|
||||
self.config['ret_port']):
|
||||
self.shutdown(4, 'The ports are not available to bind')
|
||||
self.config['interface'] = ip_bracket(self.config['interface'])
|
||||
if not verify_socket(
|
||||
self.config["interface"],
|
||||
self.config["publish_port"],
|
||||
self.config["ret_port"],
|
||||
):
|
||||
self.shutdown(4, "The ports are not available to bind")
|
||||
self.config["interface"] = ip_bracket(self.config["interface"])
|
||||
migrations.migrate_paths(self.config)
|
||||
|
||||
# Late import so logging works correctly
|
||||
import salt.master
|
||||
|
||||
self.master = salt.master.Master(self.config)
|
||||
|
||||
self.daemonize_if_required()
|
||||
|
@ -190,7 +197,7 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
|
|||
salt.utils.process.notify_systemd()
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
Start the actual master.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
@ -198,19 +205,19 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
|
|||
super(YourSubClass, self).start()
|
||||
|
||||
NOTE: Run any required code before calling `super()`.
|
||||
'''
|
||||
"""
|
||||
super(Master, self).start()
|
||||
if check_user(self.config['user']):
|
||||
self.action_log_info('Starting up')
|
||||
if check_user(self.config["user"]):
|
||||
self.action_log_info("Starting up")
|
||||
self.verify_hash_type()
|
||||
self.master.start()
|
||||
|
||||
def shutdown(self, exitcode=0, exitmsg=None):
|
||||
'''
|
||||
"""
|
||||
If sub-classed, run any shutdown operations on this method.
|
||||
'''
|
||||
"""
|
||||
self.shutdown_log_info()
|
||||
msg = 'The salt master is shutdown. '
|
||||
msg = "The salt master is shutdown. "
|
||||
if exitmsg is not None:
|
||||
exitmsg = msg + exitmsg
|
||||
else:
|
||||
|
@ -218,34 +225,36 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: di
|
|||
super(Master, self).shutdown(exitcode, exitmsg)
|
||||
|
||||
|
||||
class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: disable=no-init
|
||||
'''
|
||||
class Minion(
|
||||
salt.utils.parsers.MinionOptionParser, DaemonsMixin
|
||||
): # pylint: disable=no-init
|
||||
"""
|
||||
Create a minion server
|
||||
'''
|
||||
"""
|
||||
|
||||
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
|
||||
# escalate signal to the process manager processes
|
||||
if hasattr(self.minion, 'stop'):
|
||||
if hasattr(self.minion, "stop"):
|
||||
self.minion.stop(signum)
|
||||
super(Minion, self)._handle_signals(signum, sigframe)
|
||||
|
||||
# pylint: disable=no-member
|
||||
def prepare(self):
|
||||
'''
|
||||
"""
|
||||
Run the preparation sequence required to start a salt minion.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
||||
super(YourSubClass, self).prepare()
|
||||
'''
|
||||
"""
|
||||
super(Minion, self).prepare()
|
||||
|
||||
try:
|
||||
if self.config['verify_env']:
|
||||
confd = self.config.get('default_include')
|
||||
if self.config["verify_env"]:
|
||||
confd = self.config.get("default_include")
|
||||
if confd:
|
||||
# If 'default_include' is specified in config, then use it
|
||||
if '*' in confd:
|
||||
if "*" in confd:
|
||||
# Value is of the form "minion.d/*.conf"
|
||||
confd = os.path.dirname(confd)
|
||||
if not os.path.isabs(confd):
|
||||
|
@ -253,65 +262,67 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
|
|||
# path, consider it relative to folder of 'conf_file'
|
||||
# (/etc/salt by default)
|
||||
confd = os.path.join(
|
||||
os.path.dirname(self.config['conf_file']), confd
|
||||
os.path.dirname(self.config["conf_file"]), confd
|
||||
)
|
||||
else:
|
||||
confd = os.path.join(
|
||||
os.path.dirname(self.config['conf_file']), 'minion.d'
|
||||
os.path.dirname(self.config["conf_file"]), "minion.d"
|
||||
)
|
||||
|
||||
v_dirs = [
|
||||
self.config['pki_dir'],
|
||||
self.config['cachedir'],
|
||||
self.config['sock_dir'],
|
||||
self.config['extension_modules'],
|
||||
confd,
|
||||
]
|
||||
self.config["pki_dir"],
|
||||
self.config["cachedir"],
|
||||
self.config["sock_dir"],
|
||||
self.config["extension_modules"],
|
||||
confd,
|
||||
]
|
||||
|
||||
verify_env(
|
||||
v_dirs,
|
||||
self.config['user'],
|
||||
permissive=self.config['permissive_pki_access'],
|
||||
root_dir=self.config['root_dir'],
|
||||
pki_dir=self.config['pki_dir'],
|
||||
self.config["user"],
|
||||
permissive=self.config["permissive_pki_access"],
|
||||
root_dir=self.config["root_dir"],
|
||||
pki_dir=self.config["pki_dir"],
|
||||
)
|
||||
except OSError as error:
|
||||
self.environment_failure(error)
|
||||
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
log.info('Setting up the Salt Minion "%s"', self.config['id'])
|
||||
log.info('Setting up the Salt Minion "%s"', self.config["id"])
|
||||
migrations.migrate_paths(self.config)
|
||||
|
||||
# Bail out if we find a process running and it matches out pidfile
|
||||
if self.check_running():
|
||||
self.action_log_info('An instance is already running. Exiting')
|
||||
self.action_log_info("An instance is already running. Exiting")
|
||||
self.shutdown(1)
|
||||
|
||||
transport = self.config.get('transport').lower()
|
||||
transport = self.config.get("transport").lower()
|
||||
|
||||
# TODO: AIO core is separate from transport
|
||||
if transport in ('zeromq', 'tcp', 'detect'):
|
||||
if transport in ("zeromq", "tcp", "detect"):
|
||||
# Late import so logging works correctly
|
||||
import salt.minion
|
||||
|
||||
# If the minion key has not been accepted, then Salt enters a loop
|
||||
# waiting for it, if we daemonize later then the minion could halt
|
||||
# the boot process waiting for a key to be accepted on the master.
|
||||
# This is the latest safe place to daemonize
|
||||
self.daemonize_if_required()
|
||||
self.set_pidfile()
|
||||
if self.config.get('master_type') == 'func':
|
||||
if self.config.get("master_type") == "func":
|
||||
salt.minion.eval_master_func(self.config)
|
||||
self.minion = salt.minion.MinionManager(self.config)
|
||||
else:
|
||||
log.error(
|
||||
'The transport \'%s\' is not supported. Please use one of '
|
||||
'the following: tcp, zeromq, or detect.', transport
|
||||
"The transport '%s' is not supported. Please use one of "
|
||||
"the following: tcp, zeromq, or detect.",
|
||||
transport,
|
||||
)
|
||||
self.shutdown(1)
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
Start the actual minion.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
@ -319,7 +330,7 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
|
|||
super(YourSubClass, self).start()
|
||||
|
||||
NOTE: Run any required code before calling `super()`.
|
||||
'''
|
||||
"""
|
||||
super(Minion, self).start()
|
||||
while True:
|
||||
try:
|
||||
|
@ -332,23 +343,23 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
|
|||
|
||||
def _real_start(self):
|
||||
try:
|
||||
if check_user(self.config['user']):
|
||||
self.action_log_info('Starting up')
|
||||
if check_user(self.config["user"]):
|
||||
self.action_log_info("Starting up")
|
||||
self.verify_hash_type()
|
||||
self.minion.tune_in()
|
||||
if self.minion.restart:
|
||||
raise SaltClientError('Minion could not connect to Master')
|
||||
raise SaltClientError("Minion could not connect to Master")
|
||||
except (KeyboardInterrupt, SaltSystemExit) as error:
|
||||
self.action_log_info('Stopping')
|
||||
self.action_log_info("Stopping")
|
||||
if isinstance(error, KeyboardInterrupt):
|
||||
log.warning('Exiting on Ctrl-c')
|
||||
log.warning("Exiting on Ctrl-c")
|
||||
self.shutdown()
|
||||
else:
|
||||
log.error(error)
|
||||
self.shutdown(error.code)
|
||||
|
||||
def call(self, cleanup_protecteds):
|
||||
'''
|
||||
"""
|
||||
Start the actual minion as a caller minion.
|
||||
|
||||
cleanup_protecteds is list of yard host addresses that should not be
|
||||
|
@ -359,41 +370,51 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: di
|
|||
super(YourSubClass, self).start()
|
||||
|
||||
NOTE: Run any required code before calling `super()`.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
self.prepare()
|
||||
if check_user(self.config['user']):
|
||||
self.minion.opts['__role'] = kinds.APPL_KIND_NAMES[kinds.applKinds.caller]
|
||||
if check_user(self.config["user"]):
|
||||
self.minion.opts["__role"] = kinds.APPL_KIND_NAMES[
|
||||
kinds.applKinds.caller
|
||||
]
|
||||
self.minion.call_in()
|
||||
except (KeyboardInterrupt, SaltSystemExit) as exc:
|
||||
self.action_log_info('Stopping')
|
||||
self.action_log_info("Stopping")
|
||||
if isinstance(exc, KeyboardInterrupt):
|
||||
log.warning('Exiting on Ctrl-c')
|
||||
log.warning("Exiting on Ctrl-c")
|
||||
self.shutdown()
|
||||
else:
|
||||
log.error(exc)
|
||||
self.shutdown(exc.code)
|
||||
|
||||
def shutdown(self, exitcode=0, exitmsg=None):
|
||||
'''
|
||||
"""
|
||||
If sub-classed, run any shutdown operations on this method.
|
||||
|
||||
:param exitcode
|
||||
:param exitmsg
|
||||
'''
|
||||
self.action_log_info('Shutting down')
|
||||
if hasattr(self, 'minion') and hasattr(self.minion, 'destroy'):
|
||||
"""
|
||||
self.action_log_info("Shutting down")
|
||||
if hasattr(self, "minion") and hasattr(self.minion, "destroy"):
|
||||
self.minion.destroy()
|
||||
super(Minion, self).shutdown(
|
||||
exitcode, ('The Salt {0} is shutdown. {1}'.format(
|
||||
self.__class__.__name__, (exitmsg or '')).strip()))
|
||||
exitcode,
|
||||
(
|
||||
"The Salt {0} is shutdown. {1}".format(
|
||||
self.__class__.__name__, (exitmsg or "")
|
||||
).strip()
|
||||
),
|
||||
)
|
||||
|
||||
# pylint: enable=no-member
|
||||
|
||||
|
||||
class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): # pylint: disable=no-init
|
||||
'''
|
||||
class ProxyMinion(
|
||||
salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin
|
||||
): # pylint: disable=no-init
|
||||
"""
|
||||
Create a proxy minion server
|
||||
'''
|
||||
"""
|
||||
|
||||
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
|
||||
# escalate signal to the process manager processes
|
||||
|
@ -402,17 +423,17 @@ class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): #
|
|||
|
||||
# pylint: disable=no-member
|
||||
def prepare(self):
|
||||
'''
|
||||
"""
|
||||
Run the preparation sequence required to start a salt proxy minion.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
||||
super(YourSubClass, self).prepare()
|
||||
'''
|
||||
"""
|
||||
super(ProxyMinion, self).prepare()
|
||||
|
||||
if not self.values.proxyid:
|
||||
self.error('salt-proxy requires --proxyid')
|
||||
self.error("salt-proxy requires --proxyid")
|
||||
|
||||
# Proxies get their ID from the command line. This may need to change in
|
||||
# the future.
|
||||
|
@ -421,11 +442,11 @@ class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): #
|
|||
# self.config['id'] = self.values.proxyid
|
||||
|
||||
try:
|
||||
if self.config['verify_env']:
|
||||
confd = self.config.get('default_include')
|
||||
if self.config["verify_env"]:
|
||||
confd = self.config.get("default_include")
|
||||
if confd:
|
||||
# If 'default_include' is specified in config, then use it
|
||||
if '*' in confd:
|
||||
if "*" in confd:
|
||||
# Value is of the form "minion.d/*.conf"
|
||||
confd = os.path.dirname(confd)
|
||||
if not os.path.isabs(confd):
|
||||
|
@ -433,57 +454,58 @@ class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): #
|
|||
# path, consider it relative to folder of 'conf_file'
|
||||
# (/etc/salt by default)
|
||||
confd = os.path.join(
|
||||
os.path.dirname(self.config['conf_file']), confd
|
||||
os.path.dirname(self.config["conf_file"]), confd
|
||||
)
|
||||
else:
|
||||
confd = os.path.join(
|
||||
os.path.dirname(self.config['conf_file']), 'proxy.d'
|
||||
os.path.dirname(self.config["conf_file"]), "proxy.d"
|
||||
)
|
||||
|
||||
v_dirs = [
|
||||
self.config['pki_dir'],
|
||||
self.config['cachedir'],
|
||||
self.config['sock_dir'],
|
||||
self.config['extension_modules'],
|
||||
self.config["pki_dir"],
|
||||
self.config["cachedir"],
|
||||
self.config["sock_dir"],
|
||||
self.config["extension_modules"],
|
||||
confd,
|
||||
]
|
||||
|
||||
verify_env(
|
||||
v_dirs,
|
||||
self.config['user'],
|
||||
permissive=self.config['permissive_pki_access'],
|
||||
root_dir=self.config['root_dir'],
|
||||
pki_dir=self.config['pki_dir'],
|
||||
self.config["user"],
|
||||
permissive=self.config["permissive_pki_access"],
|
||||
root_dir=self.config["root_dir"],
|
||||
pki_dir=self.config["pki_dir"],
|
||||
)
|
||||
except OSError as error:
|
||||
self.environment_failure(error)
|
||||
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
self.action_log_info('Setting up "{0}"'.format(self.config['id']))
|
||||
self.action_log_info('Setting up "{0}"'.format(self.config["id"]))
|
||||
|
||||
migrations.migrate_paths(self.config)
|
||||
|
||||
# Bail out if we find a process running and it matches out pidfile
|
||||
if self.check_running():
|
||||
self.action_log_info('An instance is already running. Exiting')
|
||||
self.action_log_info("An instance is already running. Exiting")
|
||||
self.shutdown(1)
|
||||
|
||||
# TODO: AIO core is separate from transport
|
||||
# Late import so logging works correctly
|
||||
import salt.minion
|
||||
|
||||
# If the minion key has not been accepted, then Salt enters a loop
|
||||
# waiting for it, if we daemonize later then the minion could halt
|
||||
# the boot process waiting for a key to be accepted on the master.
|
||||
# This is the latest safe place to daemonize
|
||||
self.daemonize_if_required()
|
||||
self.set_pidfile()
|
||||
if self.config.get('master_type') == 'func':
|
||||
if self.config.get("master_type") == "func":
|
||||
salt.minion.eval_master_func(self.config)
|
||||
self.minion = salt.minion.ProxyMinionManager(self.config)
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
Start the actual proxy minion.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
@ -491,84 +513,93 @@ class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): #
|
|||
super(YourSubClass, self).start()
|
||||
|
||||
NOTE: Run any required code before calling `super()`.
|
||||
'''
|
||||
"""
|
||||
super(ProxyMinion, self).start()
|
||||
try:
|
||||
if check_user(self.config['user']):
|
||||
self.action_log_info('The Proxy Minion is starting up')
|
||||
if check_user(self.config["user"]):
|
||||
self.action_log_info("The Proxy Minion is starting up")
|
||||
self.verify_hash_type()
|
||||
self.minion.tune_in()
|
||||
if self.minion.restart:
|
||||
raise SaltClientError('Proxy Minion could not connect to Master')
|
||||
raise SaltClientError("Proxy Minion could not connect to Master")
|
||||
except (KeyboardInterrupt, SaltSystemExit) as exc:
|
||||
self.action_log_info('Proxy Minion Stopping')
|
||||
self.action_log_info("Proxy Minion Stopping")
|
||||
if isinstance(exc, KeyboardInterrupt):
|
||||
log.warning('Exiting on Ctrl-c')
|
||||
log.warning("Exiting on Ctrl-c")
|
||||
self.shutdown()
|
||||
else:
|
||||
log.error(exc)
|
||||
self.shutdown(exc.code)
|
||||
|
||||
def shutdown(self, exitcode=0, exitmsg=None):
|
||||
'''
|
||||
"""
|
||||
If sub-classed, run any shutdown operations on this method.
|
||||
|
||||
:param exitcode
|
||||
:param exitmsg
|
||||
'''
|
||||
if hasattr(self, 'minion') and 'proxymodule' in self.minion.opts:
|
||||
proxy_fn = self.minion.opts['proxymodule'].loaded_base_name + '.shutdown'
|
||||
self.minion.opts['proxymodule'][proxy_fn](self.minion.opts)
|
||||
self.action_log_info('Shutting down')
|
||||
"""
|
||||
if hasattr(self, "minion") and "proxymodule" in self.minion.opts:
|
||||
proxy_fn = self.minion.opts["proxymodule"].loaded_base_name + ".shutdown"
|
||||
self.minion.opts["proxymodule"][proxy_fn](self.minion.opts)
|
||||
self.action_log_info("Shutting down")
|
||||
super(ProxyMinion, self).shutdown(
|
||||
exitcode, ('The Salt {0} is shutdown. {1}'.format(
|
||||
self.__class__.__name__, (exitmsg or '')).strip()))
|
||||
exitcode,
|
||||
(
|
||||
"The Salt {0} is shutdown. {1}".format(
|
||||
self.__class__.__name__, (exitmsg or "")
|
||||
).strip()
|
||||
),
|
||||
)
|
||||
|
||||
# pylint: enable=no-member
|
||||
|
||||
|
||||
class Syndic(salt.utils.parsers.SyndicOptionParser, DaemonsMixin): # pylint: disable=no-init
|
||||
'''
|
||||
class Syndic(
|
||||
salt.utils.parsers.SyndicOptionParser, DaemonsMixin
|
||||
): # pylint: disable=no-init
|
||||
"""
|
||||
Create a syndic server
|
||||
'''
|
||||
"""
|
||||
|
||||
def prepare(self):
|
||||
'''
|
||||
"""
|
||||
Run the preparation sequence required to start a salt syndic minion.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
||||
super(YourSubClass, self).prepare()
|
||||
'''
|
||||
"""
|
||||
super(Syndic, self).prepare()
|
||||
try:
|
||||
if self.config['verify_env']:
|
||||
if self.config["verify_env"]:
|
||||
verify_env(
|
||||
[
|
||||
self.config['pki_dir'],
|
||||
self.config['cachedir'],
|
||||
self.config['sock_dir'],
|
||||
self.config['extension_modules'],
|
||||
self.config["pki_dir"],
|
||||
self.config["cachedir"],
|
||||
self.config["sock_dir"],
|
||||
self.config["extension_modules"],
|
||||
],
|
||||
self.config['user'],
|
||||
permissive=self.config['permissive_pki_access'],
|
||||
root_dir=self.config['root_dir'],
|
||||
pki_dir=self.config['pki_dir'],
|
||||
self.config["user"],
|
||||
permissive=self.config["permissive_pki_access"],
|
||||
root_dir=self.config["root_dir"],
|
||||
pki_dir=self.config["pki_dir"],
|
||||
)
|
||||
except OSError as error:
|
||||
self.environment_failure(error)
|
||||
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
self.action_log_info('Setting up "{0}"'.format(self.config['id']))
|
||||
self.action_log_info('Setting up "{0}"'.format(self.config["id"]))
|
||||
|
||||
# Late import so logging works correctly
|
||||
import salt.minion
|
||||
|
||||
self.daemonize_if_required()
|
||||
self.syndic = salt.minion.SyndicManager(self.config)
|
||||
self.set_pidfile()
|
||||
|
||||
def start(self):
|
||||
'''
|
||||
"""
|
||||
Start the actual syndic.
|
||||
|
||||
If sub-classed, don't **ever** forget to run:
|
||||
|
@ -576,25 +607,30 @@ class Syndic(salt.utils.parsers.SyndicOptionParser, DaemonsMixin): # pylint: di
|
|||
super(YourSubClass, self).start()
|
||||
|
||||
NOTE: Run any required code before calling `super()`.
|
||||
'''
|
||||
"""
|
||||
super(Syndic, self).start()
|
||||
if check_user(self.config['user']):
|
||||
self.action_log_info('Starting up')
|
||||
if check_user(self.config["user"]):
|
||||
self.action_log_info("Starting up")
|
||||
self.verify_hash_type()
|
||||
try:
|
||||
self.syndic.tune_in()
|
||||
except KeyboardInterrupt:
|
||||
self.action_log_info('Stopping')
|
||||
self.action_log_info("Stopping")
|
||||
self.shutdown()
|
||||
|
||||
def shutdown(self, exitcode=0, exitmsg=None):
|
||||
'''
|
||||
"""
|
||||
If sub-classed, run any shutdown operations on this method.
|
||||
|
||||
:param exitcode
|
||||
:param exitmsg
|
||||
'''
|
||||
self.action_log_info('Shutting down')
|
||||
"""
|
||||
self.action_log_info("Shutting down")
|
||||
super(Syndic, self).shutdown(
|
||||
exitcode, ('The Salt {0} is shutdown. {1}'.format(
|
||||
self.__class__.__name__, (exitmsg or '')).strip()))
|
||||
exitcode,
|
||||
(
|
||||
"The Salt {0} is shutdown. {1}".format(
|
||||
self.__class__.__name__, (exitmsg or "")
|
||||
).strip()
|
||||
),
|
||||
)
|
||||
|
|
|
@ -1,26 +1,26 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
|
||||
import salt.utils.parsers
|
||||
from salt.utils.verify import check_user, verify_log
|
||||
|
||||
|
||||
class SaltKey(salt.utils.parsers.SaltKeyOptionParser):
|
||||
'''
|
||||
"""
|
||||
Initialize the Salt key manager
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute salt-key
|
||||
'''
|
||||
"""
|
||||
import salt.key
|
||||
|
||||
self.parse_args()
|
||||
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
|
||||
key = salt.key.KeyCLI(self.config)
|
||||
if check_user(self.config['user']):
|
||||
if check_user(self.config["user"]):
|
||||
key.run()
|
||||
|
|
|
@ -1,24 +1,25 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import salt.defaults.exitcodes # pylint: disable=W0611
|
||||
import salt.utils.parsers
|
||||
import salt.utils.profile
|
||||
from salt.utils.verify import check_user, verify_log
|
||||
from salt.exceptions import SaltClientError
|
||||
from salt.ext import six
|
||||
import salt.defaults.exitcodes # pylint: disable=W0611
|
||||
from salt.utils.verify import check_user, verify_log
|
||||
|
||||
|
||||
class SaltRun(salt.utils.parsers.SaltRunOptionParser):
|
||||
'''
|
||||
"""
|
||||
Used to execute Salt runners
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute salt-run
|
||||
'''
|
||||
"""
|
||||
import salt.runner
|
||||
|
||||
self.parse_args()
|
||||
|
||||
# Setup file logging!
|
||||
|
@ -34,7 +35,7 @@ class SaltRun(salt.utils.parsers.SaltRunOptionParser):
|
|||
# Run this here so SystemExit isn't raised anywhere else when
|
||||
# someone tries to use the runners via the python API
|
||||
try:
|
||||
if check_user(self.config['user']):
|
||||
if check_user(self.config["user"]):
|
||||
pr = salt.utils.profile.activate_profile(profiling_enabled)
|
||||
try:
|
||||
ret = runner.run()
|
||||
|
@ -44,15 +45,14 @@ class SaltRun(salt.utils.parsers.SaltRunOptionParser):
|
|||
# runners might still use it. For this reason, we
|
||||
# also check ret['data']['retcode'] if
|
||||
# ret['retcode'] is not available.
|
||||
if isinstance(ret, dict) and 'retcode' in ret:
|
||||
self.exit(ret['retcode'])
|
||||
elif isinstance(ret, dict) and 'retcode' in ret.get('data', {}):
|
||||
self.exit(ret['data']['retcode'])
|
||||
if isinstance(ret, dict) and "retcode" in ret:
|
||||
self.exit(ret["retcode"])
|
||||
elif isinstance(ret, dict) and "retcode" in ret.get("data", {}):
|
||||
self.exit(ret["data"]["retcode"])
|
||||
finally:
|
||||
salt.utils.profile.output_profile(
|
||||
pr,
|
||||
stats_path=self.options.profiling_path,
|
||||
stop=True)
|
||||
pr, stats_path=self.options.profiling_path, stop=True
|
||||
)
|
||||
|
||||
except SaltClientError as exc:
|
||||
raise SystemExit(six.text_type(exc))
|
||||
|
|
342
salt/cli/salt.py
342
salt/cli/salt.py
|
@ -2,18 +2,16 @@
|
|||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import sys
|
||||
sys.modules['pkg_resources'] = None
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Import Salt libs
|
||||
import salt.defaults.exitcodes
|
||||
import salt.log
|
||||
import salt.utils.job
|
||||
import salt.utils.parsers
|
||||
import salt.utils.stringutils
|
||||
import salt.log
|
||||
from salt.utils.args import yamlify_arg
|
||||
from salt.utils.verify import verify_log
|
||||
from salt.exceptions import (
|
||||
AuthenticationError,
|
||||
AuthorizationError,
|
||||
|
@ -21,26 +19,31 @@ from salt.exceptions import (
|
|||
LoaderError,
|
||||
SaltClientError,
|
||||
SaltInvocationError,
|
||||
SaltSystemExit
|
||||
SaltSystemExit,
|
||||
)
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
from salt.utils.args import yamlify_arg
|
||||
from salt.utils.verify import verify_log
|
||||
|
||||
sys.modules["pkg_resources"] = None
|
||||
|
||||
|
||||
class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
||||
'''
|
||||
"""
|
||||
The execution of a salt command happens here
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Execute the salt command line
|
||||
'''
|
||||
"""
|
||||
import salt.client
|
||||
|
||||
self.parse_args()
|
||||
|
||||
if self.config['log_level'] not in ('quiet', ):
|
||||
if self.config["log_level"] not in ("quiet",):
|
||||
# Setup file logging!
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
|
@ -48,14 +51,15 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
try:
|
||||
# We don't need to bail on config file permission errors
|
||||
# if the CLI process is run with the -a flag
|
||||
skip_perm_errors = self.options.eauth != ''
|
||||
skip_perm_errors = self.options.eauth != ""
|
||||
|
||||
self.local_client = salt.client.get_local_client(
|
||||
self.get_config_file_path(),
|
||||
skip_perm_errors=skip_perm_errors,
|
||||
auto_reconnect=True)
|
||||
auto_reconnect=True,
|
||||
)
|
||||
except SaltClientError as exc:
|
||||
self.exit(2, '{0}\n'.format(exc))
|
||||
self.exit(2, "{0}\n".format(exc))
|
||||
return
|
||||
|
||||
if self.options.batch or self.options.static:
|
||||
|
@ -68,87 +72,95 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
|
||||
if self.options.preview_target:
|
||||
minion_list = self._preview_target()
|
||||
self._output_ret(minion_list, self.config.get('output', 'nested'))
|
||||
self._output_ret(minion_list, self.config.get("output", "nested"))
|
||||
return
|
||||
|
||||
if self.options.timeout <= 0:
|
||||
self.options.timeout = self.local_client.opts['timeout']
|
||||
self.options.timeout = self.local_client.opts["timeout"]
|
||||
|
||||
kwargs = {
|
||||
'tgt': self.config['tgt'],
|
||||
'fun': self.config['fun'],
|
||||
'arg': self.config['arg'],
|
||||
'timeout': self.options.timeout,
|
||||
'show_timeout': self.options.show_timeout,
|
||||
'show_jid': self.options.show_jid}
|
||||
"tgt": self.config["tgt"],
|
||||
"fun": self.config["fun"],
|
||||
"arg": self.config["arg"],
|
||||
"timeout": self.options.timeout,
|
||||
"show_timeout": self.options.show_timeout,
|
||||
"show_jid": self.options.show_jid,
|
||||
}
|
||||
|
||||
if 'token' in self.config:
|
||||
if "token" in self.config:
|
||||
import salt.utils.files
|
||||
try:
|
||||
with salt.utils.files.fopen(os.path.join(self.config['cachedir'], '.root_key'), 'r') as fp_:
|
||||
kwargs['key'] = fp_.readline()
|
||||
except IOError:
|
||||
kwargs['token'] = self.config['token']
|
||||
|
||||
kwargs['delimiter'] = self.options.delimiter
|
||||
try:
|
||||
with salt.utils.files.fopen(
|
||||
os.path.join(self.config["cachedir"], ".root_key"), "r"
|
||||
) as fp_:
|
||||
kwargs["key"] = fp_.readline()
|
||||
except IOError:
|
||||
kwargs["token"] = self.config["token"]
|
||||
|
||||
kwargs["delimiter"] = self.options.delimiter
|
||||
|
||||
if self.selected_target_option:
|
||||
kwargs['tgt_type'] = self.selected_target_option
|
||||
kwargs["tgt_type"] = self.selected_target_option
|
||||
else:
|
||||
kwargs['tgt_type'] = 'glob'
|
||||
kwargs["tgt_type"] = "glob"
|
||||
|
||||
# If batch_safe_limit is set, check minions matching target and
|
||||
# potentially switch to batch execution
|
||||
if self.options.batch_safe_limit > 1:
|
||||
if len(self._preview_target()) >= self.options.batch_safe_limit:
|
||||
salt.utils.stringutils.print_cli('\nNOTICE: Too many minions targeted, switching to batch execution.')
|
||||
salt.utils.stringutils.print_cli(
|
||||
"\nNOTICE: Too many minions targeted, switching to batch execution."
|
||||
)
|
||||
self.options.batch = self.options.batch_safe_size
|
||||
self._run_batch()
|
||||
return
|
||||
|
||||
if getattr(self.options, 'return'):
|
||||
kwargs['ret'] = getattr(self.options, 'return')
|
||||
if getattr(self.options, "return"):
|
||||
kwargs["ret"] = getattr(self.options, "return")
|
||||
|
||||
if getattr(self.options, 'return_config'):
|
||||
kwargs['ret_config'] = getattr(self.options, 'return_config')
|
||||
if getattr(self.options, "return_config"):
|
||||
kwargs["ret_config"] = getattr(self.options, "return_config")
|
||||
|
||||
if getattr(self.options, 'return_kwargs'):
|
||||
kwargs['ret_kwargs'] = yamlify_arg(
|
||||
getattr(self.options, 'return_kwargs'))
|
||||
if getattr(self.options, "return_kwargs"):
|
||||
kwargs["ret_kwargs"] = yamlify_arg(getattr(self.options, "return_kwargs"))
|
||||
|
||||
if getattr(self.options, 'module_executors'):
|
||||
kwargs['module_executors'] = yamlify_arg(getattr(self.options, 'module_executors'))
|
||||
if getattr(self.options, "module_executors"):
|
||||
kwargs["module_executors"] = yamlify_arg(
|
||||
getattr(self.options, "module_executors")
|
||||
)
|
||||
|
||||
if getattr(self.options, 'executor_opts'):
|
||||
kwargs['executor_opts'] = yamlify_arg(getattr(self.options, 'executor_opts'))
|
||||
if getattr(self.options, "executor_opts"):
|
||||
kwargs["executor_opts"] = yamlify_arg(
|
||||
getattr(self.options, "executor_opts")
|
||||
)
|
||||
|
||||
if getattr(self.options, 'metadata'):
|
||||
kwargs['metadata'] = yamlify_arg(
|
||||
getattr(self.options, 'metadata'))
|
||||
if getattr(self.options, "metadata"):
|
||||
kwargs["metadata"] = yamlify_arg(getattr(self.options, "metadata"))
|
||||
|
||||
# If using eauth and a token hasn't already been loaded into
|
||||
# kwargs, prompt the user to enter auth credentials
|
||||
if 'token' not in kwargs and 'key' not in kwargs and self.options.eauth:
|
||||
if "token" not in kwargs and "key" not in kwargs and self.options.eauth:
|
||||
# This is expensive. Don't do it unless we need to.
|
||||
import salt.auth
|
||||
|
||||
resolver = salt.auth.Resolver(self.config)
|
||||
res = resolver.cli(self.options.eauth)
|
||||
if self.options.mktoken and res:
|
||||
tok = resolver.token_cli(
|
||||
self.options.eauth,
|
||||
res
|
||||
)
|
||||
tok = resolver.token_cli(self.options.eauth, res)
|
||||
if tok:
|
||||
kwargs['token'] = tok.get('token', '')
|
||||
kwargs["token"] = tok.get("token", "")
|
||||
if not res:
|
||||
sys.stderr.write('ERROR: Authentication failed\n')
|
||||
sys.stderr.write("ERROR: Authentication failed\n")
|
||||
sys.exit(2)
|
||||
kwargs.update(res)
|
||||
kwargs['eauth'] = self.options.eauth
|
||||
kwargs["eauth"] = self.options.eauth
|
||||
|
||||
if self.config['async']:
|
||||
if self.config["async"]:
|
||||
jid = self.local_client.cmd_async(**kwargs)
|
||||
salt.utils.stringutils.print_cli('Executed command with job ID: {0}'.format(jid))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"Executed command with job ID: {0}".format(jid)
|
||||
)
|
||||
return
|
||||
|
||||
# local will be None when there was an error
|
||||
|
@ -161,35 +173,35 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
try:
|
||||
if self.options.subset:
|
||||
cmd_func = self.local_client.cmd_subset
|
||||
kwargs['sub'] = self.options.subset
|
||||
kwargs['cli'] = True
|
||||
kwargs["sub"] = self.options.subset
|
||||
kwargs["cli"] = True
|
||||
else:
|
||||
cmd_func = self.local_client.cmd_cli
|
||||
|
||||
if self.options.progress:
|
||||
kwargs['progress'] = True
|
||||
self.config['progress'] = True
|
||||
kwargs["progress"] = True
|
||||
self.config["progress"] = True
|
||||
ret = {}
|
||||
for progress in cmd_func(**kwargs):
|
||||
out = 'progress'
|
||||
out = "progress"
|
||||
try:
|
||||
self._progress_ret(progress, out)
|
||||
except LoaderError as exc:
|
||||
raise SaltSystemExit(exc)
|
||||
if 'return_count' not in progress:
|
||||
if "return_count" not in progress:
|
||||
ret.update(progress)
|
||||
self._progress_end(out)
|
||||
self._print_returns_summary(ret)
|
||||
elif self.config['fun'] == 'sys.doc':
|
||||
elif self.config["fun"] == "sys.doc":
|
||||
ret = {}
|
||||
out = ''
|
||||
out = ""
|
||||
for full_ret in self.local_client.cmd_cli(**kwargs):
|
||||
ret_, out, retcode = self._format_ret(full_ret)
|
||||
ret.update(ret_)
|
||||
self._output_ret(ret, out, retcode=retcode)
|
||||
else:
|
||||
if self.options.verbose:
|
||||
kwargs['verbose'] = True
|
||||
kwargs["verbose"] = True
|
||||
ret = {}
|
||||
for full_ret in cmd_func(**kwargs):
|
||||
try:
|
||||
|
@ -201,8 +213,8 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
errors.append(full_ret)
|
||||
|
||||
# Returns summary
|
||||
if self.config['cli_summary'] is True:
|
||||
if self.config['fun'] != 'sys.doc':
|
||||
if self.config["cli_summary"] is True:
|
||||
if self.config["fun"] != "sys.doc":
|
||||
if self.options.output is None:
|
||||
self._print_returns_summary(ret)
|
||||
self._print_errors_summary(errors)
|
||||
|
@ -211,54 +223,59 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
# returned 'ok' with a retcode of 0.
|
||||
# This is the final point before the 'salt' cmd returns,
|
||||
# which is why we set the retcode here.
|
||||
if not all(exit_code == salt.defaults.exitcodes.EX_OK for exit_code in retcodes):
|
||||
sys.stderr.write('ERROR: Minions returned with non-zero exit code\n')
|
||||
if not all(
|
||||
exit_code == salt.defaults.exitcodes.EX_OK for exit_code in retcodes
|
||||
):
|
||||
sys.stderr.write("ERROR: Minions returned with non-zero exit code\n")
|
||||
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
|
||||
except (AuthenticationError,
|
||||
AuthorizationError,
|
||||
SaltInvocationError,
|
||||
EauthAuthenticationError,
|
||||
SaltClientError) as exc:
|
||||
except (
|
||||
AuthenticationError,
|
||||
AuthorizationError,
|
||||
SaltInvocationError,
|
||||
EauthAuthenticationError,
|
||||
SaltClientError,
|
||||
) as exc:
|
||||
ret = six.text_type(exc)
|
||||
self._output_ret(ret, '', retcode=1)
|
||||
self._output_ret(ret, "", retcode=1)
|
||||
|
||||
def _preview_target(self):
|
||||
'''
|
||||
"""
|
||||
Return a list of minions from a given target
|
||||
'''
|
||||
return self.local_client.gather_minions(self.config['tgt'], self.selected_target_option or 'glob')
|
||||
"""
|
||||
return self.local_client.gather_minions(
|
||||
self.config["tgt"], self.selected_target_option or "glob"
|
||||
)
|
||||
|
||||
def _run_batch(self):
|
||||
import salt.cli.batch
|
||||
|
||||
eauth = {}
|
||||
if 'token' in self.config:
|
||||
eauth['token'] = self.config['token']
|
||||
if "token" in self.config:
|
||||
eauth["token"] = self.config["token"]
|
||||
|
||||
# If using eauth and a token hasn't already been loaded into
|
||||
# kwargs, prompt the user to enter auth credentials
|
||||
if 'token' not in eauth and self.options.eauth:
|
||||
if "token" not in eauth and self.options.eauth:
|
||||
# This is expensive. Don't do it unless we need to.
|
||||
import salt.auth
|
||||
|
||||
resolver = salt.auth.Resolver(self.config)
|
||||
res = resolver.cli(self.options.eauth)
|
||||
if self.options.mktoken and res:
|
||||
tok = resolver.token_cli(
|
||||
self.options.eauth,
|
||||
res
|
||||
)
|
||||
tok = resolver.token_cli(self.options.eauth, res)
|
||||
if tok:
|
||||
eauth['token'] = tok.get('token', '')
|
||||
eauth["token"] = tok.get("token", "")
|
||||
if not res:
|
||||
sys.stderr.write('ERROR: Authentication failed\n')
|
||||
sys.stderr.write("ERROR: Authentication failed\n")
|
||||
sys.exit(2)
|
||||
eauth.update(res)
|
||||
eauth['eauth'] = self.options.eauth
|
||||
eauth["eauth"] = self.options.eauth
|
||||
|
||||
if self.options.static:
|
||||
|
||||
if not self.options.batch:
|
||||
self.config['batch'] = '100%'
|
||||
self.config["batch"] = "100%"
|
||||
|
||||
try:
|
||||
batch = salt.cli.batch.Batch(self.config, eauth=eauth, quiet=True)
|
||||
|
@ -270,12 +287,14 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
for res in batch.run():
|
||||
ret.update(res)
|
||||
|
||||
self._output_ret(ret, '')
|
||||
self._output_ret(ret, "")
|
||||
|
||||
else:
|
||||
try:
|
||||
self.config['batch'] = self.options.batch
|
||||
batch = salt.cli.batch.Batch(self.config, eauth=eauth, parser=self.options)
|
||||
self.config["batch"] = self.options.batch
|
||||
batch = salt.cli.batch.Batch(
|
||||
self.config, eauth=eauth, parser=self.options
|
||||
)
|
||||
except SaltClientError:
|
||||
# We will print errors to the console further down the stack
|
||||
sys.exit(1)
|
||||
|
@ -291,17 +310,17 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
|
||||
def _print_errors_summary(self, errors):
|
||||
if errors:
|
||||
salt.utils.stringutils.print_cli('\n')
|
||||
salt.utils.stringutils.print_cli('---------------------------')
|
||||
salt.utils.stringutils.print_cli('Errors')
|
||||
salt.utils.stringutils.print_cli('---------------------------')
|
||||
salt.utils.stringutils.print_cli("\n")
|
||||
salt.utils.stringutils.print_cli("---------------------------")
|
||||
salt.utils.stringutils.print_cli("Errors")
|
||||
salt.utils.stringutils.print_cli("---------------------------")
|
||||
for error in errors:
|
||||
salt.utils.stringutils.print_cli(self._format_error(error))
|
||||
|
||||
def _print_returns_summary(self, ret):
|
||||
'''
|
||||
"""
|
||||
Display returns summary
|
||||
'''
|
||||
"""
|
||||
return_counter = 0
|
||||
not_return_counter = 0
|
||||
not_return_minions = []
|
||||
|
@ -310,12 +329,11 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
failed_minions = []
|
||||
for each_minion in ret:
|
||||
minion_ret = ret[each_minion]
|
||||
if isinstance(minion_ret, dict) and 'ret' in minion_ret:
|
||||
minion_ret = ret[each_minion].get('ret')
|
||||
if (
|
||||
isinstance(minion_ret, six.string_types)
|
||||
and minion_ret.startswith("Minion did not return")
|
||||
):
|
||||
if isinstance(minion_ret, dict) and "ret" in minion_ret:
|
||||
minion_ret = ret[each_minion].get("ret")
|
||||
if isinstance(minion_ret, six.string_types) and minion_ret.startswith(
|
||||
"Minion did not return"
|
||||
):
|
||||
if "Not connected" in minion_ret:
|
||||
not_connected_minions.append(each_minion)
|
||||
elif "No response" in minion_ret:
|
||||
|
@ -326,85 +344,101 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
return_counter += 1
|
||||
if self._get_retcode(ret[each_minion]):
|
||||
failed_minions.append(each_minion)
|
||||
salt.utils.stringutils.print_cli('\n')
|
||||
salt.utils.stringutils.print_cli('-------------------------------------------')
|
||||
salt.utils.stringutils.print_cli('Summary')
|
||||
salt.utils.stringutils.print_cli('-------------------------------------------')
|
||||
salt.utils.stringutils.print_cli('# of minions targeted: {0}'.format(return_counter + not_return_counter))
|
||||
salt.utils.stringutils.print_cli('# of minions returned: {0}'.format(return_counter))
|
||||
salt.utils.stringutils.print_cli('# of minions that did not return: {0}'.format(not_return_counter))
|
||||
salt.utils.stringutils.print_cli('# of minions with errors: {0}'.format(len(failed_minions)))
|
||||
salt.utils.stringutils.print_cli("\n")
|
||||
salt.utils.stringutils.print_cli("-------------------------------------------")
|
||||
salt.utils.stringutils.print_cli("Summary")
|
||||
salt.utils.stringutils.print_cli("-------------------------------------------")
|
||||
salt.utils.stringutils.print_cli(
|
||||
"# of minions targeted: {0}".format(return_counter + not_return_counter)
|
||||
)
|
||||
salt.utils.stringutils.print_cli(
|
||||
"# of minions returned: {0}".format(return_counter)
|
||||
)
|
||||
salt.utils.stringutils.print_cli(
|
||||
"# of minions that did not return: {0}".format(not_return_counter)
|
||||
)
|
||||
salt.utils.stringutils.print_cli(
|
||||
"# of minions with errors: {0}".format(len(failed_minions))
|
||||
)
|
||||
if self.options.verbose:
|
||||
if not_connected_minions:
|
||||
salt.utils.stringutils.print_cli('Minions not connected: {0}'.format(" ".join(not_connected_minions)))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"Minions not connected: {0}".format(" ".join(not_connected_minions))
|
||||
)
|
||||
if not_response_minions:
|
||||
salt.utils.stringutils.print_cli('Minions not responding: {0}'.format(" ".join(not_response_minions)))
|
||||
salt.utils.stringutils.print_cli(
|
||||
"Minions not responding: {0}".format(" ".join(not_response_minions))
|
||||
)
|
||||
if failed_minions:
|
||||
salt.utils.stringutils.print_cli('Minions with failures: {0}'.format(" ".join(failed_minions)))
|
||||
salt.utils.stringutils.print_cli('-------------------------------------------')
|
||||
salt.utils.stringutils.print_cli(
|
||||
"Minions with failures: {0}".format(" ".join(failed_minions))
|
||||
)
|
||||
salt.utils.stringutils.print_cli("-------------------------------------------")
|
||||
|
||||
def _progress_end(self, out):
|
||||
import salt.output
|
||||
|
||||
salt.output.progress_end(self.progress_bar)
|
||||
|
||||
def _progress_ret(self, progress, out):
|
||||
'''
|
||||
"""
|
||||
Print progress events
|
||||
'''
|
||||
"""
|
||||
import salt.output
|
||||
|
||||
# Get the progress bar
|
||||
if not hasattr(self, 'progress_bar'):
|
||||
if not hasattr(self, "progress_bar"):
|
||||
try:
|
||||
self.progress_bar = salt.output.get_progress(self.config, out, progress)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
raise LoaderError('\nWARNING: Install the `progressbar` python package. '
|
||||
'Requested job was still run but output cannot be displayed.\n')
|
||||
raise LoaderError(
|
||||
"\nWARNING: Install the `progressbar` python package. "
|
||||
"Requested job was still run but output cannot be displayed.\n"
|
||||
)
|
||||
salt.output.update_progress(self.config, progress, self.progress_bar, out)
|
||||
|
||||
def _output_ret(self, ret, out, retcode=0):
|
||||
'''
|
||||
"""
|
||||
Print the output from a single return to the terminal
|
||||
'''
|
||||
"""
|
||||
import salt.output
|
||||
|
||||
# Handle special case commands
|
||||
if self.config['fun'] == 'sys.doc' and not isinstance(ret, Exception):
|
||||
if self.config["fun"] == "sys.doc" and not isinstance(ret, Exception):
|
||||
self._print_docs(ret)
|
||||
else:
|
||||
# Determine the proper output method and run it
|
||||
salt.output.display_output(ret,
|
||||
out=out,
|
||||
opts=self.config,
|
||||
_retcode=retcode)
|
||||
salt.output.display_output(ret, out=out, opts=self.config, _retcode=retcode)
|
||||
if not ret:
|
||||
sys.stderr.write('ERROR: No return received\n')
|
||||
sys.stderr.write("ERROR: No return received\n")
|
||||
sys.exit(2)
|
||||
|
||||
def _format_ret(self, full_ret):
|
||||
'''
|
||||
"""
|
||||
Take the full return data and format it to simple output
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
out = ''
|
||||
out = ""
|
||||
retcode = 0
|
||||
for key, data in six.iteritems(full_ret):
|
||||
ret[key] = data['ret']
|
||||
if 'out' in data:
|
||||
out = data['out']
|
||||
ret[key] = data["ret"]
|
||||
if "out" in data:
|
||||
out = data["out"]
|
||||
ret_retcode = self._get_retcode(data)
|
||||
if ret_retcode > retcode:
|
||||
retcode = ret_retcode
|
||||
return ret, out, retcode
|
||||
|
||||
def _get_retcode(self, ret):
|
||||
'''
|
||||
"""
|
||||
Determine a retcode for a given return
|
||||
'''
|
||||
"""
|
||||
retcode = 0
|
||||
# if there is a dict with retcode, use that
|
||||
if isinstance(ret, dict) and ret.get('retcode', 0) != 0:
|
||||
if isinstance(ret.get('retcode', 0), dict):
|
||||
return max(six.itervalues(ret.get('retcode', {0: 0})))
|
||||
return ret['retcode']
|
||||
if isinstance(ret, dict) and ret.get("retcode", 0) != 0:
|
||||
if isinstance(ret.get("retcode", 0), dict):
|
||||
return max(six.itervalues(ret.get("retcode", {0: 0})))
|
||||
return ret["retcode"]
|
||||
# if its a boolean, False means 1
|
||||
elif isinstance(ret, bool) and not ret:
|
||||
return 1
|
||||
|
@ -412,32 +446,36 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
|
|||
|
||||
def _format_error(self, minion_error):
|
||||
for minion, error_doc in six.iteritems(minion_error):
|
||||
error = 'Minion [{0}] encountered exception \'{1}\''.format(minion, error_doc['message'])
|
||||
error = "Minion [{0}] encountered exception '{1}'".format(
|
||||
minion, error_doc["message"]
|
||||
)
|
||||
return error
|
||||
|
||||
def _print_docs(self, ret):
|
||||
'''
|
||||
"""
|
||||
Print out the docstrings for all of the functions on the minions
|
||||
'''
|
||||
"""
|
||||
import salt.output
|
||||
|
||||
docs = {}
|
||||
if not ret:
|
||||
self.exit(2, 'No minions found to gather docs from\n')
|
||||
self.exit(2, "No minions found to gather docs from\n")
|
||||
if isinstance(ret, six.string_types):
|
||||
self.exit(2, '{0}\n'.format(ret))
|
||||
self.exit(2, "{0}\n".format(ret))
|
||||
for host in ret:
|
||||
if isinstance(ret[host], six.string_types) \
|
||||
and (ret[host].startswith("Minion did not return")
|
||||
or ret[host] == 'VALUE_TRIMMED'):
|
||||
if isinstance(ret[host], six.string_types) and (
|
||||
ret[host].startswith("Minion did not return")
|
||||
or ret[host] == "VALUE_TRIMMED"
|
||||
):
|
||||
continue
|
||||
for fun in ret[host]:
|
||||
if fun not in docs and ret[host][fun]:
|
||||
docs[fun] = ret[host][fun]
|
||||
if self.options.output:
|
||||
for fun in sorted(docs):
|
||||
salt.output.display_output({fun: docs[fun]}, 'nested', self.config)
|
||||
salt.output.display_output({fun: docs[fun]}, "nested", self.config)
|
||||
else:
|
||||
for fun in sorted(docs):
|
||||
salt.utils.stringutils.print_cli('{0}:'.format(fun))
|
||||
salt.utils.stringutils.print_cli("{0}:".format(fun))
|
||||
salt.utils.stringutils.print_cli(docs[fun])
|
||||
salt.utils.stringutils.print_cli('')
|
||||
salt.utils.stringutils.print_cli("")
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
salt.cli.spm
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
Salt's spm cli parser.
|
||||
|
||||
.. versionadded:: 2015.8.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
@ -14,28 +14,27 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||
# Import Salt libs
|
||||
import salt.spm
|
||||
import salt.utils.parsers as parsers
|
||||
from salt.utils.verify import verify_log, verify_env
|
||||
from salt.utils.verify import verify_env, verify_log
|
||||
|
||||
|
||||
class SPM(parsers.SPMParser):
|
||||
'''
|
||||
"""
|
||||
The cli parser object used to fire up the salt spm system.
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Run the api
|
||||
'''
|
||||
"""
|
||||
ui = salt.spm.SPMCmdlineInterface()
|
||||
self.parse_args()
|
||||
self.setup_logfile_logger()
|
||||
v_dirs = [
|
||||
self.config['spm_cache_dir'],
|
||||
self.config["spm_cache_dir"],
|
||||
]
|
||||
verify_env(v_dirs,
|
||||
self.config['user'],
|
||||
root_dir=self.config['root_dir'],
|
||||
)
|
||||
verify_env(
|
||||
v_dirs, self.config["user"], root_dir=self.config["root_dir"],
|
||||
)
|
||||
verify_log(self.config)
|
||||
client = salt.spm.SPMClient(ui, self.config)
|
||||
client.run(self.args)
|
||||
|
|
|
@ -1,21 +1,23 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import sys
|
||||
|
||||
import salt.client.ssh
|
||||
import salt.utils.parsers
|
||||
from salt.utils.verify import verify_log
|
||||
|
||||
|
||||
class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
|
||||
'''
|
||||
"""
|
||||
Used to Execute the salt ssh routine
|
||||
'''
|
||||
"""
|
||||
|
||||
def run(self):
|
||||
if '-H' in sys.argv or '--hosts' in sys.argv:
|
||||
sys.argv += ['x', 'x'] # Hack: pass a mandatory two options
|
||||
# that won't be used anyways with -H or --hosts
|
||||
if "-H" in sys.argv or "--hosts" in sys.argv:
|
||||
sys.argv += ["x", "x"] # Hack: pass a mandatory two options
|
||||
# that won't be used anyways with -H or --hosts
|
||||
self.parse_args()
|
||||
self.setup_logfile_logger()
|
||||
verify_log(self.config)
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,5 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
This module provides the point of entry for client applications to interface to
|
||||
salt. The purpose is to have a simplified consistent interface for various
|
||||
client applications.
|
||||
|
@ -13,62 +13,65 @@ client applications.
|
|||
|
||||
http://docs.saltstack.com/ref/clients/index.html
|
||||
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
import salt.auth
|
||||
import salt.client
|
||||
|
||||
# Import Salt libs
|
||||
import salt.config
|
||||
import salt.auth
|
||||
import salt.client
|
||||
import salt.runner
|
||||
import salt.wheel
|
||||
import salt.syspaths as syspaths
|
||||
import salt.utils.args
|
||||
import salt.utils.event
|
||||
import salt.syspaths as syspaths
|
||||
import salt.wheel
|
||||
from salt.exceptions import EauthAuthenticationError
|
||||
|
||||
|
||||
def tokenify(cmd, token=None):
|
||||
'''
|
||||
"""
|
||||
If token is not None Then assign token to 'token' key of cmd dict
|
||||
and return cmd
|
||||
Otherwise return cmd
|
||||
'''
|
||||
"""
|
||||
if token is not None:
|
||||
cmd['token'] = token
|
||||
cmd["token"] = token
|
||||
return cmd
|
||||
|
||||
|
||||
class APIClient(object):
|
||||
'''
|
||||
"""
|
||||
Provide a uniform method of accessing the various client interfaces in Salt
|
||||
in the form of low-data data structures. For example:
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts=None, listen=True):
|
||||
if not opts:
|
||||
opts = salt.config.client_config(
|
||||
os.environ.get(
|
||||
'SALT_MASTER_CONFIG',
|
||||
os.path.join(syspaths.CONFIG_DIR, 'master')
|
||||
"SALT_MASTER_CONFIG", os.path.join(syspaths.CONFIG_DIR, "master")
|
||||
)
|
||||
)
|
||||
self.opts = opts
|
||||
self.localClient = salt.client.get_local_client(self.opts['conf_file'])
|
||||
self.localClient = salt.client.get_local_client(self.opts["conf_file"])
|
||||
self.runnerClient = salt.runner.RunnerClient(self.opts)
|
||||
self.wheelClient = salt.wheel.Wheel(self.opts)
|
||||
self.resolver = salt.auth.Resolver(self.opts)
|
||||
self.event = salt.utils.event.get_event(
|
||||
'master',
|
||||
self.opts['sock_dir'],
|
||||
self.opts['transport'],
|
||||
opts=self.opts,
|
||||
listen=listen)
|
||||
"master",
|
||||
self.opts["sock_dir"],
|
||||
self.opts["transport"],
|
||||
opts=self.opts,
|
||||
listen=listen,
|
||||
)
|
||||
|
||||
def run(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Execute the salt command given by cmd dict.
|
||||
|
||||
cmd is a dictionary of the following form:
|
||||
|
@ -117,65 +120,67 @@ class APIClient(object):
|
|||
password: the user's password. Required if token is missing.
|
||||
eauth: the authentication type such as 'pam' or 'ldap'. Required if token is missing
|
||||
|
||||
'''
|
||||
"""
|
||||
cmd = dict(cmd) # make copy
|
||||
client = 'minion' # default to local minion client
|
||||
mode = cmd.get('mode', 'async')
|
||||
client = "minion" # default to local minion client
|
||||
mode = cmd.get("mode", "async")
|
||||
|
||||
# check for wheel or runner prefix to fun name to use wheel or runner client
|
||||
funparts = cmd.get('fun', '').split('.')
|
||||
if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']: # master
|
||||
funparts = cmd.get("fun", "").split(".")
|
||||
if len(funparts) > 2 and funparts[0] in ["wheel", "runner"]: # master
|
||||
client = funparts[0]
|
||||
cmd['fun'] = '.'.join(funparts[1:]) # strip prefix
|
||||
cmd["fun"] = ".".join(funparts[1:]) # strip prefix
|
||||
|
||||
if not ('token' in cmd or
|
||||
('eauth' in cmd and 'password' in cmd and 'username' in cmd)):
|
||||
raise EauthAuthenticationError('No authentication credentials given')
|
||||
if not (
|
||||
"token" in cmd
|
||||
or ("eauth" in cmd and "password" in cmd and "username" in cmd)
|
||||
):
|
||||
raise EauthAuthenticationError("No authentication credentials given")
|
||||
|
||||
executor = getattr(self, '{0}_{1}'.format(client, mode))
|
||||
executor = getattr(self, "{0}_{1}".format(client, mode))
|
||||
result = executor(**cmd)
|
||||
return result
|
||||
|
||||
def minion_async(self, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Wrap LocalClient for running :ref:`execution modules <all-salt.modules>`
|
||||
and immediately return the job ID. The results of the job can then be
|
||||
retrieved at a later time.
|
||||
|
||||
.. seealso:: :ref:`python-api`
|
||||
'''
|
||||
"""
|
||||
return self.localClient.run_job(**kwargs)
|
||||
|
||||
def minion_sync(self, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Wrap LocalClient for running :ref:`execution modules <all-salt.modules>`
|
||||
|
||||
.. seealso:: :ref:`python-api`
|
||||
'''
|
||||
"""
|
||||
return self.localClient.cmd(**kwargs)
|
||||
|
||||
def runner_async(self, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Wrap RunnerClient for executing :ref:`runner modules <all-salt.runners>`
|
||||
Expects that one of the kwargs is key 'fun' whose value is the namestring
|
||||
of the function to call
|
||||
'''
|
||||
"""
|
||||
return self.runnerClient.master_call(**kwargs)
|
||||
|
||||
runner_sync = runner_async # always runner asynchronous, so works in either mode
|
||||
|
||||
def wheel_sync(self, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Wrap Wheel to enable executing :ref:`wheel modules <all-salt.wheel>`
|
||||
Expects that one of the kwargs is key 'fun' whose value is the namestring
|
||||
of the function to call
|
||||
'''
|
||||
"""
|
||||
return self.wheelClient.master_call(**kwargs)
|
||||
|
||||
wheel_async = wheel_sync # always wheel_sync, so it works either mode
|
||||
|
||||
def signature(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Convenience function that returns dict of function signature(s) specified by cmd.
|
||||
|
||||
cmd is dict of the form:
|
||||
|
@ -204,37 +209,40 @@ class APIClient(object):
|
|||
eauth: the authentication type such as 'pam' or 'ldap'. Required if token is missing
|
||||
|
||||
Adds client per the command.
|
||||
'''
|
||||
cmd['client'] = 'minion'
|
||||
if len(cmd['module'].split('.')) > 2 and cmd['module'].split('.')[0] in ['runner', 'wheel']:
|
||||
cmd['client'] = 'master'
|
||||
"""
|
||||
cmd["client"] = "minion"
|
||||
if len(cmd["module"].split(".")) > 2 and cmd["module"].split(".")[0] in [
|
||||
"runner",
|
||||
"wheel",
|
||||
]:
|
||||
cmd["client"] = "master"
|
||||
return self._signature(cmd)
|
||||
|
||||
def _signature(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Expects everything that signature does and also a client type string.
|
||||
client can either be master or minion.
|
||||
'''
|
||||
"""
|
||||
result = {}
|
||||
|
||||
client = cmd.get('client', 'minion')
|
||||
if client == 'minion':
|
||||
cmd['fun'] = 'sys.argspec'
|
||||
cmd['kwarg'] = dict(module=cmd['module'])
|
||||
client = cmd.get("client", "minion")
|
||||
if client == "minion":
|
||||
cmd["fun"] = "sys.argspec"
|
||||
cmd["kwarg"] = dict(module=cmd["module"])
|
||||
result = self.run(cmd)
|
||||
elif client == 'master':
|
||||
parts = cmd['module'].split('.')
|
||||
elif client == "master":
|
||||
parts = cmd["module"].split(".")
|
||||
client = parts[0]
|
||||
module = '.'.join(parts[1:]) # strip prefix
|
||||
if client == 'wheel':
|
||||
module = ".".join(parts[1:]) # strip prefix
|
||||
if client == "wheel":
|
||||
functions = self.wheelClient.functions
|
||||
elif client == 'runner':
|
||||
elif client == "runner":
|
||||
functions = self.runnerClient.functions
|
||||
result = {'master': salt.utils.args.argspec_report(functions, module)}
|
||||
result = {"master": salt.utils.args.argspec_report(functions, module)}
|
||||
return result
|
||||
|
||||
def create_token(self, creds):
|
||||
'''
|
||||
"""
|
||||
Create token with creds.
|
||||
Token authorizes salt access if successful authentication
|
||||
with the credentials in creds.
|
||||
|
@ -270,57 +278,61 @@ class APIClient(object):
|
|||
"test.*"
|
||||
]
|
||||
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
tokenage = self.resolver.mk_token(creds)
|
||||
except Exception as ex: # pylint: disable=broad-except
|
||||
raise EauthAuthenticationError(
|
||||
"Authentication failed with {0}.".format(repr(ex)))
|
||||
"Authentication failed with {0}.".format(repr(ex))
|
||||
)
|
||||
|
||||
if 'token' not in tokenage:
|
||||
raise EauthAuthenticationError("Authentication failed with provided credentials.")
|
||||
if "token" not in tokenage:
|
||||
raise EauthAuthenticationError(
|
||||
"Authentication failed with provided credentials."
|
||||
)
|
||||
|
||||
# Grab eauth config for the current backend for the current user
|
||||
tokenage_eauth = self.opts['external_auth'][tokenage['eauth']]
|
||||
if tokenage['name'] in tokenage_eauth:
|
||||
tokenage['perms'] = tokenage_eauth[tokenage['name']]
|
||||
tokenage_eauth = self.opts["external_auth"][tokenage["eauth"]]
|
||||
if tokenage["name"] in tokenage_eauth:
|
||||
tokenage["perms"] = tokenage_eauth[tokenage["name"]]
|
||||
else:
|
||||
tokenage['perms'] = tokenage_eauth['*']
|
||||
tokenage["perms"] = tokenage_eauth["*"]
|
||||
|
||||
tokenage['user'] = tokenage['name']
|
||||
tokenage['username'] = tokenage['name']
|
||||
tokenage["user"] = tokenage["name"]
|
||||
tokenage["username"] = tokenage["name"]
|
||||
|
||||
return tokenage
|
||||
|
||||
def verify_token(self, token):
|
||||
'''
|
||||
"""
|
||||
If token is valid Then returns user name associated with token
|
||||
Else False.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
result = self.resolver.get_token(token)
|
||||
except Exception as ex: # pylint: disable=broad-except
|
||||
raise EauthAuthenticationError(
|
||||
"Token validation failed with {0}.".format(repr(ex)))
|
||||
"Token validation failed with {0}.".format(repr(ex))
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
def get_event(self, wait=0.25, tag='', full=False):
|
||||
'''
|
||||
def get_event(self, wait=0.25, tag="", full=False):
|
||||
"""
|
||||
Get a single salt event.
|
||||
If no events are available, then block for up to ``wait`` seconds.
|
||||
Return the event if it matches the tag (or ``tag`` is empty)
|
||||
Otherwise return None
|
||||
|
||||
If wait is 0 then block forever or until next event becomes available.
|
||||
'''
|
||||
"""
|
||||
return self.event.get_event(wait=wait, tag=tag, full=full, auto_reconnect=True)
|
||||
|
||||
def fire_event(self, data, tag):
|
||||
'''
|
||||
"""
|
||||
fires event with data and tag
|
||||
This only works if api is running with same user permissions as master
|
||||
Need to convert this to a master call with appropriate authentication
|
||||
|
||||
'''
|
||||
return self.event.fire_event(data, salt.utils.event.tagify(tag, 'wui'))
|
||||
"""
|
||||
return self.event.fire_event(data, salt.utils.event.tagify(tag, "wui"))
|
||||
|
|
|
@ -1,21 +1,27 @@
|
|||
# coding: utf-8
|
||||
'''
|
||||
"""
|
||||
A collection of mixins useful for the various *Client interfaces
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, with_statement, unicode_literals
|
||||
import fnmatch
|
||||
import signal
|
||||
import logging
|
||||
import weakref
|
||||
import traceback
|
||||
from __future__ import absolute_import, print_function, unicode_literals, with_statement
|
||||
|
||||
import collections
|
||||
import copy as pycopy
|
||||
import fnmatch
|
||||
import logging
|
||||
import signal
|
||||
import traceback
|
||||
import weakref
|
||||
|
||||
# Import Salt libs
|
||||
import salt.exceptions
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.tornado.stack_context
|
||||
import salt.log.setup
|
||||
import salt.minion
|
||||
import salt.transport.client
|
||||
import salt.utils.args
|
||||
import salt.utils.doc
|
||||
import salt.utils.error
|
||||
|
@ -28,44 +34,42 @@ import salt.utils.process
|
|||
import salt.utils.state
|
||||
import salt.utils.user
|
||||
import salt.utils.versions
|
||||
import salt.transport.client
|
||||
import salt.log.setup
|
||||
from salt.ext import six
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.tornado.stack_context
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
CLIENT_INTERNAL_KEYWORDS = frozenset([
|
||||
'client',
|
||||
'cmd',
|
||||
'eauth',
|
||||
'fun',
|
||||
'kwarg',
|
||||
'match',
|
||||
'token',
|
||||
'__jid__',
|
||||
'__tag__',
|
||||
'__user__',
|
||||
'username',
|
||||
'password',
|
||||
'full_return',
|
||||
'print_event'
|
||||
])
|
||||
CLIENT_INTERNAL_KEYWORDS = frozenset(
|
||||
[
|
||||
"client",
|
||||
"cmd",
|
||||
"eauth",
|
||||
"fun",
|
||||
"kwarg",
|
||||
"match",
|
||||
"token",
|
||||
"__jid__",
|
||||
"__tag__",
|
||||
"__user__",
|
||||
"username",
|
||||
"password",
|
||||
"full_return",
|
||||
"print_event",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class ClientFuncsDict(collections.MutableMapping):
|
||||
'''
|
||||
"""
|
||||
Class to make a read-only dict for accessing runner funcs "directly"
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, client):
|
||||
self.client = client
|
||||
|
||||
def __getattr__(self, attr):
|
||||
'''
|
||||
"""
|
||||
Provide access eg. to 'pack'
|
||||
'''
|
||||
"""
|
||||
return getattr(self.client.functions, attr)
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
|
@ -75,38 +79,40 @@ class ClientFuncsDict(collections.MutableMapping):
|
|||
raise NotImplementedError()
|
||||
|
||||
def __getitem__(self, key):
|
||||
'''
|
||||
"""
|
||||
Return a function that you can call with regular func params, but
|
||||
will do all the _proc_function magic
|
||||
'''
|
||||
"""
|
||||
if key not in self.client.functions:
|
||||
raise KeyError
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
low = {'fun': key,
|
||||
'args': args,
|
||||
'kwargs': kwargs,
|
||||
}
|
||||
low = {
|
||||
"fun": key,
|
||||
"args": args,
|
||||
"kwargs": kwargs,
|
||||
}
|
||||
pub_data = {}
|
||||
# Copy kwargs keys so we can iterate over and pop the pub data
|
||||
kwargs_keys = list(kwargs)
|
||||
|
||||
# pull out pub_data if you have it
|
||||
for kwargs_key in kwargs_keys:
|
||||
if kwargs_key.startswith('__pub_'):
|
||||
if kwargs_key.startswith("__pub_"):
|
||||
pub_data[kwargs_key] = kwargs.pop(kwargs_key)
|
||||
|
||||
async_pub = self.client._gen_async_pub(pub_data.get('__pub_jid'))
|
||||
async_pub = self.client._gen_async_pub(pub_data.get("__pub_jid"))
|
||||
|
||||
user = salt.utils.user.get_specific_user()
|
||||
return self.client._proc_function(
|
||||
key,
|
||||
low,
|
||||
user,
|
||||
async_pub['tag'], # TODO: fix
|
||||
async_pub['jid'], # TODO: fix
|
||||
async_pub["tag"], # TODO: fix
|
||||
async_pub["jid"], # TODO: fix
|
||||
False, # Don't daemonize
|
||||
)
|
||||
|
||||
return wrapper
|
||||
|
||||
def __len__(self):
|
||||
|
@ -117,37 +123,38 @@ class ClientFuncsDict(collections.MutableMapping):
|
|||
|
||||
|
||||
class SyncClientMixin(object):
|
||||
'''
|
||||
"""
|
||||
A mixin for *Client interfaces to abstract common function execution
|
||||
'''
|
||||
"""
|
||||
|
||||
functions = ()
|
||||
|
||||
def functions_dict(self):
|
||||
'''
|
||||
"""
|
||||
Return a dict that will mimic the "functions" dict used all over salt.
|
||||
It creates a wrapper around the function allowing **kwargs, and if pub_data
|
||||
is passed in as kwargs, will re-use the JID passed in
|
||||
'''
|
||||
"""
|
||||
return ClientFuncsDict(self)
|
||||
|
||||
def master_call(self, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Execute a function through the master network interface.
|
||||
'''
|
||||
"""
|
||||
load = kwargs
|
||||
load['cmd'] = self.client
|
||||
load["cmd"] = self.client
|
||||
|
||||
with salt.transport.client.ReqChannel.factory(self.opts,
|
||||
crypt='clear',
|
||||
usage='master_call') as channel:
|
||||
with salt.transport.client.ReqChannel.factory(
|
||||
self.opts, crypt="clear", usage="master_call"
|
||||
) as channel:
|
||||
ret = channel.send(load)
|
||||
if isinstance(ret, collections.Mapping):
|
||||
if 'error' in ret:
|
||||
salt.utils.error.raise_error(**ret['error'])
|
||||
if "error" in ret:
|
||||
salt.utils.error.raise_error(**ret["error"])
|
||||
return ret
|
||||
|
||||
def cmd_sync(self, low, timeout=None, full_return=False):
|
||||
'''
|
||||
"""
|
||||
Execute a runner function synchronously; eauth is respected
|
||||
|
||||
This function requires that :conf_master:`external_auth` is configured
|
||||
|
@ -161,23 +168,36 @@ class SyncClientMixin(object):
|
|||
'password': 'saltdev',
|
||||
'eauth': 'pam',
|
||||
})
|
||||
'''
|
||||
with salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=True) as event:
|
||||
"""
|
||||
with salt.utils.event.get_master_event(
|
||||
self.opts, self.opts["sock_dir"], listen=True
|
||||
) as event:
|
||||
job = self.master_call(**low)
|
||||
ret_tag = salt.utils.event.tagify('ret', base=job['tag'])
|
||||
ret_tag = salt.utils.event.tagify("ret", base=job["tag"])
|
||||
|
||||
if timeout is None:
|
||||
timeout = self.opts.get('rest_timeout', 300)
|
||||
ret = event.get_event(tag=ret_tag, full=True, wait=timeout, auto_reconnect=True)
|
||||
timeout = self.opts.get("rest_timeout", 300)
|
||||
ret = event.get_event(
|
||||
tag=ret_tag, full=True, wait=timeout, auto_reconnect=True
|
||||
)
|
||||
if ret is None:
|
||||
raise salt.exceptions.SaltClientTimeout(
|
||||
"RunnerClient job '{0}' timed out".format(job['jid']),
|
||||
jid=job['jid'])
|
||||
"RunnerClient job '{0}' timed out".format(job["jid"]),
|
||||
jid=job["jid"],
|
||||
)
|
||||
|
||||
return ret if full_return else ret['data']['return']
|
||||
return ret if full_return else ret["data"]["return"]
|
||||
|
||||
def cmd(self, fun, arg=None, pub_data=None, kwarg=None, print_event=True, full_return=False):
|
||||
'''
|
||||
def cmd(
|
||||
self,
|
||||
fun,
|
||||
arg=None,
|
||||
pub_data=None,
|
||||
kwarg=None,
|
||||
print_event=True,
|
||||
full_return=False,
|
||||
):
|
||||
"""
|
||||
Execute a function
|
||||
|
||||
.. code-block:: python
|
||||
|
@ -204,72 +224,71 @@ class SyncClientMixin(object):
|
|||
},
|
||||
}
|
||||
|
||||
'''
|
||||
"""
|
||||
if arg is None:
|
||||
arg = tuple()
|
||||
if not isinstance(arg, list) and not isinstance(arg, tuple):
|
||||
raise salt.exceptions.SaltInvocationError(
|
||||
'arg must be formatted as a list/tuple'
|
||||
"arg must be formatted as a list/tuple"
|
||||
)
|
||||
if pub_data is None:
|
||||
pub_data = {}
|
||||
if not isinstance(pub_data, dict):
|
||||
raise salt.exceptions.SaltInvocationError(
|
||||
'pub_data must be formatted as a dictionary'
|
||||
"pub_data must be formatted as a dictionary"
|
||||
)
|
||||
if kwarg is None:
|
||||
kwarg = {}
|
||||
if not isinstance(kwarg, dict):
|
||||
raise salt.exceptions.SaltInvocationError(
|
||||
'kwarg must be formatted as a dictionary'
|
||||
"kwarg must be formatted as a dictionary"
|
||||
)
|
||||
arglist = salt.utils.args.parse_input(
|
||||
arg,
|
||||
no_parse=self.opts.get('no_parse', []))
|
||||
arg, no_parse=self.opts.get("no_parse", [])
|
||||
)
|
||||
|
||||
# if you were passed kwarg, add it to arglist
|
||||
if kwarg:
|
||||
kwarg['__kwarg__'] = True
|
||||
kwarg["__kwarg__"] = True
|
||||
arglist.append(kwarg)
|
||||
|
||||
args, kwargs = salt.minion.load_args_and_kwargs(
|
||||
self.functions[fun], arglist, pub_data
|
||||
)
|
||||
low = {'fun': fun,
|
||||
'arg': args,
|
||||
'kwarg': kwargs}
|
||||
low = {"fun": fun, "arg": args, "kwarg": kwargs}
|
||||
return self.low(fun, low, print_event=print_event, full_return=full_return)
|
||||
|
||||
@property
|
||||
def mminion(self):
|
||||
if not hasattr(self, '_mminion'):
|
||||
self._mminion = salt.minion.MasterMinion(self.opts, states=False, rend=False)
|
||||
if not hasattr(self, "_mminion"):
|
||||
self._mminion = salt.minion.MasterMinion(
|
||||
self.opts, states=False, rend=False
|
||||
)
|
||||
return self._mminion
|
||||
|
||||
@property
|
||||
def store_job(self):
|
||||
'''
|
||||
"""
|
||||
Helper that allows us to turn off storing jobs for different classes
|
||||
that may incorporate this mixin.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
class_name = self.__class__.__name__.lower()
|
||||
except AttributeError:
|
||||
log.warning(
|
||||
'Unable to determine class name',
|
||||
exc_info_on_loglevel=logging.DEBUG
|
||||
"Unable to determine class name", exc_info_on_loglevel=logging.DEBUG
|
||||
)
|
||||
return True
|
||||
|
||||
try:
|
||||
return self.opts['{0}_returns'.format(class_name)]
|
||||
return self.opts["{0}_returns".format(class_name)]
|
||||
except KeyError:
|
||||
# No such option, assume this isn't one we care about gating and
|
||||
# just return True.
|
||||
return True
|
||||
|
||||
def low(self, fun, low, print_event=True, full_return=False):
|
||||
'''
|
||||
"""
|
||||
Execute a function from low data
|
||||
Low data includes:
|
||||
required:
|
||||
|
@ -280,23 +299,24 @@ class SyncClientMixin(object):
|
|||
- __user__: user who is running the command
|
||||
- __jid__: jid to run under
|
||||
- __tag__: tag to run under
|
||||
'''
|
||||
"""
|
||||
# fire the mminion loading (if not already done) here
|
||||
# this is not to clutter the output with the module loading
|
||||
# if we have a high debug level.
|
||||
self.mminion # pylint: disable=W0104
|
||||
jid = low.get('__jid__', salt.utils.jid.gen_jid(self.opts))
|
||||
tag = low.get('__tag__', salt.utils.event.tagify(jid, prefix=self.tag_prefix))
|
||||
jid = low.get("__jid__", salt.utils.jid.gen_jid(self.opts))
|
||||
tag = low.get("__tag__", salt.utils.event.tagify(jid, prefix=self.tag_prefix))
|
||||
|
||||
data = {'fun': '{0}.{1}'.format(self.client, fun),
|
||||
'jid': jid,
|
||||
'user': low.get('__user__', 'UNKNOWN'),
|
||||
}
|
||||
data = {
|
||||
"fun": "{0}.{1}".format(self.client, fun),
|
||||
"jid": jid,
|
||||
"user": low.get("__user__", "UNKNOWN"),
|
||||
}
|
||||
|
||||
if print_event:
|
||||
print_func = self.print_async_event \
|
||||
if hasattr(self, 'print_async_event') \
|
||||
else None
|
||||
print_func = (
|
||||
self.print_async_event if hasattr(self, "print_async_event") else None
|
||||
)
|
||||
else:
|
||||
# Suppress printing of return event (this keeps us from printing
|
||||
# runner/wheel output during orchestration).
|
||||
|
@ -304,25 +324,26 @@ class SyncClientMixin(object):
|
|||
|
||||
with salt.utils.event.NamespacedEvent(
|
||||
salt.utils.event.get_event(
|
||||
'master',
|
||||
self.opts['sock_dir'],
|
||||
self.opts['transport'],
|
||||
"master",
|
||||
self.opts["sock_dir"],
|
||||
self.opts["transport"],
|
||||
opts=self.opts,
|
||||
listen=False,
|
||||
),
|
||||
tag,
|
||||
print_func=print_func
|
||||
print_func=print_func,
|
||||
) as namespaced_event:
|
||||
|
||||
# TODO: test that they exist
|
||||
# TODO: Other things to inject??
|
||||
func_globals = {'__jid__': jid,
|
||||
'__user__': data['user'],
|
||||
'__tag__': tag,
|
||||
# weak ref to avoid the Exception in interpreter
|
||||
# teardown of event
|
||||
'__jid_event__': weakref.proxy(namespaced_event),
|
||||
}
|
||||
func_globals = {
|
||||
"__jid__": jid,
|
||||
"__user__": data["user"],
|
||||
"__tag__": tag,
|
||||
# weak ref to avoid the Exception in interpreter
|
||||
# teardown of event
|
||||
"__jid_event__": weakref.proxy(namespaced_event),
|
||||
}
|
||||
|
||||
try:
|
||||
self_functions = pycopy.copy(self.functions)
|
||||
|
@ -333,9 +354,9 @@ class SyncClientMixin(object):
|
|||
completed_funcs = []
|
||||
|
||||
for mod_name in six.iterkeys(self_functions):
|
||||
if '.' not in mod_name:
|
||||
if "." not in mod_name:
|
||||
continue
|
||||
mod, _ = mod_name.split('.', 1)
|
||||
mod, _ = mod_name.split(".", 1)
|
||||
if mod in completed_funcs:
|
||||
continue
|
||||
completed_funcs.append(mod)
|
||||
|
@ -351,110 +372,124 @@ class SyncClientMixin(object):
|
|||
# we make the transition we will load "kwargs" using format_call if
|
||||
# there are no kwargs in the low object passed in.
|
||||
|
||||
if 'arg' in low and 'kwarg' in low:
|
||||
args = low['arg']
|
||||
kwargs = low['kwarg']
|
||||
if "arg" in low and "kwarg" in low:
|
||||
args = low["arg"]
|
||||
kwargs = low["kwarg"]
|
||||
else:
|
||||
f_call = salt.utils.args.format_call(
|
||||
self.functions[fun],
|
||||
low,
|
||||
expected_extra_kws=CLIENT_INTERNAL_KEYWORDS
|
||||
expected_extra_kws=CLIENT_INTERNAL_KEYWORDS,
|
||||
)
|
||||
args = f_call.get('args', ())
|
||||
kwargs = f_call.get('kwargs', {})
|
||||
args = f_call.get("args", ())
|
||||
kwargs = f_call.get("kwargs", {})
|
||||
|
||||
# Update the event data with loaded args and kwargs
|
||||
data['fun_args'] = list(args) + ([kwargs] if kwargs else [])
|
||||
func_globals['__jid_event__'].fire_event(data, 'new')
|
||||
data["fun_args"] = list(args) + ([kwargs] if kwargs else [])
|
||||
func_globals["__jid_event__"].fire_event(data, "new")
|
||||
|
||||
# Initialize a context for executing the method.
|
||||
with salt.ext.tornado.stack_context.StackContext(self.functions.context_dict.clone):
|
||||
with salt.ext.tornado.stack_context.StackContext(
|
||||
self.functions.context_dict.clone
|
||||
):
|
||||
func = self.functions[fun]
|
||||
try:
|
||||
data['return'] = func(*args, **kwargs)
|
||||
data["return"] = func(*args, **kwargs)
|
||||
except TypeError as exc:
|
||||
data['return'] = '\nPassed invalid arguments: {0}\n\nUsage:\n{1}'.format(exc, func.__doc__)
|
||||
data[
|
||||
"return"
|
||||
] = "\nPassed invalid arguments: {0}\n\nUsage:\n{1}".format(
|
||||
exc, func.__doc__
|
||||
)
|
||||
try:
|
||||
data['success'] = self.context.get('retcode', 0) == 0
|
||||
data["success"] = self.context.get("retcode", 0) == 0
|
||||
except AttributeError:
|
||||
# Assume a True result if no context attribute
|
||||
data['success'] = True
|
||||
if isinstance(data['return'], dict) and 'data' in data['return']:
|
||||
data["success"] = True
|
||||
if isinstance(data["return"], dict) and "data" in data["return"]:
|
||||
# some functions can return boolean values
|
||||
data['success'] = salt.utils.state.check_result(data['return']['data'])
|
||||
data["success"] = salt.utils.state.check_result(
|
||||
data["return"]["data"]
|
||||
)
|
||||
except (Exception, SystemExit) as ex: # pylint: disable=broad-except
|
||||
if isinstance(ex, salt.exceptions.NotImplemented):
|
||||
data['return'] = six.text_type(ex)
|
||||
data["return"] = six.text_type(ex)
|
||||
else:
|
||||
data['return'] = 'Exception occurred in {0} {1}: {2}'.format(
|
||||
self.client,
|
||||
fun,
|
||||
traceback.format_exc(),
|
||||
)
|
||||
data['success'] = False
|
||||
data["return"] = "Exception occurred in {0} {1}: {2}".format(
|
||||
self.client, fun, traceback.format_exc(),
|
||||
)
|
||||
data["success"] = False
|
||||
|
||||
if self.store_job:
|
||||
try:
|
||||
salt.utils.job.store_job(
|
||||
self.opts,
|
||||
{
|
||||
'id': self.opts['id'],
|
||||
'tgt': self.opts['id'],
|
||||
'jid': data['jid'],
|
||||
'return': data,
|
||||
"id": self.opts["id"],
|
||||
"tgt": self.opts["id"],
|
||||
"jid": data["jid"],
|
||||
"return": data,
|
||||
},
|
||||
event=None,
|
||||
mminion=self.mminion,
|
||||
)
|
||||
)
|
||||
except salt.exceptions.SaltCacheError:
|
||||
log.error('Could not store job cache info. '
|
||||
'Job details for this run may be unavailable.')
|
||||
log.error(
|
||||
"Could not store job cache info. "
|
||||
"Job details for this run may be unavailable."
|
||||
)
|
||||
|
||||
# Outputters _can_ mutate data so write to the job cache first!
|
||||
namespaced_event.fire_event(data, 'ret')
|
||||
namespaced_event.fire_event(data, "ret")
|
||||
|
||||
# if we fired an event, make sure to delete the event object.
|
||||
# This will ensure that we call destroy, which will do the 0MQ linger
|
||||
log.info('Runner completed: %s', data['jid'])
|
||||
return data if full_return else data['return']
|
||||
log.info("Runner completed: %s", data["jid"])
|
||||
return data if full_return else data["return"]
|
||||
|
||||
def get_docs(self, arg=None):
|
||||
'''
|
||||
"""
|
||||
Return a dictionary of functions and the inline documentation for each
|
||||
'''
|
||||
"""
|
||||
if arg:
|
||||
if '*' in arg:
|
||||
if "*" in arg:
|
||||
target_mod = arg
|
||||
_use_fnmatch = True
|
||||
else:
|
||||
target_mod = arg + '.' if not arg.endswith('.') else arg
|
||||
target_mod = arg + "." if not arg.endswith(".") else arg
|
||||
_use_fnmatch = False
|
||||
if _use_fnmatch:
|
||||
docs = [(fun, self.functions[fun].__doc__)
|
||||
for fun in fnmatch.filter(self.functions, target_mod)]
|
||||
docs = [
|
||||
(fun, self.functions[fun].__doc__)
|
||||
for fun in fnmatch.filter(self.functions, target_mod)
|
||||
]
|
||||
else:
|
||||
docs = [(fun, self.functions[fun].__doc__)
|
||||
for fun in sorted(self.functions)
|
||||
if fun == arg or fun.startswith(target_mod)]
|
||||
docs = [
|
||||
(fun, self.functions[fun].__doc__)
|
||||
for fun in sorted(self.functions)
|
||||
if fun == arg or fun.startswith(target_mod)
|
||||
]
|
||||
else:
|
||||
docs = [(fun, self.functions[fun].__doc__)
|
||||
for fun in sorted(self.functions)]
|
||||
docs = [
|
||||
(fun, self.functions[fun].__doc__) for fun in sorted(self.functions)
|
||||
]
|
||||
docs = dict(docs)
|
||||
return salt.utils.doc.strip_rst(docs)
|
||||
|
||||
|
||||
class AsyncClientMixin(object):
|
||||
'''
|
||||
"""
|
||||
A mixin for *Client interfaces to enable easy asynchronous function execution
|
||||
'''
|
||||
"""
|
||||
|
||||
client = None
|
||||
tag_prefix = None
|
||||
|
||||
def _proc_function(self, fun, low, user, tag, jid, daemonize=True):
|
||||
'''
|
||||
"""
|
||||
Run this method in a multiprocess target to execute the function in a
|
||||
multiprocess and fire the return data on the event bus
|
||||
'''
|
||||
"""
|
||||
if daemonize and not salt.utils.platform.is_windows():
|
||||
# Shutdown the multiprocessing before daemonizing
|
||||
salt.log.setup.shutdown_multiprocessing_logging()
|
||||
|
@ -465,14 +500,14 @@ class AsyncClientMixin(object):
|
|||
salt.log.setup.setup_multiprocessing_logging()
|
||||
|
||||
# pack a few things into low
|
||||
low['__jid__'] = jid
|
||||
low['__user__'] = user
|
||||
low['__tag__'] = tag
|
||||
low["__jid__"] = jid
|
||||
low["__user__"] = user
|
||||
low["__tag__"] = tag
|
||||
|
||||
return self.low(fun, low, full_return=False)
|
||||
|
||||
def cmd_async(self, low):
|
||||
'''
|
||||
"""
|
||||
Execute a function asynchronously; eauth is respected
|
||||
|
||||
This function requires that :conf_master:`external_auth` is configured
|
||||
|
@ -488,25 +523,26 @@ class AsyncClientMixin(object):
|
|||
'password': 'saltdev',
|
||||
})
|
||||
{'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'}
|
||||
'''
|
||||
"""
|
||||
return self.master_call(**low)
|
||||
|
||||
def _gen_async_pub(self, jid=None):
|
||||
if jid is None:
|
||||
jid = salt.utils.jid.gen_jid(self.opts)
|
||||
tag = salt.utils.event.tagify(jid, prefix=self.tag_prefix)
|
||||
return {'tag': tag, 'jid': jid}
|
||||
return {"tag": tag, "jid": jid}
|
||||
|
||||
def asynchronous(self, fun, low, user='UNKNOWN', pub=None):
|
||||
'''
|
||||
def asynchronous(self, fun, low, user="UNKNOWN", pub=None):
|
||||
"""
|
||||
Execute the function in a multiprocess and return the event tag to use
|
||||
to watch for the return
|
||||
'''
|
||||
"""
|
||||
async_pub = pub if pub is not None else self._gen_async_pub()
|
||||
proc = salt.utils.process.SignalHandlingProcess(
|
||||
target=self._proc_function,
|
||||
name='ProcessFunc',
|
||||
args=(fun, low, user, async_pub['tag'], async_pub['jid']))
|
||||
target=self._proc_function,
|
||||
name="ProcessFunc",
|
||||
args=(fun, low, user, async_pub["tag"], async_pub["jid"]),
|
||||
)
|
||||
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
|
||||
# Reset current signals before starting the process in
|
||||
# order not to inherit the current signal handlers
|
||||
|
@ -515,36 +551,40 @@ class AsyncClientMixin(object):
|
|||
return async_pub
|
||||
|
||||
def print_async_event(self, suffix, event):
|
||||
'''
|
||||
"""
|
||||
Print all of the events with the prefix 'tag'
|
||||
'''
|
||||
"""
|
||||
if not isinstance(event, dict):
|
||||
return
|
||||
|
||||
# if we are "quiet", don't print
|
||||
if self.opts.get('quiet', False):
|
||||
if self.opts.get("quiet", False):
|
||||
return
|
||||
|
||||
# some suffixes we don't want to print
|
||||
if suffix in ('new',):
|
||||
if suffix in ("new",):
|
||||
return
|
||||
|
||||
try:
|
||||
outputter = self.opts.get('output', event.get('outputter', None) or event.get('return').get('outputter'))
|
||||
outputter = self.opts.get(
|
||||
"output",
|
||||
event.get("outputter", None) or event.get("return").get("outputter"),
|
||||
)
|
||||
except AttributeError:
|
||||
outputter = None
|
||||
|
||||
# if this is a ret, we have our own set of rules
|
||||
if suffix == 'ret':
|
||||
if suffix == "ret":
|
||||
# Check if outputter was passed in the return data. If this is the case,
|
||||
# then the return data will be a dict two keys: 'data' and 'outputter'
|
||||
if isinstance(event.get('return'), dict) \
|
||||
and set(event['return']) == set(('data', 'outputter')):
|
||||
event_data = event['return']['data']
|
||||
outputter = event['return']['outputter']
|
||||
if isinstance(event.get("return"), dict) and set(event["return"]) == set(
|
||||
("data", "outputter")
|
||||
):
|
||||
event_data = event["return"]["data"]
|
||||
outputter = event["return"]["outputter"]
|
||||
else:
|
||||
event_data = event['return']
|
||||
event_data = event["return"]
|
||||
else:
|
||||
event_data = {'suffix': suffix, 'event': event}
|
||||
event_data = {"suffix": suffix, "event": event}
|
||||
|
||||
salt.output.display_output(event_data, outputter, self.opts)
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
# encoding: utf-8
|
||||
'''
|
||||
"""
|
||||
The main entry point for salt-api
|
||||
'''
|
||||
"""
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import signal
|
||||
|
||||
import logging
|
||||
import signal
|
||||
|
||||
# Import salt-api libs
|
||||
import salt.loader
|
||||
|
@ -15,9 +16,10 @@ log = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class RunNetapi(salt.utils.process.SignalHandlingProcess):
|
||||
'''
|
||||
"""
|
||||
Runner class that's pickable for netapi modules
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts, fname, **kwargs):
|
||||
super(RunNetapi, self).__init__(**kwargs)
|
||||
self.opts = opts
|
||||
|
@ -28,18 +30,18 @@ class RunNetapi(salt.utils.process.SignalHandlingProcess):
|
|||
# process so that a register_after_fork() equivalent will work on Windows.
|
||||
def __setstate__(self, state):
|
||||
self.__init__(
|
||||
state['opts'],
|
||||
state['fname'],
|
||||
log_queue=state['log_queue'],
|
||||
log_queue_level=state['log_queue_level']
|
||||
state["opts"],
|
||||
state["fname"],
|
||||
log_queue=state["log_queue"],
|
||||
log_queue_level=state["log_queue_level"],
|
||||
)
|
||||
|
||||
def __getstate__(self):
|
||||
return {
|
||||
'opts': self.opts,
|
||||
'fname': self.fname,
|
||||
'log_queue': self.log_queue,
|
||||
'log_queue_level': self.log_queue_level
|
||||
"opts": self.opts,
|
||||
"fname": self.fname,
|
||||
"log_queue": self.log_queue,
|
||||
"log_queue_level": self.log_queue_level,
|
||||
}
|
||||
|
||||
def run(self):
|
||||
|
@ -49,34 +51,36 @@ class RunNetapi(salt.utils.process.SignalHandlingProcess):
|
|||
|
||||
|
||||
class NetapiClient(object):
|
||||
'''
|
||||
"""
|
||||
Start each netapi module that is configured to run
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts):
|
||||
self.opts = opts
|
||||
self.process_manager = salt.utils.process.ProcessManager(name='NetAPIProcessManager')
|
||||
self.process_manager = salt.utils.process.ProcessManager(
|
||||
name="NetAPIProcessManager"
|
||||
)
|
||||
self.netapi = salt.loader.netapi(self.opts)
|
||||
|
||||
def run(self):
|
||||
'''
|
||||
"""
|
||||
Load and start all available api modules
|
||||
'''
|
||||
"""
|
||||
if not self.netapi:
|
||||
log.error("Did not find any netapi configurations, nothing to start")
|
||||
|
||||
kwargs = {}
|
||||
if salt.utils.platform.is_windows():
|
||||
kwargs['log_queue'] = salt.log.setup.get_multiprocessing_logging_queue()
|
||||
kwargs['log_queue_level'] = salt.log.setup.get_multiprocessing_logging_level()
|
||||
kwargs["log_queue"] = salt.log.setup.get_multiprocessing_logging_queue()
|
||||
kwargs[
|
||||
"log_queue_level"
|
||||
] = salt.log.setup.get_multiprocessing_logging_level()
|
||||
|
||||
for fun in self.netapi:
|
||||
if fun.endswith('.start'):
|
||||
log.info('Starting %s netapi module', fun)
|
||||
if fun.endswith(".start"):
|
||||
log.info("Starting %s netapi module", fun)
|
||||
self.process_manager.add_process(
|
||||
RunNetapi,
|
||||
args=(self.opts, fun),
|
||||
kwargs=kwargs,
|
||||
name='RunNetapi'
|
||||
RunNetapi, args=(self.opts, fun), kwargs=kwargs, name="RunNetapi"
|
||||
)
|
||||
|
||||
# Install the SIGINT/SIGTERM handlers if not done so far
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -2,123 +2,104 @@
|
|||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import os
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
|
||||
# Import Salt libs
|
||||
import salt.config
|
||||
import salt.utils.args
|
||||
import salt.syspaths as syspaths
|
||||
import salt.utils.args
|
||||
from salt.exceptions import SaltClientError # Temporary
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SSHClient(object):
|
||||
'''
|
||||
"""
|
||||
Create a client object for executing routines via the salt-ssh backend
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
'''
|
||||
def __init__(self,
|
||||
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
|
||||
mopts=None,
|
||||
disable_custom_roster=False):
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
c_path=os.path.join(syspaths.CONFIG_DIR, "master"),
|
||||
mopts=None,
|
||||
disable_custom_roster=False,
|
||||
):
|
||||
if mopts:
|
||||
self.opts = mopts
|
||||
else:
|
||||
if os.path.isdir(c_path):
|
||||
log.warning(
|
||||
'%s expects a file path not a directory path(%s) to '
|
||||
'its \'c_path\' keyword argument',
|
||||
self.__class__.__name__, c_path
|
||||
"%s expects a file path not a directory path(%s) to "
|
||||
"its 'c_path' keyword argument",
|
||||
self.__class__.__name__,
|
||||
c_path,
|
||||
)
|
||||
self.opts = salt.config.client_config(c_path)
|
||||
|
||||
# Salt API should never offer a custom roster!
|
||||
self.opts['__disable_custom_roster'] = disable_custom_roster
|
||||
self.opts["__disable_custom_roster"] = disable_custom_roster
|
||||
|
||||
def _prep_ssh(
|
||||
self,
|
||||
tgt,
|
||||
fun,
|
||||
arg=(),
|
||||
timeout=None,
|
||||
tgt_type='glob',
|
||||
kwarg=None,
|
||||
**kwargs):
|
||||
'''
|
||||
self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, **kwargs
|
||||
):
|
||||
"""
|
||||
Prepare the arguments
|
||||
'''
|
||||
"""
|
||||
opts = copy.deepcopy(self.opts)
|
||||
opts.update(kwargs)
|
||||
if timeout:
|
||||
opts['timeout'] = timeout
|
||||
opts["timeout"] = timeout
|
||||
arg = salt.utils.args.condition_input(arg, kwarg)
|
||||
opts['argv'] = [fun] + arg
|
||||
opts['selected_target_option'] = tgt_type
|
||||
opts['tgt'] = tgt
|
||||
opts['arg'] = arg
|
||||
opts["argv"] = [fun] + arg
|
||||
opts["selected_target_option"] = tgt_type
|
||||
opts["tgt"] = tgt
|
||||
opts["arg"] = arg
|
||||
return salt.client.ssh.SSH(opts)
|
||||
|
||||
def cmd_iter(
|
||||
self,
|
||||
tgt,
|
||||
fun,
|
||||
arg=(),
|
||||
timeout=None,
|
||||
tgt_type='glob',
|
||||
ret='',
|
||||
kwarg=None,
|
||||
**kwargs):
|
||||
'''
|
||||
self,
|
||||
tgt,
|
||||
fun,
|
||||
arg=(),
|
||||
timeout=None,
|
||||
tgt_type="glob",
|
||||
ret="",
|
||||
kwarg=None,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Execute a single command via the salt-ssh subsystem and return a
|
||||
generator
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
'''
|
||||
ssh = self._prep_ssh(
|
||||
tgt,
|
||||
fun,
|
||||
arg,
|
||||
timeout,
|
||||
tgt_type,
|
||||
kwarg,
|
||||
**kwargs)
|
||||
for ret in ssh.run_iter(jid=kwargs.get('jid', None)):
|
||||
"""
|
||||
ssh = self._prep_ssh(tgt, fun, arg, timeout, tgt_type, kwarg, **kwargs)
|
||||
for ret in ssh.run_iter(jid=kwargs.get("jid", None)):
|
||||
yield ret
|
||||
|
||||
def cmd(self,
|
||||
tgt,
|
||||
fun,
|
||||
arg=(),
|
||||
timeout=None,
|
||||
tgt_type='glob',
|
||||
kwarg=None,
|
||||
**kwargs):
|
||||
'''
|
||||
def cmd(
|
||||
self, tgt, fun, arg=(), timeout=None, tgt_type="glob", kwarg=None, **kwargs
|
||||
):
|
||||
"""
|
||||
Execute a single command via the salt-ssh subsystem and return all
|
||||
routines at once
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
'''
|
||||
ssh = self._prep_ssh(
|
||||
tgt,
|
||||
fun,
|
||||
arg,
|
||||
timeout,
|
||||
tgt_type,
|
||||
kwarg,
|
||||
**kwargs)
|
||||
"""
|
||||
ssh = self._prep_ssh(tgt, fun, arg, timeout, tgt_type, kwarg, **kwargs)
|
||||
final = {}
|
||||
for ret in ssh.run_iter(jid=kwargs.get('jid', None)):
|
||||
for ret in ssh.run_iter(jid=kwargs.get("jid", None)):
|
||||
final.update(ret)
|
||||
return final
|
||||
|
||||
def cmd_sync(self, low):
|
||||
'''
|
||||
"""
|
||||
Execute a salt-ssh call synchronously.
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
|
@ -135,24 +116,26 @@ class SSHClient(object):
|
|||
'kwarg'={}
|
||||
})
|
||||
{'silver': {'fun_args': [], 'jid': '20141202152721523072', 'return': True, 'retcode': 0, 'success': True, 'fun': 'test.ping', 'id': 'silver'}}
|
||||
'''
|
||||
"""
|
||||
|
||||
kwargs = copy.deepcopy(low)
|
||||
|
||||
for ignore in ['tgt', 'fun', 'arg', 'timeout', 'tgt_type', 'kwarg']:
|
||||
for ignore in ["tgt", "fun", "arg", "timeout", "tgt_type", "kwarg"]:
|
||||
if ignore in kwargs:
|
||||
del kwargs[ignore]
|
||||
|
||||
return self.cmd(low['tgt'],
|
||||
low['fun'],
|
||||
low.get('arg', []),
|
||||
low.get('timeout'),
|
||||
low.get('tgt_type'),
|
||||
low.get('kwarg'),
|
||||
**kwargs)
|
||||
return self.cmd(
|
||||
low["tgt"],
|
||||
low["fun"],
|
||||
low.get("arg", []),
|
||||
low.get("timeout"),
|
||||
low.get("tgt_type"),
|
||||
low.get("kwarg"),
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def cmd_async(self, low, timeout=None):
|
||||
'''
|
||||
"""
|
||||
Execute aa salt-ssh asynchronously
|
||||
|
||||
WARNING: Eauth is **NOT** respected
|
||||
|
@ -167,22 +150,23 @@ class SSHClient(object):
|
|||
'kwarg'={}
|
||||
})
|
||||
{'silver': {'fun_args': [], 'jid': '20141202152721523072', 'return': True, 'retcode': 0, 'success': True, 'fun': 'test.ping', 'id': 'silver'}}
|
||||
'''
|
||||
"""
|
||||
# TODO Not implemented
|
||||
raise SaltClientError
|
||||
|
||||
def cmd_subset(
|
||||
self,
|
||||
tgt,
|
||||
fun,
|
||||
arg=(),
|
||||
timeout=None,
|
||||
tgt_type='glob',
|
||||
ret='',
|
||||
kwarg=None,
|
||||
sub=3,
|
||||
**kwargs):
|
||||
'''
|
||||
self,
|
||||
tgt,
|
||||
fun,
|
||||
arg=(),
|
||||
timeout=None,
|
||||
tgt_type="glob",
|
||||
ret="",
|
||||
kwarg=None,
|
||||
sub=3,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Execute a command on a random subset of the targeted systems
|
||||
|
||||
The function signature is the same as :py:meth:`cmd` with the
|
||||
|
@ -198,17 +182,16 @@ class SSHClient(object):
|
|||
{'jerry': True}
|
||||
|
||||
.. versionadded:: 2017.7.0
|
||||
'''
|
||||
minion_ret = self.cmd(tgt,
|
||||
'sys.list_functions',
|
||||
tgt_type=tgt_type,
|
||||
**kwargs)
|
||||
"""
|
||||
minion_ret = self.cmd(tgt, "sys.list_functions", tgt_type=tgt_type, **kwargs)
|
||||
minions = list(minion_ret)
|
||||
random.shuffle(minions)
|
||||
f_tgt = []
|
||||
for minion in minions:
|
||||
if fun in minion_ret[minion]['return']:
|
||||
if fun in minion_ret[minion]["return"]:
|
||||
f_tgt.append(minion)
|
||||
if len(f_tgt) >= sub:
|
||||
break
|
||||
return self.cmd_iter(f_tgt, fun, arg, timeout, tgt_type='list', ret=ret, kwarg=kwarg, **kwargs)
|
||||
return self.cmd_iter(
|
||||
f_tgt, fun, arg, timeout, tgt_type="list", ret=ret, kwarg=kwarg, **kwargs
|
||||
)
|
||||
|
|
|
@ -1,34 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Manage transport commands via ssh
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Import python libs
|
||||
import re
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
# Import salt libs
|
||||
import salt.defaults.exitcodes
|
||||
import salt.utils.json
|
||||
import salt.utils.nb_popen
|
||||
import salt.utils.vt
|
||||
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
SSH_PASSWORD_PROMPT_RE = re.compile(r'(?:.*)[Pp]assword(?: for .*)?:', re.M)
|
||||
KEY_VALID_RE = re.compile(r'.*\(yes\/no\).*')
|
||||
SSH_PRIVATE_KEY_PASSWORD_PROMPT_RE = re.compile(r'Enter passphrase for key', re.M)
|
||||
SSH_PASSWORD_PROMPT_RE = re.compile(r"(?:.*)[Pp]assword(?: for .*)?:", re.M)
|
||||
KEY_VALID_RE = re.compile(r".*\(yes\/no\).*")
|
||||
SSH_PRIVATE_KEY_PASSWORD_PROMPT_RE = re.compile(r"Enter passphrase for key", re.M)
|
||||
|
||||
# Keep these in sync with ./__init__.py
|
||||
RSTR = '_edbc7885e4f9aac9b83b35999b68d015148caf467b78fa39c05f669c0ff89878'
|
||||
RSTR_RE = re.compile(r'(?:^|\r?\n)' + RSTR + r'(?:\r?\n|$)')
|
||||
RSTR = "_edbc7885e4f9aac9b83b35999b68d015148caf467b78fa39c05f669c0ff89878"
|
||||
RSTR_RE = re.compile(r"(?:^|\r?\n)" + RSTR + r"(?:\r?\n|$)")
|
||||
|
||||
|
||||
class NoPasswdError(Exception):
|
||||
|
@ -40,9 +40,9 @@ class KeyAcceptError(Exception):
|
|||
|
||||
|
||||
def gen_key(path):
|
||||
'''
|
||||
"""
|
||||
Generate a key for use with salt-ssh
|
||||
'''
|
||||
"""
|
||||
cmd = 'ssh-keygen -P "" -f {0} -t rsa -q'.format(path)
|
||||
if not os.path.isdir(os.path.dirname(path)):
|
||||
os.makedirs(os.path.dirname(path))
|
||||
|
@ -50,15 +50,16 @@ def gen_key(path):
|
|||
|
||||
|
||||
def gen_shell(opts, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Return the correct shell interface for the target system
|
||||
'''
|
||||
if kwargs['winrm']:
|
||||
"""
|
||||
if kwargs["winrm"]:
|
||||
try:
|
||||
import saltwinshell
|
||||
|
||||
shell = saltwinshell.Shell(opts, **kwargs)
|
||||
except ImportError:
|
||||
log.error('The saltwinshell library is not available')
|
||||
log.error("The saltwinshell library is not available")
|
||||
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
|
||||
else:
|
||||
shell = Shell(opts, **kwargs)
|
||||
|
@ -66,30 +67,32 @@ def gen_shell(opts, **kwargs):
|
|||
|
||||
|
||||
class Shell(object):
|
||||
'''
|
||||
"""
|
||||
Create a shell connection object to encapsulate ssh executions
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
opts,
|
||||
host,
|
||||
user=None,
|
||||
port=None,
|
||||
passwd=None,
|
||||
priv=None,
|
||||
priv_passwd=None,
|
||||
timeout=None,
|
||||
sudo=False,
|
||||
tty=False,
|
||||
mods=None,
|
||||
identities_only=False,
|
||||
sudo_user=None,
|
||||
remote_port_forwards=None,
|
||||
winrm=False,
|
||||
ssh_options=None):
|
||||
self,
|
||||
opts,
|
||||
host,
|
||||
user=None,
|
||||
port=None,
|
||||
passwd=None,
|
||||
priv=None,
|
||||
priv_passwd=None,
|
||||
timeout=None,
|
||||
sudo=False,
|
||||
tty=False,
|
||||
mods=None,
|
||||
identities_only=False,
|
||||
sudo_user=None,
|
||||
remote_port_forwards=None,
|
||||
winrm=False,
|
||||
ssh_options=None,
|
||||
):
|
||||
self.opts = opts
|
||||
# ssh <ipv6>, but scp [<ipv6]:/path
|
||||
self.host = host.strip('[]')
|
||||
self.host = host.strip("[]")
|
||||
self.user = user
|
||||
self.port = port
|
||||
self.passwd = six.text_type(passwd) if passwd else passwd
|
||||
|
@ -101,198 +104,201 @@ class Shell(object):
|
|||
self.mods = mods
|
||||
self.identities_only = identities_only
|
||||
self.remote_port_forwards = remote_port_forwards
|
||||
self.ssh_options = '' if ssh_options is None else ssh_options
|
||||
self.ssh_options = "" if ssh_options is None else ssh_options
|
||||
|
||||
def get_error(self, errstr):
|
||||
'''
|
||||
"""
|
||||
Parse out an error and return a targeted error string
|
||||
'''
|
||||
for line in errstr.split('\n'):
|
||||
if line.startswith('ssh:'):
|
||||
"""
|
||||
for line in errstr.split("\n"):
|
||||
if line.startswith("ssh:"):
|
||||
return line
|
||||
if line.startswith('Pseudo-terminal'):
|
||||
if line.startswith("Pseudo-terminal"):
|
||||
continue
|
||||
if 'to the list of known hosts.' in line:
|
||||
if "to the list of known hosts." in line:
|
||||
continue
|
||||
return line
|
||||
return errstr
|
||||
|
||||
def _key_opts(self):
|
||||
'''
|
||||
"""
|
||||
Return options for the ssh command base for Salt to call
|
||||
'''
|
||||
"""
|
||||
options = [
|
||||
'KbdInteractiveAuthentication=no',
|
||||
]
|
||||
"KbdInteractiveAuthentication=no",
|
||||
]
|
||||
if self.passwd:
|
||||
options.append('PasswordAuthentication=yes')
|
||||
options.append("PasswordAuthentication=yes")
|
||||
else:
|
||||
options.append('PasswordAuthentication=no')
|
||||
if self.opts.get('_ssh_version', (0,)) > (4, 9):
|
||||
options.append('GSSAPIAuthentication=no')
|
||||
options.append('ConnectTimeout={0}'.format(self.timeout))
|
||||
if self.opts.get('ignore_host_keys'):
|
||||
options.append('StrictHostKeyChecking=no')
|
||||
if self.opts.get('no_host_keys'):
|
||||
options.extend(['StrictHostKeyChecking=no',
|
||||
'UserKnownHostsFile=/dev/null'])
|
||||
known_hosts = self.opts.get('known_hosts_file')
|
||||
options.append("PasswordAuthentication=no")
|
||||
if self.opts.get("_ssh_version", (0,)) > (4, 9):
|
||||
options.append("GSSAPIAuthentication=no")
|
||||
options.append("ConnectTimeout={0}".format(self.timeout))
|
||||
if self.opts.get("ignore_host_keys"):
|
||||
options.append("StrictHostKeyChecking=no")
|
||||
if self.opts.get("no_host_keys"):
|
||||
options.extend(["StrictHostKeyChecking=no", "UserKnownHostsFile=/dev/null"])
|
||||
known_hosts = self.opts.get("known_hosts_file")
|
||||
if known_hosts and os.path.isfile(known_hosts):
|
||||
options.append('UserKnownHostsFile={0}'.format(known_hosts))
|
||||
options.append("UserKnownHostsFile={0}".format(known_hosts))
|
||||
if self.port:
|
||||
options.append('Port={0}'.format(self.port))
|
||||
if self.priv and self.priv != 'agent-forwarding':
|
||||
options.append('IdentityFile={0}'.format(self.priv))
|
||||
options.append("Port={0}".format(self.port))
|
||||
if self.priv and self.priv != "agent-forwarding":
|
||||
options.append("IdentityFile={0}".format(self.priv))
|
||||
if self.user:
|
||||
options.append('User={0}'.format(self.user))
|
||||
options.append("User={0}".format(self.user))
|
||||
if self.identities_only:
|
||||
options.append('IdentitiesOnly=yes')
|
||||
options.append("IdentitiesOnly=yes")
|
||||
|
||||
ret = []
|
||||
for option in options:
|
||||
ret.append('-o {0} '.format(option))
|
||||
return ''.join(ret)
|
||||
ret.append("-o {0} ".format(option))
|
||||
return "".join(ret)
|
||||
|
||||
def _passwd_opts(self):
|
||||
'''
|
||||
"""
|
||||
Return options to pass to ssh
|
||||
'''
|
||||
"""
|
||||
# TODO ControlMaster does not work without ControlPath
|
||||
# user could take advantage of it if they set ControlPath in their
|
||||
# ssh config. Also, ControlPersist not widely available.
|
||||
options = ['ControlMaster=auto',
|
||||
'StrictHostKeyChecking=no',
|
||||
]
|
||||
if self.opts['_ssh_version'] > (4, 9):
|
||||
options.append('GSSAPIAuthentication=no')
|
||||
options.append('ConnectTimeout={0}'.format(self.timeout))
|
||||
if self.opts.get('ignore_host_keys'):
|
||||
options.append('StrictHostKeyChecking=no')
|
||||
if self.opts.get('no_host_keys'):
|
||||
options.extend(['StrictHostKeyChecking=no',
|
||||
'UserKnownHostsFile=/dev/null'])
|
||||
options = [
|
||||
"ControlMaster=auto",
|
||||
"StrictHostKeyChecking=no",
|
||||
]
|
||||
if self.opts["_ssh_version"] > (4, 9):
|
||||
options.append("GSSAPIAuthentication=no")
|
||||
options.append("ConnectTimeout={0}".format(self.timeout))
|
||||
if self.opts.get("ignore_host_keys"):
|
||||
options.append("StrictHostKeyChecking=no")
|
||||
if self.opts.get("no_host_keys"):
|
||||
options.extend(["StrictHostKeyChecking=no", "UserKnownHostsFile=/dev/null"])
|
||||
|
||||
if self.passwd:
|
||||
options.extend(['PasswordAuthentication=yes',
|
||||
'PubkeyAuthentication=yes'])
|
||||
options.extend(["PasswordAuthentication=yes", "PubkeyAuthentication=yes"])
|
||||
else:
|
||||
options.extend(['PasswordAuthentication=no',
|
||||
'PubkeyAuthentication=yes',
|
||||
'KbdInteractiveAuthentication=no',
|
||||
'ChallengeResponseAuthentication=no',
|
||||
'BatchMode=yes'])
|
||||
options.extend(
|
||||
[
|
||||
"PasswordAuthentication=no",
|
||||
"PubkeyAuthentication=yes",
|
||||
"KbdInteractiveAuthentication=no",
|
||||
"ChallengeResponseAuthentication=no",
|
||||
"BatchMode=yes",
|
||||
]
|
||||
)
|
||||
if self.port:
|
||||
options.append('Port={0}'.format(self.port))
|
||||
options.append("Port={0}".format(self.port))
|
||||
if self.user:
|
||||
options.append('User={0}'.format(self.user))
|
||||
options.append("User={0}".format(self.user))
|
||||
if self.identities_only:
|
||||
options.append('IdentitiesOnly=yes')
|
||||
options.append("IdentitiesOnly=yes")
|
||||
|
||||
ret = []
|
||||
for option in options:
|
||||
ret.append('-o {0} '.format(option))
|
||||
return ''.join(ret)
|
||||
ret.append("-o {0} ".format(option))
|
||||
return "".join(ret)
|
||||
|
||||
def _ssh_opts(self):
|
||||
return ' '.join(['-o {0}'.format(opt)
|
||||
for opt in self.ssh_options])
|
||||
return " ".join(["-o {0}".format(opt) for opt in self.ssh_options])
|
||||
|
||||
def _copy_id_str_old(self):
|
||||
'''
|
||||
"""
|
||||
Return the string to execute ssh-copy-id
|
||||
'''
|
||||
"""
|
||||
if self.passwd:
|
||||
# Using single quotes prevents shell expansion and
|
||||
# passwords containing '$'
|
||||
return "{0} {1} '{2} -p {3} {4} {5}@{6}'".format(
|
||||
'ssh-copy-id',
|
||||
'-i {0}.pub'.format(self.priv),
|
||||
self._passwd_opts(),
|
||||
self.port,
|
||||
self._ssh_opts(),
|
||||
self.user,
|
||||
self.host)
|
||||
"ssh-copy-id",
|
||||
"-i {0}.pub".format(self.priv),
|
||||
self._passwd_opts(),
|
||||
self.port,
|
||||
self._ssh_opts(),
|
||||
self.user,
|
||||
self.host,
|
||||
)
|
||||
return None
|
||||
|
||||
def _copy_id_str_new(self):
|
||||
'''
|
||||
"""
|
||||
Since newer ssh-copy-id commands ingest option differently we need to
|
||||
have two commands
|
||||
'''
|
||||
"""
|
||||
if self.passwd:
|
||||
# Using single quotes prevents shell expansion and
|
||||
# passwords containing '$'
|
||||
return "{0} {1} {2} -p {3} {4} {5}@{6}".format(
|
||||
'ssh-copy-id',
|
||||
'-i {0}.pub'.format(self.priv),
|
||||
self._passwd_opts(),
|
||||
self.port,
|
||||
self._ssh_opts(),
|
||||
self.user,
|
||||
self.host)
|
||||
"ssh-copy-id",
|
||||
"-i {0}.pub".format(self.priv),
|
||||
self._passwd_opts(),
|
||||
self.port,
|
||||
self._ssh_opts(),
|
||||
self.user,
|
||||
self.host,
|
||||
)
|
||||
return None
|
||||
|
||||
def copy_id(self):
|
||||
'''
|
||||
"""
|
||||
Execute ssh-copy-id to plant the id file on the target
|
||||
'''
|
||||
"""
|
||||
stdout, stderr, retcode = self._run_cmd(self._copy_id_str_old())
|
||||
if salt.defaults.exitcodes.EX_OK != retcode and 'Usage' in stderr:
|
||||
if salt.defaults.exitcodes.EX_OK != retcode and "Usage" in stderr:
|
||||
stdout, stderr, retcode = self._run_cmd(self._copy_id_str_new())
|
||||
return stdout, stderr, retcode
|
||||
|
||||
def _cmd_str(self, cmd, ssh='ssh'):
|
||||
'''
|
||||
def _cmd_str(self, cmd, ssh="ssh"):
|
||||
"""
|
||||
Return the cmd string to execute
|
||||
'''
|
||||
"""
|
||||
|
||||
# TODO: if tty, then our SSH_SHIM cannot be supplied from STDIN Will
|
||||
# need to deliver the SHIM to the remote host and execute it there
|
||||
|
||||
command = [ssh]
|
||||
if ssh != 'scp':
|
||||
if ssh != "scp":
|
||||
command.append(self.host)
|
||||
if self.tty and ssh == 'ssh':
|
||||
command.append('-t -t')
|
||||
if self.tty and ssh == "ssh":
|
||||
command.append("-t -t")
|
||||
if self.passwd or self.priv:
|
||||
command.append(self.priv and self._key_opts() or self._passwd_opts())
|
||||
if ssh != 'scp' and self.remote_port_forwards:
|
||||
command.append(' '.join(['-R {0}'.format(item)
|
||||
for item in self.remote_port_forwards.split(',')]))
|
||||
if ssh != "scp" and self.remote_port_forwards:
|
||||
command.append(
|
||||
" ".join(
|
||||
[
|
||||
"-R {0}".format(item)
|
||||
for item in self.remote_port_forwards.split(",")
|
||||
]
|
||||
)
|
||||
)
|
||||
if self.ssh_options:
|
||||
command.append(self._ssh_opts())
|
||||
|
||||
command.append(cmd)
|
||||
|
||||
return ' '.join(command)
|
||||
return " ".join(command)
|
||||
|
||||
def _old_run_cmd(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Cleanly execute the command string
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
cmd,
|
||||
shell=True,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||
)
|
||||
|
||||
data = proc.communicate()
|
||||
return data[0], data[1], proc.returncode
|
||||
except Exception: # pylint: disable=broad-except
|
||||
return ('local', 'Unknown Error', None)
|
||||
return ("local", "Unknown Error", None)
|
||||
|
||||
def _run_nb_cmd(self, cmd):
|
||||
'''
|
||||
"""
|
||||
cmd iterator
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
proc = salt.utils.nb_popen.NonBlockingPopen(
|
||||
cmd,
|
||||
shell=True,
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
cmd, shell=True, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||
)
|
||||
while True:
|
||||
time.sleep(0.1)
|
||||
|
@ -305,20 +311,20 @@ class Shell(object):
|
|||
err = self.get_error(err)
|
||||
yield out, err, rcode
|
||||
except Exception: # pylint: disable=broad-except
|
||||
yield ('', 'Unknown Error', None)
|
||||
yield ("", "Unknown Error", None)
|
||||
|
||||
def exec_nb_cmd(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Yield None until cmd finished
|
||||
'''
|
||||
"""
|
||||
r_out = []
|
||||
r_err = []
|
||||
rcode = None
|
||||
cmd = self._cmd_str(cmd)
|
||||
|
||||
logmsg = 'Executing non-blocking command: {0}'.format(cmd)
|
||||
logmsg = "Executing non-blocking command: {0}".format(cmd)
|
||||
if self.passwd:
|
||||
logmsg = logmsg.replace(self.passwd, ('*' * 6))
|
||||
logmsg = logmsg.replace(self.passwd, ("*" * 6))
|
||||
log.debug(logmsg)
|
||||
|
||||
for out, err, rcode in self._run_nb_cmd(cmd):
|
||||
|
@ -327,19 +333,19 @@ class Shell(object):
|
|||
if err is not None:
|
||||
r_err.append(err)
|
||||
yield None, None, None
|
||||
yield ''.join(r_out), ''.join(r_err), rcode
|
||||
yield "".join(r_out), "".join(r_err), rcode
|
||||
|
||||
def exec_cmd(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Execute a remote command
|
||||
'''
|
||||
"""
|
||||
cmd = self._cmd_str(cmd)
|
||||
|
||||
logmsg = 'Executing command: {0}'.format(cmd)
|
||||
logmsg = "Executing command: {0}".format(cmd)
|
||||
if self.passwd:
|
||||
logmsg = logmsg.replace(self.passwd, ('*' * 6))
|
||||
if 'decode("base64")' in logmsg or 'base64.b64decode(' in logmsg:
|
||||
log.debug('Executed SHIM command. Command logged to TRACE')
|
||||
logmsg = logmsg.replace(self.passwd, ("*" * 6))
|
||||
if 'decode("base64")' in logmsg or "base64.b64decode(" in logmsg:
|
||||
log.debug("Executed SHIM command. Command logged to TRACE")
|
||||
log.trace(logmsg)
|
||||
else:
|
||||
log.debug(logmsg)
|
||||
|
@ -348,49 +354,50 @@ class Shell(object):
|
|||
return ret
|
||||
|
||||
def send(self, local, remote, makedirs=False):
|
||||
'''
|
||||
"""
|
||||
scp a file or files to a remote system
|
||||
'''
|
||||
"""
|
||||
if makedirs:
|
||||
self.exec_cmd('mkdir -p {0}'.format(os.path.dirname(remote)))
|
||||
self.exec_cmd("mkdir -p {0}".format(os.path.dirname(remote)))
|
||||
|
||||
# scp needs [<ipv6}
|
||||
host = self.host
|
||||
if ':' in host:
|
||||
host = '[{0}]'.format(host)
|
||||
if ":" in host:
|
||||
host = "[{0}]".format(host)
|
||||
|
||||
cmd = '{0} {1}:{2}'.format(local, host, remote)
|
||||
cmd = self._cmd_str(cmd, ssh='scp')
|
||||
cmd = "{0} {1}:{2}".format(local, host, remote)
|
||||
cmd = self._cmd_str(cmd, ssh="scp")
|
||||
|
||||
logmsg = 'Executing command: {0}'.format(cmd)
|
||||
logmsg = "Executing command: {0}".format(cmd)
|
||||
if self.passwd:
|
||||
logmsg = logmsg.replace(self.passwd, ('*' * 6))
|
||||
logmsg = logmsg.replace(self.passwd, ("*" * 6))
|
||||
log.debug(logmsg)
|
||||
|
||||
return self._run_cmd(cmd)
|
||||
|
||||
def _run_cmd(self, cmd, key_accept=False, passwd_retries=3):
|
||||
'''
|
||||
"""
|
||||
Execute a shell command via VT. This is blocking and assumes that ssh
|
||||
is being run
|
||||
'''
|
||||
"""
|
||||
if not cmd:
|
||||
return '', 'No command or passphrase', 245
|
||||
return "", "No command or passphrase", 245
|
||||
|
||||
term = salt.utils.vt.Terminal(
|
||||
cmd,
|
||||
shell=True,
|
||||
log_stdout=True,
|
||||
log_stdout_level='trace',
|
||||
log_stderr=True,
|
||||
log_stderr_level='trace',
|
||||
stream_stdout=False,
|
||||
stream_stderr=False)
|
||||
cmd,
|
||||
shell=True,
|
||||
log_stdout=True,
|
||||
log_stdout_level="trace",
|
||||
log_stderr=True,
|
||||
log_stderr_level="trace",
|
||||
stream_stdout=False,
|
||||
stream_stderr=False,
|
||||
)
|
||||
sent_passwd = 0
|
||||
send_password = True
|
||||
ret_stdout = ''
|
||||
ret_stderr = ''
|
||||
old_stdout = ''
|
||||
ret_stdout = ""
|
||||
ret_stderr = ""
|
||||
old_stdout = ""
|
||||
|
||||
try:
|
||||
while term.has_unread_data:
|
||||
|
@ -407,31 +414,40 @@ class Shell(object):
|
|||
send_password = False
|
||||
if buff and SSH_PRIVATE_KEY_PASSWORD_PROMPT_RE.search(buff):
|
||||
if not self.priv_passwd:
|
||||
return '', 'Private key file need passphrase', 254
|
||||
return "", "Private key file need passphrase", 254
|
||||
term.sendline(self.priv_passwd)
|
||||
continue
|
||||
if buff and SSH_PASSWORD_PROMPT_RE.search(buff) and send_password:
|
||||
if not self.passwd:
|
||||
return '', 'Permission denied, no authentication information', 254
|
||||
return (
|
||||
"",
|
||||
"Permission denied, no authentication information",
|
||||
254,
|
||||
)
|
||||
if sent_passwd < passwd_retries:
|
||||
term.sendline(self.passwd)
|
||||
sent_passwd += 1
|
||||
continue
|
||||
else:
|
||||
# asking for a password, and we can't seem to send it
|
||||
return '', 'Password authentication failed', 254
|
||||
return "", "Password authentication failed", 254
|
||||
elif buff and KEY_VALID_RE.search(buff):
|
||||
if key_accept:
|
||||
term.sendline('yes')
|
||||
term.sendline("yes")
|
||||
continue
|
||||
else:
|
||||
term.sendline('no')
|
||||
ret_stdout = ('The host key needs to be accepted, to '
|
||||
'auto accept run salt-ssh with the -i '
|
||||
'flag:\n{0}').format(stdout)
|
||||
return ret_stdout, '', 254
|
||||
elif buff and buff.endswith('_||ext_mods||_'):
|
||||
mods_raw = salt.utils.json.dumps(self.mods, separators=(',', ':')) + '|_E|0|'
|
||||
term.sendline("no")
|
||||
ret_stdout = (
|
||||
"The host key needs to be accepted, to "
|
||||
"auto accept run salt-ssh with the -i "
|
||||
"flag:\n{0}"
|
||||
).format(stdout)
|
||||
return ret_stdout, "", 254
|
||||
elif buff and buff.endswith("_||ext_mods||_"):
|
||||
mods_raw = (
|
||||
salt.utils.json.dumps(self.mods, separators=(",", ":"))
|
||||
+ "|_E|0|"
|
||||
)
|
||||
term.sendline(mods_raw)
|
||||
if stdout:
|
||||
old_stdout = stdout
|
||||
|
|
|
@ -1,25 +1,25 @@
|
|||
# pylint: disable=no-encoding-in-file,resource-leakage
|
||||
'''
|
||||
"""
|
||||
This is a shim that handles checking and updating salt thin and
|
||||
then invoking thin.
|
||||
|
||||
This is not intended to be instantiated as a module, rather it is a
|
||||
helper script used by salt.client.ssh.Single. It is here, in a
|
||||
separate file, for convenience of development.
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import hashlib
|
||||
import tarfile
|
||||
import shutil
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
import sys
|
||||
import tarfile
|
||||
import time
|
||||
|
||||
THIN_ARCHIVE = 'salt-thin.tgz'
|
||||
EXT_ARCHIVE = 'salt-ext_mods.tgz'
|
||||
THIN_ARCHIVE = "salt-thin.tgz"
|
||||
EXT_ARCHIVE = "salt-ext_mods.tgz"
|
||||
|
||||
# Keep these in sync with salt/defaults/exitcodes.py
|
||||
EX_THIN_PYTHON_INVALID = 10
|
||||
|
@ -31,9 +31,9 @@ EX_CANTCREAT = 73
|
|||
|
||||
|
||||
class OptionsContainer(object):
|
||||
'''
|
||||
"""
|
||||
An empty class for holding instance attribute values.
|
||||
'''
|
||||
"""
|
||||
|
||||
|
||||
OPTIONS = OptionsContainer()
|
||||
|
@ -46,15 +46,15 @@ ARGS = None
|
|||
|
||||
|
||||
def get_system_encoding():
|
||||
'''
|
||||
"""
|
||||
Get system encoding. Most of this code is a part of salt/__init__.py
|
||||
'''
|
||||
"""
|
||||
# This is the most trustworthy source of the system encoding, though, if
|
||||
# salt is being imported after being daemonized, this information is lost
|
||||
# and reset to None
|
||||
encoding = None
|
||||
|
||||
if not sys.platform.startswith('win') and sys.stdin is not None:
|
||||
if not sys.platform.startswith("win") and sys.stdin is not None:
|
||||
# On linux we can rely on sys.stdin for the encoding since it
|
||||
# most commonly matches the filesystem encoding. This however
|
||||
# does not apply to windows
|
||||
|
@ -65,6 +65,7 @@ def get_system_encoding():
|
|||
# encoding. MS Windows has problems with this and reports the wrong
|
||||
# encoding
|
||||
import locale
|
||||
|
||||
try:
|
||||
encoding = locale.getdefaultlocale()[-1]
|
||||
except ValueError:
|
||||
|
@ -80,31 +81,31 @@ def get_system_encoding():
|
|||
# the way back to ascii
|
||||
encoding = sys.getdefaultencoding()
|
||||
if not encoding:
|
||||
if sys.platform.startswith('darwin'):
|
||||
if sys.platform.startswith("darwin"):
|
||||
# Mac OS X uses UTF-8
|
||||
encoding = 'utf-8'
|
||||
elif sys.platform.startswith('win'):
|
||||
encoding = "utf-8"
|
||||
elif sys.platform.startswith("win"):
|
||||
# Windows uses a configurable encoding; on Windows, Python uses the name "mbcs"
|
||||
# to refer to whatever the currently configured encoding is.
|
||||
encoding = 'mbcs'
|
||||
encoding = "mbcs"
|
||||
else:
|
||||
# On linux default to ascii as a last resort
|
||||
encoding = 'ascii'
|
||||
encoding = "ascii"
|
||||
return encoding
|
||||
|
||||
|
||||
def is_windows():
|
||||
'''
|
||||
"""
|
||||
Simple function to return if a host is Windows or not
|
||||
'''
|
||||
return sys.platform.startswith('win')
|
||||
"""
|
||||
return sys.platform.startswith("win")
|
||||
|
||||
|
||||
def need_deployment():
|
||||
'''
|
||||
"""
|
||||
Salt thin needs to be deployed - prep the target directory and emit the
|
||||
delimiter and exit code that signals a required deployment.
|
||||
'''
|
||||
"""
|
||||
if os.path.exists(OPTIONS.saltdir):
|
||||
shutil.rmtree(OPTIONS.saltdir)
|
||||
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
|
||||
|
@ -124,15 +125,20 @@ def need_deployment():
|
|||
# Attack detected
|
||||
need_deployment()
|
||||
# If SUDOing then also give the super user group write permissions
|
||||
sudo_gid = os.environ.get('SUDO_GID')
|
||||
sudo_gid = os.environ.get("SUDO_GID")
|
||||
if sudo_gid:
|
||||
try:
|
||||
os.chown(OPTIONS.saltdir, -1, int(sudo_gid))
|
||||
stt = os.stat(OPTIONS.saltdir)
|
||||
os.chmod(OPTIONS.saltdir, stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP)
|
||||
os.chmod(
|
||||
OPTIONS.saltdir,
|
||||
stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP,
|
||||
)
|
||||
except OSError:
|
||||
sys.stdout.write('\n\nUnable to set permissions on thin directory.\nIf sudo_user is set '
|
||||
'and is not root, be certain the user is in the same group\nas the login user')
|
||||
sys.stdout.write(
|
||||
"\n\nUnable to set permissions on thin directory.\nIf sudo_user is set "
|
||||
"and is not root, be certain the user is in the same group\nas the login user"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# Delimiter emitted on stdout *only* to indicate shim message to master.
|
||||
|
@ -141,26 +147,26 @@ def need_deployment():
|
|||
|
||||
|
||||
# Adapted from salt.utils.hashutils.get_hash()
|
||||
def get_hash(path, form='sha1', chunk_size=4096):
|
||||
'''
|
||||
def get_hash(path, form="sha1", chunk_size=4096):
|
||||
"""
|
||||
Generate a hash digest string for a file.
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
hash_type = getattr(hashlib, form)
|
||||
except AttributeError:
|
||||
raise ValueError('Invalid hash type: {0}'.format(form))
|
||||
with open(path, 'rb') as ifile:
|
||||
raise ValueError("Invalid hash type: {0}".format(form))
|
||||
with open(path, "rb") as ifile:
|
||||
hash_obj = hash_type()
|
||||
# read the file in in chunks, not the entire file
|
||||
for chunk in iter(lambda: ifile.read(chunk_size), b''):
|
||||
for chunk in iter(lambda: ifile.read(chunk_size), b""):
|
||||
hash_obj.update(chunk)
|
||||
return hash_obj.hexdigest()
|
||||
|
||||
|
||||
def unpack_thin(thin_path):
|
||||
'''
|
||||
"""
|
||||
Unpack the Salt thin archive.
|
||||
'''
|
||||
"""
|
||||
tfile = tarfile.TarFile.gzopen(thin_path)
|
||||
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
|
||||
tfile.extractall(path=OPTIONS.saltdir)
|
||||
|
@ -174,40 +180,35 @@ def unpack_thin(thin_path):
|
|||
|
||||
|
||||
def need_ext():
|
||||
'''
|
||||
"""
|
||||
Signal that external modules need to be deployed.
|
||||
'''
|
||||
"""
|
||||
sys.stdout.write("{0}\next_mods\n".format(OPTIONS.delimiter))
|
||||
sys.exit(EX_MOD_DEPLOY)
|
||||
|
||||
|
||||
def unpack_ext(ext_path):
|
||||
'''
|
||||
"""
|
||||
Unpack the external modules.
|
||||
'''
|
||||
"""
|
||||
modcache = os.path.join(
|
||||
OPTIONS.saltdir,
|
||||
'running_data',
|
||||
'var',
|
||||
'cache',
|
||||
'salt',
|
||||
'minion',
|
||||
'extmods')
|
||||
OPTIONS.saltdir, "running_data", "var", "cache", "salt", "minion", "extmods"
|
||||
)
|
||||
tfile = tarfile.TarFile.gzopen(ext_path)
|
||||
old_umask = os.umask(0o077) # pylint: disable=blacklisted-function
|
||||
tfile.extractall(path=modcache)
|
||||
tfile.close()
|
||||
os.umask(old_umask) # pylint: disable=blacklisted-function
|
||||
os.unlink(ext_path)
|
||||
ver_path = os.path.join(modcache, 'ext_version')
|
||||
ver_dst = os.path.join(OPTIONS.saltdir, 'ext_version')
|
||||
ver_path = os.path.join(modcache, "ext_version")
|
||||
ver_dst = os.path.join(OPTIONS.saltdir, "ext_version")
|
||||
shutil.move(ver_path, ver_dst)
|
||||
|
||||
|
||||
def reset_time(path='.', amt=None):
|
||||
'''
|
||||
def reset_time(path=".", amt=None):
|
||||
"""
|
||||
Reset atime/mtime on all files to prevent systemd swipes only part of the files in the /tmp.
|
||||
'''
|
||||
"""
|
||||
if not amt:
|
||||
amt = int(time.time())
|
||||
for fname in os.listdir(path):
|
||||
|
@ -218,37 +219,57 @@ def reset_time(path='.', amt=None):
|
|||
|
||||
|
||||
def get_executable():
|
||||
'''
|
||||
"""
|
||||
Find executable which matches supported python version in the thin
|
||||
'''
|
||||
"""
|
||||
pymap = {}
|
||||
with open(os.path.join(OPTIONS.saltdir, 'supported-versions')) as _fp:
|
||||
with open(os.path.join(OPTIONS.saltdir, "supported-versions")) as _fp:
|
||||
for line in _fp.readlines():
|
||||
ns, v_maj, v_min = line.strip().split(':')
|
||||
ns, v_maj, v_min = line.strip().split(":")
|
||||
pymap[ns] = (int(v_maj), int(v_min))
|
||||
|
||||
pycmds = (sys.executable, 'python3', 'python27', 'python2.7', 'python26', 'python2.6', 'python2', 'python')
|
||||
pycmds = (
|
||||
sys.executable,
|
||||
"python3",
|
||||
"python27",
|
||||
"python2.7",
|
||||
"python26",
|
||||
"python2.6",
|
||||
"python2",
|
||||
"python",
|
||||
)
|
||||
for py_cmd in pycmds:
|
||||
cmd = py_cmd + ' -c "import sys; sys.stdout.write(\'%s:%s\' % (sys.version_info[0], sys.version_info[1]))"'
|
||||
stdout, _ = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).communicate()
|
||||
cmd = (
|
||||
py_cmd
|
||||
+ " -c \"import sys; sys.stdout.write('%s:%s' % (sys.version_info[0], sys.version_info[1]))\""
|
||||
)
|
||||
stdout, _ = subprocess.Popen(
|
||||
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
|
||||
).communicate()
|
||||
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
|
||||
stdout = stdout.decode(get_system_encoding(), "replace").strip()
|
||||
else:
|
||||
stdout = stdout.decode(encoding=get_system_encoding(), errors="replace").strip()
|
||||
stdout = stdout.decode(
|
||||
encoding=get_system_encoding(), errors="replace"
|
||||
).strip()
|
||||
if not stdout:
|
||||
continue
|
||||
c_vn = tuple([int(x) for x in stdout.split(':')])
|
||||
c_vn = tuple([int(x) for x in stdout.split(":")])
|
||||
for ns in pymap:
|
||||
if c_vn[0] == pymap[ns][0] and c_vn >= pymap[ns] and os.path.exists(os.path.join(OPTIONS.saltdir, ns)):
|
||||
if (
|
||||
c_vn[0] == pymap[ns][0]
|
||||
and c_vn >= pymap[ns]
|
||||
and os.path.exists(os.path.join(OPTIONS.saltdir, ns))
|
||||
):
|
||||
return py_cmd
|
||||
|
||||
sys.exit(EX_THIN_PYTHON_INVALID)
|
||||
|
||||
|
||||
def main(argv): # pylint: disable=W0613
|
||||
'''
|
||||
"""
|
||||
Main program body
|
||||
'''
|
||||
"""
|
||||
thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE)
|
||||
if os.path.isfile(thin_path):
|
||||
if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc):
|
||||
|
@ -256,49 +277,60 @@ def main(argv): # pylint: disable=W0613
|
|||
unpack_thin(thin_path)
|
||||
# Salt thin now is available to use
|
||||
else:
|
||||
if not sys.platform.startswith('win'):
|
||||
scpstat = subprocess.Popen(['/bin/sh', '-c', 'command -v scp']).wait()
|
||||
if not sys.platform.startswith("win"):
|
||||
scpstat = subprocess.Popen(["/bin/sh", "-c", "command -v scp"]).wait()
|
||||
if scpstat != 0:
|
||||
sys.exit(EX_SCP_NOT_FOUND)
|
||||
|
||||
if os.path.exists(OPTIONS.saltdir) and not os.path.isdir(OPTIONS.saltdir):
|
||||
sys.stderr.write(
|
||||
'ERROR: salt path "{0}" exists but is'
|
||||
' not a directory\n'.format(OPTIONS.saltdir)
|
||||
" not a directory\n".format(OPTIONS.saltdir)
|
||||
)
|
||||
sys.exit(EX_CANTCREAT)
|
||||
|
||||
if not os.path.exists(OPTIONS.saltdir):
|
||||
need_deployment()
|
||||
|
||||
code_checksum_path = os.path.normpath(os.path.join(OPTIONS.saltdir, 'code-checksum'))
|
||||
if not os.path.exists(code_checksum_path) or not os.path.isfile(code_checksum_path):
|
||||
sys.stderr.write('WARNING: Unable to locate current code checksum: {0}.\n'.format(code_checksum_path))
|
||||
code_checksum_path = os.path.normpath(
|
||||
os.path.join(OPTIONS.saltdir, "code-checksum")
|
||||
)
|
||||
if not os.path.exists(code_checksum_path) or not os.path.isfile(
|
||||
code_checksum_path
|
||||
):
|
||||
sys.stderr.write(
|
||||
"WARNING: Unable to locate current code checksum: {0}.\n".format(
|
||||
code_checksum_path
|
||||
)
|
||||
)
|
||||
need_deployment()
|
||||
with open(code_checksum_path, 'r') as vpo:
|
||||
with open(code_checksum_path, "r") as vpo:
|
||||
cur_code_cs = vpo.readline().strip()
|
||||
if cur_code_cs != OPTIONS.code_checksum:
|
||||
sys.stderr.write('WARNING: current code checksum {0} is different to {1}.\n'.format(cur_code_cs,
|
||||
OPTIONS.code_checksum))
|
||||
sys.stderr.write(
|
||||
"WARNING: current code checksum {0} is different to {1}.\n".format(
|
||||
cur_code_cs, OPTIONS.code_checksum
|
||||
)
|
||||
)
|
||||
need_deployment()
|
||||
# Salt thin exists and is up-to-date - fall through and use it
|
||||
|
||||
salt_call_path = os.path.join(OPTIONS.saltdir, 'salt-call')
|
||||
salt_call_path = os.path.join(OPTIONS.saltdir, "salt-call")
|
||||
if not os.path.isfile(salt_call_path):
|
||||
sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
|
||||
need_deployment()
|
||||
|
||||
with open(os.path.join(OPTIONS.saltdir, 'minion'), 'w') as config:
|
||||
config.write(OPTIONS.config + '\n')
|
||||
with open(os.path.join(OPTIONS.saltdir, "minion"), "w") as config:
|
||||
config.write(OPTIONS.config + "\n")
|
||||
if OPTIONS.ext_mods:
|
||||
ext_path = os.path.join(OPTIONS.saltdir, EXT_ARCHIVE)
|
||||
if os.path.exists(ext_path):
|
||||
unpack_ext(ext_path)
|
||||
else:
|
||||
version_path = os.path.join(OPTIONS.saltdir, 'ext_version')
|
||||
version_path = os.path.join(OPTIONS.saltdir, "ext_version")
|
||||
if not os.path.exists(version_path) or not os.path.isfile(version_path):
|
||||
need_ext()
|
||||
with open(version_path, 'r') as vpo:
|
||||
with open(version_path, "r") as vpo:
|
||||
cur_version = vpo.readline().strip()
|
||||
if cur_version != OPTIONS.ext_mods:
|
||||
need_ext()
|
||||
|
@ -311,39 +343,46 @@ def main(argv): # pylint: disable=W0613
|
|||
salt_argv = [
|
||||
get_executable(),
|
||||
salt_call_path,
|
||||
'--retcode-passthrough',
|
||||
'--local',
|
||||
'--metadata',
|
||||
'--out', 'json',
|
||||
'-l', 'quiet',
|
||||
'-c', OPTIONS.saltdir
|
||||
"--retcode-passthrough",
|
||||
"--local",
|
||||
"--metadata",
|
||||
"--out",
|
||||
"json",
|
||||
"-l",
|
||||
"quiet",
|
||||
"-c",
|
||||
OPTIONS.saltdir,
|
||||
]
|
||||
|
||||
try:
|
||||
if argv_prepared[-1].startswith('--no-parse='):
|
||||
if argv_prepared[-1].startswith("--no-parse="):
|
||||
salt_argv.append(argv_prepared.pop(-1))
|
||||
except (IndexError, TypeError):
|
||||
pass
|
||||
|
||||
salt_argv.append('--')
|
||||
salt_argv.append("--")
|
||||
salt_argv.extend(argv_prepared)
|
||||
|
||||
sys.stderr.write('SALT_ARGV: {0}\n'.format(salt_argv))
|
||||
sys.stderr.write("SALT_ARGV: {0}\n".format(salt_argv))
|
||||
|
||||
# Only emit the delimiter on *both* stdout and stderr when completely successful.
|
||||
# Yes, the flush() is necessary.
|
||||
sys.stdout.write(OPTIONS.delimiter + '\n')
|
||||
sys.stdout.write(OPTIONS.delimiter + "\n")
|
||||
sys.stdout.flush()
|
||||
if not OPTIONS.tty:
|
||||
sys.stderr.write(OPTIONS.delimiter + '\n')
|
||||
sys.stderr.write(OPTIONS.delimiter + "\n")
|
||||
sys.stderr.flush()
|
||||
if OPTIONS.cmd_umask is not None:
|
||||
old_umask = os.umask(OPTIONS.cmd_umask) # pylint: disable=blacklisted-function
|
||||
if OPTIONS.tty:
|
||||
proc = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
proc = subprocess.Popen(
|
||||
salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
)
|
||||
# Returns bytes instead of string on python 3
|
||||
stdout, _ = proc.communicate()
|
||||
sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors="replace"))
|
||||
sys.stdout.write(
|
||||
stdout.decode(encoding=get_system_encoding(), errors="replace")
|
||||
)
|
||||
sys.stdout.flush()
|
||||
retcode = proc.returncode
|
||||
if OPTIONS.wipe:
|
||||
|
@ -358,5 +397,5 @@ def main(argv): # pylint: disable=W0613
|
|||
return retcode
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv))
|
||||
|
|
|
@ -1,19 +1,25 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Create ssh executor system
|
||||
'''
|
||||
"""
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
# Import python libs
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import tempfile
|
||||
import shutil
|
||||
from contextlib import closing
|
||||
|
||||
import salt.client.ssh
|
||||
|
||||
# Import salt libs
|
||||
import salt.client.ssh.shell
|
||||
import salt.client.ssh
|
||||
import salt.loader
|
||||
import salt.minion
|
||||
import salt.roster
|
||||
import salt.state
|
||||
import salt.utils.files
|
||||
import salt.utils.json
|
||||
import salt.utils.path
|
||||
|
@ -21,10 +27,6 @@ import salt.utils.stringutils
|
|||
import salt.utils.thin
|
||||
import salt.utils.url
|
||||
import salt.utils.verify
|
||||
import salt.roster
|
||||
import salt.state
|
||||
import salt.loader
|
||||
import salt.minion
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
@ -33,41 +35,45 @@ log = logging.getLogger(__name__)
|
|||
|
||||
|
||||
class SSHState(salt.state.State):
|
||||
'''
|
||||
"""
|
||||
Create a State object which wraps the SSH functions for state operations
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(self, opts, pillar=None, wrapper=None):
|
||||
self.wrapper = wrapper
|
||||
super(SSHState, self).__init__(opts, pillar)
|
||||
|
||||
def load_modules(self, data=None, proxy=None):
|
||||
'''
|
||||
"""
|
||||
Load up the modules for remote compilation via ssh
|
||||
'''
|
||||
"""
|
||||
self.functions = self.wrapper
|
||||
self.utils = salt.loader.utils(self.opts)
|
||||
self.serializers = salt.loader.serializers(self.opts)
|
||||
locals_ = salt.loader.minion_mods(self.opts, utils=self.utils)
|
||||
self.states = salt.loader.states(self.opts, locals_, self.utils, self.serializers)
|
||||
self.states = salt.loader.states(
|
||||
self.opts, locals_, self.utils, self.serializers
|
||||
)
|
||||
self.rend = salt.loader.render(self.opts, self.functions)
|
||||
|
||||
def check_refresh(self, data, ret):
|
||||
'''
|
||||
"""
|
||||
Stub out check_refresh
|
||||
'''
|
||||
"""
|
||||
return
|
||||
|
||||
def module_refresh(self):
|
||||
'''
|
||||
"""
|
||||
Module refresh is not needed, stub it out
|
||||
'''
|
||||
"""
|
||||
return
|
||||
|
||||
|
||||
class SSHHighState(salt.state.BaseHighState):
|
||||
'''
|
||||
"""
|
||||
Used to compile the highstate on the master
|
||||
'''
|
||||
"""
|
||||
|
||||
stack = []
|
||||
|
||||
def __init__(self, opts, pillar=None, wrapper=None, fsclient=None):
|
||||
|
@ -84,55 +90,57 @@ class SSHHighState(salt.state.BaseHighState):
|
|||
salt.state.HighState.stack.append(self)
|
||||
|
||||
def load_dynamic(self, matches):
|
||||
'''
|
||||
"""
|
||||
Stub out load_dynamic
|
||||
'''
|
||||
"""
|
||||
return
|
||||
|
||||
def _master_tops(self):
|
||||
'''
|
||||
"""
|
||||
Evaluate master_tops locally
|
||||
'''
|
||||
if 'id' not in self.opts:
|
||||
log.error('Received call for external nodes without an id')
|
||||
"""
|
||||
if "id" not in self.opts:
|
||||
log.error("Received call for external nodes without an id")
|
||||
return {}
|
||||
if not salt.utils.verify.valid_id(self.opts, self.opts['id']):
|
||||
if not salt.utils.verify.valid_id(self.opts, self.opts["id"]):
|
||||
return {}
|
||||
# Evaluate all configured master_tops interfaces
|
||||
|
||||
grains = {}
|
||||
ret = {}
|
||||
|
||||
if 'grains' in self.opts:
|
||||
grains = self.opts['grains']
|
||||
if "grains" in self.opts:
|
||||
grains = self.opts["grains"]
|
||||
for fun in self.tops:
|
||||
if fun not in self.opts.get('master_tops', {}):
|
||||
if fun not in self.opts.get("master_tops", {}):
|
||||
continue
|
||||
try:
|
||||
ret.update(self.tops[fun](opts=self.opts, grains=grains))
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
# If anything happens in the top generation, log it and move on
|
||||
log.error(
|
||||
'Top function %s failed with error %s for minion %s',
|
||||
fun, exc, self.opts['id']
|
||||
"Top function %s failed with error %s for minion %s",
|
||||
fun,
|
||||
exc,
|
||||
self.opts["id"],
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
def lowstate_file_refs(chunks, extras=''):
|
||||
'''
|
||||
def lowstate_file_refs(chunks, extras=""):
|
||||
"""
|
||||
Create a list of file ref objects to reconcile
|
||||
'''
|
||||
"""
|
||||
refs = {}
|
||||
for chunk in chunks:
|
||||
if not isinstance(chunk, dict):
|
||||
continue
|
||||
saltenv = 'base'
|
||||
saltenv = "base"
|
||||
crefs = []
|
||||
for state in chunk:
|
||||
if state == '__env__':
|
||||
if state == "__env__":
|
||||
saltenv = chunk[state]
|
||||
elif state.startswith('__'):
|
||||
elif state.startswith("__"):
|
||||
continue
|
||||
crefs.extend(salt_refs(chunk[state]))
|
||||
if saltenv not in refs:
|
||||
|
@ -140,7 +148,7 @@ def lowstate_file_refs(chunks, extras=''):
|
|||
if crefs:
|
||||
refs[saltenv].append(crefs)
|
||||
if extras:
|
||||
extra_refs = extras.split(',')
|
||||
extra_refs = extras.split(",")
|
||||
if extra_refs:
|
||||
for env in refs:
|
||||
for x in extra_refs:
|
||||
|
@ -149,10 +157,10 @@ def lowstate_file_refs(chunks, extras=''):
|
|||
|
||||
|
||||
def salt_refs(data, ret=None):
|
||||
'''
|
||||
"""
|
||||
Pull salt file references out of the states
|
||||
'''
|
||||
proto = 'salt://'
|
||||
"""
|
||||
proto = "salt://"
|
||||
if ret is None:
|
||||
ret = []
|
||||
if isinstance(data, six.string_types):
|
||||
|
@ -167,57 +175,59 @@ def salt_refs(data, ret=None):
|
|||
return ret
|
||||
|
||||
|
||||
def prep_trans_tar(file_client, chunks, file_refs, pillar=None, id_=None, roster_grains=None):
|
||||
'''
|
||||
def prep_trans_tar(
|
||||
file_client, chunks, file_refs, pillar=None, id_=None, roster_grains=None
|
||||
):
|
||||
"""
|
||||
Generate the execution package from the saltenv file refs and a low state
|
||||
data structure
|
||||
'''
|
||||
"""
|
||||
gendir = tempfile.mkdtemp()
|
||||
trans_tar = salt.utils.files.mkstemp()
|
||||
lowfn = os.path.join(gendir, 'lowstate.json')
|
||||
pillarfn = os.path.join(gendir, 'pillar.json')
|
||||
roster_grainsfn = os.path.join(gendir, 'roster_grains.json')
|
||||
lowfn = os.path.join(gendir, "lowstate.json")
|
||||
pillarfn = os.path.join(gendir, "pillar.json")
|
||||
roster_grainsfn = os.path.join(gendir, "roster_grains.json")
|
||||
sync_refs = [
|
||||
[salt.utils.url.create('_modules')],
|
||||
[salt.utils.url.create('_states')],
|
||||
[salt.utils.url.create('_grains')],
|
||||
[salt.utils.url.create('_renderers')],
|
||||
[salt.utils.url.create('_returners')],
|
||||
[salt.utils.url.create('_output')],
|
||||
[salt.utils.url.create('_utils')],
|
||||
]
|
||||
with salt.utils.files.fopen(lowfn, 'w+') as fp_:
|
||||
[salt.utils.url.create("_modules")],
|
||||
[salt.utils.url.create("_states")],
|
||||
[salt.utils.url.create("_grains")],
|
||||
[salt.utils.url.create("_renderers")],
|
||||
[salt.utils.url.create("_returners")],
|
||||
[salt.utils.url.create("_output")],
|
||||
[salt.utils.url.create("_utils")],
|
||||
]
|
||||
with salt.utils.files.fopen(lowfn, "w+") as fp_:
|
||||
salt.utils.json.dump(chunks, fp_)
|
||||
if pillar:
|
||||
with salt.utils.files.fopen(pillarfn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(pillarfn, "w+") as fp_:
|
||||
salt.utils.json.dump(pillar, fp_)
|
||||
if roster_grains:
|
||||
with salt.utils.files.fopen(roster_grainsfn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(roster_grainsfn, "w+") as fp_:
|
||||
salt.utils.json.dump(roster_grains, fp_)
|
||||
|
||||
if id_ is None:
|
||||
id_ = ''
|
||||
id_ = ""
|
||||
try:
|
||||
cachedir = os.path.join('salt-ssh', id_).rstrip(os.sep)
|
||||
cachedir = os.path.join("salt-ssh", id_).rstrip(os.sep)
|
||||
except AttributeError:
|
||||
# Minion ID should always be a str, but don't let an int break this
|
||||
cachedir = os.path.join('salt-ssh', six.text_type(id_)).rstrip(os.sep)
|
||||
cachedir = os.path.join("salt-ssh", six.text_type(id_)).rstrip(os.sep)
|
||||
|
||||
for saltenv in file_refs:
|
||||
# Location where files in this saltenv will be cached
|
||||
cache_dest_root = os.path.join(cachedir, 'files', saltenv)
|
||||
cache_dest_root = os.path.join(cachedir, "files", saltenv)
|
||||
file_refs[saltenv].extend(sync_refs)
|
||||
env_root = os.path.join(gendir, saltenv)
|
||||
if not os.path.isdir(env_root):
|
||||
os.makedirs(env_root)
|
||||
for ref in file_refs[saltenv]:
|
||||
for name in ref:
|
||||
short = salt.utils.url.parse(name)[0].lstrip('/')
|
||||
short = salt.utils.url.parse(name)[0].lstrip("/")
|
||||
cache_dest = os.path.join(cache_dest_root, short)
|
||||
try:
|
||||
path = file_client.cache_file(name, saltenv, cachedir=cachedir)
|
||||
except IOError:
|
||||
path = ''
|
||||
path = ""
|
||||
if path:
|
||||
tgt = os.path.join(env_root, short)
|
||||
tgt_dir = os.path.dirname(tgt)
|
||||
|
@ -228,15 +238,13 @@ def prep_trans_tar(file_client, chunks, file_refs, pillar=None, id_=None, roster
|
|||
try:
|
||||
files = file_client.cache_dir(name, saltenv, cachedir=cachedir)
|
||||
except IOError:
|
||||
files = ''
|
||||
files = ""
|
||||
if files:
|
||||
for filename in files:
|
||||
fn = filename[len(file_client.get_cachedir(cache_dest)):].strip('/')
|
||||
tgt = os.path.join(
|
||||
env_root,
|
||||
short,
|
||||
fn,
|
||||
)
|
||||
fn = filename[
|
||||
len(file_client.get_cachedir(cache_dest)) :
|
||||
].strip("/")
|
||||
tgt = os.path.join(env_root, short, fn,)
|
||||
tgt_dir = os.path.dirname(tgt)
|
||||
if not os.path.isdir(tgt_dir):
|
||||
os.makedirs(tgt_dir)
|
||||
|
@ -248,11 +256,11 @@ def prep_trans_tar(file_client, chunks, file_refs, pillar=None, id_=None, roster
|
|||
except OSError:
|
||||
cwd = None
|
||||
os.chdir(gendir)
|
||||
with closing(tarfile.open(trans_tar, 'w:gz')) as tfp:
|
||||
with closing(tarfile.open(trans_tar, "w:gz")) as tfp:
|
||||
for root, dirs, files in salt.utils.path.os_walk(gendir):
|
||||
for name in files:
|
||||
full = os.path.join(root, name)
|
||||
tfp.add(full[len(gendir):].lstrip(os.sep))
|
||||
tfp.add(full[len(gendir) :].lstrip(os.sep))
|
||||
if cwd:
|
||||
os.chdir(cwd)
|
||||
shutil.rmtree(gendir)
|
||||
|
|
|
@ -1,49 +1,52 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
The ssh client wrapper system contains the routines that are used to alter
|
||||
how executions are run in the salt-ssh system, this allows for state routines
|
||||
to be easily rewritten to execute in a way that makes them do the same tasks
|
||||
as ZeroMQ salt, but via ssh.
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import copy
|
||||
|
||||
import salt.client.ssh
|
||||
|
||||
# Import salt libs
|
||||
import salt.loader
|
||||
import salt.utils.data
|
||||
import salt.utils.json
|
||||
import salt.client.ssh
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
|
||||
class FunctionWrapper(object):
|
||||
'''
|
||||
"""
|
||||
Create an object that acts like the salt function dict and makes function
|
||||
calls remotely via the SSH shell system
|
||||
'''
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
opts,
|
||||
id_,
|
||||
host,
|
||||
wfuncs=None,
|
||||
mods=None,
|
||||
fsclient=None,
|
||||
cmd_prefix=None,
|
||||
aliases=None,
|
||||
minion_opts=None,
|
||||
**kwargs):
|
||||
self,
|
||||
opts,
|
||||
id_,
|
||||
host,
|
||||
wfuncs=None,
|
||||
mods=None,
|
||||
fsclient=None,
|
||||
cmd_prefix=None,
|
||||
aliases=None,
|
||||
minion_opts=None,
|
||||
**kwargs
|
||||
):
|
||||
super(FunctionWrapper, self).__init__()
|
||||
self.cmd_prefix = cmd_prefix
|
||||
self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
|
||||
self.opts = opts
|
||||
self.mods = mods if isinstance(mods, dict) else {}
|
||||
self.kwargs = {'id_': id_,
|
||||
'host': host}
|
||||
self.kwargs = {"id_": id_, "host": host}
|
||||
self.fsclient = fsclient
|
||||
self.kwargs.update(kwargs)
|
||||
self.aliases = aliases
|
||||
|
@ -52,11 +55,11 @@ class FunctionWrapper(object):
|
|||
self.minion_opts = minion_opts
|
||||
|
||||
def __contains__(self, key):
|
||||
'''
|
||||
"""
|
||||
We need to implement a __contains__ method, othwerwise when someone
|
||||
does a contains comparison python assumes this is a sequence, and does
|
||||
__getitem__ keys 0 and up until IndexError
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
self[key] # pylint: disable=W0104
|
||||
return True
|
||||
|
@ -64,33 +67,35 @@ class FunctionWrapper(object):
|
|||
return False
|
||||
|
||||
def __getitem__(self, cmd):
|
||||
'''
|
||||
"""
|
||||
Return the function call to simulate the salt local lookup system
|
||||
'''
|
||||
if '.' not in cmd and not self.cmd_prefix:
|
||||
"""
|
||||
if "." not in cmd and not self.cmd_prefix:
|
||||
# Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
|
||||
# containing only 'cmd' module calls, in that case. Create a new
|
||||
# FunctionWrapper which contains the prefix 'cmd' (again, for the
|
||||
# salt.cmd.run example)
|
||||
kwargs = copy.deepcopy(self.kwargs)
|
||||
id_ = kwargs.pop('id_')
|
||||
host = kwargs.pop('host')
|
||||
return FunctionWrapper(self.opts,
|
||||
id_,
|
||||
host,
|
||||
wfuncs=self.wfuncs,
|
||||
mods=self.mods,
|
||||
fsclient=self.fsclient,
|
||||
cmd_prefix=cmd,
|
||||
aliases=self.aliases,
|
||||
minion_opts=self.minion_opts,
|
||||
**kwargs)
|
||||
id_ = kwargs.pop("id_")
|
||||
host = kwargs.pop("host")
|
||||
return FunctionWrapper(
|
||||
self.opts,
|
||||
id_,
|
||||
host,
|
||||
wfuncs=self.wfuncs,
|
||||
mods=self.mods,
|
||||
fsclient=self.fsclient,
|
||||
cmd_prefix=cmd,
|
||||
aliases=self.aliases,
|
||||
minion_opts=self.minion_opts,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
if self.cmd_prefix:
|
||||
# We're in an inner FunctionWrapper as created by the code block
|
||||
# above. Reconstruct the original cmd in the form 'cmd.run' and
|
||||
# then evaluate as normal
|
||||
cmd = '{0}.{1}'.format(self.cmd_prefix, cmd)
|
||||
cmd = "{0}.{1}".format(self.cmd_prefix, cmd)
|
||||
|
||||
if cmd in self.wfuncs:
|
||||
return self.wfuncs[cmd]
|
||||
|
@ -99,60 +104,69 @@ class FunctionWrapper(object):
|
|||
return self.aliases[cmd]
|
||||
|
||||
def caller(*args, **kwargs):
|
||||
'''
|
||||
"""
|
||||
The remote execution function
|
||||
'''
|
||||
"""
|
||||
argv = [cmd]
|
||||
argv.extend([salt.utils.json.dumps(arg) for arg in args])
|
||||
argv.extend(
|
||||
['{0}={1}'.format(salt.utils.stringutils.to_str(key),
|
||||
salt.utils.json.dumps(val))
|
||||
for key, val in six.iteritems(kwargs)]
|
||||
[
|
||||
"{0}={1}".format(
|
||||
salt.utils.stringutils.to_str(key), salt.utils.json.dumps(val)
|
||||
)
|
||||
for key, val in six.iteritems(kwargs)
|
||||
]
|
||||
)
|
||||
single = salt.client.ssh.Single(
|
||||
self.opts,
|
||||
argv,
|
||||
mods=self.mods,
|
||||
disable_wipe=True,
|
||||
fsclient=self.fsclient,
|
||||
minion_opts=self.minion_opts,
|
||||
**self.kwargs
|
||||
self.opts,
|
||||
argv,
|
||||
mods=self.mods,
|
||||
disable_wipe=True,
|
||||
fsclient=self.fsclient,
|
||||
minion_opts=self.minion_opts,
|
||||
**self.kwargs
|
||||
)
|
||||
stdout, stderr, retcode = single.cmd_block()
|
||||
if stderr.count('Permission Denied'):
|
||||
return {'_error': 'Permission Denied',
|
||||
'stdout': stdout,
|
||||
'stderr': stderr,
|
||||
'retcode': retcode}
|
||||
if stderr.count("Permission Denied"):
|
||||
return {
|
||||
"_error": "Permission Denied",
|
||||
"stdout": stdout,
|
||||
"stderr": stderr,
|
||||
"retcode": retcode,
|
||||
}
|
||||
try:
|
||||
ret = salt.utils.json.loads(stdout)
|
||||
if len(ret) < 2 and 'local' in ret:
|
||||
ret = ret['local']
|
||||
ret = ret.get('return', {})
|
||||
if len(ret) < 2 and "local" in ret:
|
||||
ret = ret["local"]
|
||||
ret = ret.get("return", {})
|
||||
except ValueError:
|
||||
ret = {'_error': 'Failed to return clean data',
|
||||
'stderr': stderr,
|
||||
'stdout': stdout,
|
||||
'retcode': retcode}
|
||||
ret = {
|
||||
"_error": "Failed to return clean data",
|
||||
"stderr": stderr,
|
||||
"stdout": stdout,
|
||||
"retcode": retcode,
|
||||
}
|
||||
return ret
|
||||
|
||||
return caller
|
||||
|
||||
def __setitem__(self, cmd, value):
|
||||
'''
|
||||
"""
|
||||
Set aliases for functions
|
||||
'''
|
||||
if '.' not in cmd and not self.cmd_prefix:
|
||||
"""
|
||||
if "." not in cmd and not self.cmd_prefix:
|
||||
# Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
|
||||
# containing only 'cmd' module calls, in that case. We don't
|
||||
# support assigning directly to prefixes in this way
|
||||
raise KeyError('Cannot assign to module key {0} in the '
|
||||
'FunctionWrapper'.format(cmd))
|
||||
raise KeyError(
|
||||
"Cannot assign to module key {0} in the " "FunctionWrapper".format(cmd)
|
||||
)
|
||||
|
||||
if self.cmd_prefix:
|
||||
# We're in an inner FunctionWrapper as created by the first code
|
||||
# block in __getitem__. Reconstruct the original cmd in the form
|
||||
# 'cmd.run' and then evaluate as normal
|
||||
cmd = '{0}.{1}'.format(self.cmd_prefix, cmd)
|
||||
cmd = "{0}.{1}".format(self.cmd_prefix, cmd)
|
||||
|
||||
if cmd in self.wfuncs:
|
||||
self.wfuncs[cmd] = value
|
||||
|
@ -163,9 +177,9 @@ class FunctionWrapper(object):
|
|||
self.aliases[cmd] = value
|
||||
|
||||
def get(self, cmd, default):
|
||||
'''
|
||||
"""
|
||||
Mirrors behavior of dict.get
|
||||
'''
|
||||
"""
|
||||
if cmd in self:
|
||||
return self[cmd]
|
||||
else:
|
||||
|
|
|
@ -1,61 +1,70 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Return config information
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
import re
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import salt.syspaths as syspaths
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.data
|
||||
import salt.utils.files
|
||||
import salt.syspaths as syspaths
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext import six
|
||||
|
||||
# Set up the default values for all systems
|
||||
DEFAULTS = {'mongo.db': 'salt',
|
||||
'mongo.host': 'salt',
|
||||
'mongo.password': '',
|
||||
'mongo.port': 27017,
|
||||
'mongo.user': '',
|
||||
'redis.db': '0',
|
||||
'redis.host': 'salt',
|
||||
'redis.port': 6379,
|
||||
'test.foo': 'unconfigured',
|
||||
'ca.cert_base_path': '/etc/pki',
|
||||
'solr.cores': [],
|
||||
'solr.host': 'localhost',
|
||||
'solr.port': '8983',
|
||||
'solr.baseurl': '/solr',
|
||||
'solr.type': 'master',
|
||||
'solr.request_timeout': None,
|
||||
'solr.init_script': '/etc/rc.d/solr',
|
||||
'solr.dih.import_options': {'clean': False, 'optimize': True,
|
||||
'commit': True, 'verbose': False},
|
||||
'solr.backup_path': None,
|
||||
'solr.num_backups': 1,
|
||||
'poudriere.config': '/usr/local/etc/poudriere.conf',
|
||||
'poudriere.config_dir': '/usr/local/etc/poudriere.d',
|
||||
'ldap.server': 'localhost',
|
||||
'ldap.port': '389',
|
||||
'ldap.tls': False,
|
||||
'ldap.scope': 2,
|
||||
'ldap.attrs': None,
|
||||
'ldap.binddn': '',
|
||||
'ldap.bindpw': '',
|
||||
'hosts.file': '/etc/hosts',
|
||||
'aliases.file': '/etc/aliases',
|
||||
'virt': {'tunnel': False,
|
||||
'images': os.path.join(syspaths.SRV_ROOT_DIR, 'salt-images')},
|
||||
}
|
||||
DEFAULTS = {
|
||||
"mongo.db": "salt",
|
||||
"mongo.host": "salt",
|
||||
"mongo.password": "",
|
||||
"mongo.port": 27017,
|
||||
"mongo.user": "",
|
||||
"redis.db": "0",
|
||||
"redis.host": "salt",
|
||||
"redis.port": 6379,
|
||||
"test.foo": "unconfigured",
|
||||
"ca.cert_base_path": "/etc/pki",
|
||||
"solr.cores": [],
|
||||
"solr.host": "localhost",
|
||||
"solr.port": "8983",
|
||||
"solr.baseurl": "/solr",
|
||||
"solr.type": "master",
|
||||
"solr.request_timeout": None,
|
||||
"solr.init_script": "/etc/rc.d/solr",
|
||||
"solr.dih.import_options": {
|
||||
"clean": False,
|
||||
"optimize": True,
|
||||
"commit": True,
|
||||
"verbose": False,
|
||||
},
|
||||
"solr.backup_path": None,
|
||||
"solr.num_backups": 1,
|
||||
"poudriere.config": "/usr/local/etc/poudriere.conf",
|
||||
"poudriere.config_dir": "/usr/local/etc/poudriere.d",
|
||||
"ldap.server": "localhost",
|
||||
"ldap.port": "389",
|
||||
"ldap.tls": False,
|
||||
"ldap.scope": 2,
|
||||
"ldap.attrs": None,
|
||||
"ldap.binddn": "",
|
||||
"ldap.bindpw": "",
|
||||
"hosts.file": "/etc/hosts",
|
||||
"aliases.file": "/etc/aliases",
|
||||
"virt": {
|
||||
"tunnel": False,
|
||||
"images": os.path.join(syspaths.SRV_ROOT_DIR, "salt-images"),
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def backup_mode(backup=''):
|
||||
'''
|
||||
def backup_mode(backup=""):
|
||||
"""
|
||||
Return the backup mode
|
||||
|
||||
CLI Example:
|
||||
|
@ -63,14 +72,14 @@ def backup_mode(backup=''):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.backup_mode
|
||||
'''
|
||||
"""
|
||||
if backup:
|
||||
return backup
|
||||
return option('backup_mode')
|
||||
return option("backup_mode")
|
||||
|
||||
|
||||
def manage_mode(mode):
|
||||
'''
|
||||
"""
|
||||
Return a mode value, normalized to a string
|
||||
|
||||
CLI Example:
|
||||
|
@ -78,7 +87,7 @@ def manage_mode(mode):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.manage_mode
|
||||
'''
|
||||
"""
|
||||
# config.manage_mode should no longer be invoked from the __salt__ dunder
|
||||
# in Salt code, this function is only being left here for backwards
|
||||
# compatibility.
|
||||
|
@ -86,7 +95,7 @@ def manage_mode(mode):
|
|||
|
||||
|
||||
def valid_fileproto(uri):
|
||||
'''
|
||||
"""
|
||||
Returns a boolean value based on whether or not the URI passed has a valid
|
||||
remote file protocol designation
|
||||
|
||||
|
@ -95,20 +104,15 @@ def valid_fileproto(uri):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.valid_fileproto salt://path/to/file
|
||||
'''
|
||||
"""
|
||||
try:
|
||||
return bool(re.match('^(?:salt|https?|ftp)://', uri))
|
||||
return bool(re.match("^(?:salt|https?|ftp)://", uri))
|
||||
except Exception: # pylint: disable=broad-except
|
||||
return False
|
||||
|
||||
|
||||
def option(
|
||||
value,
|
||||
default='',
|
||||
omit_opts=False,
|
||||
omit_master=False,
|
||||
omit_pillar=False):
|
||||
'''
|
||||
def option(value, default="", omit_opts=False, omit_master=False, omit_pillar=False):
|
||||
"""
|
||||
Pass in a generic option and receive the value that will be assigned
|
||||
|
||||
CLI Example:
|
||||
|
@ -116,13 +120,13 @@ def option(
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.option redis.host
|
||||
'''
|
||||
"""
|
||||
if not omit_opts:
|
||||
if value in __opts__:
|
||||
return __opts__[value]
|
||||
if not omit_master:
|
||||
if value in __pillar__.get('master', {}):
|
||||
return __pillar__['master'][value]
|
||||
if value in __pillar__.get("master", {}):
|
||||
return __pillar__["master"][value]
|
||||
if not omit_pillar:
|
||||
if value in __pillar__:
|
||||
return __pillar__[value]
|
||||
|
@ -131,12 +135,8 @@ def option(
|
|||
return default
|
||||
|
||||
|
||||
def merge(value,
|
||||
default='',
|
||||
omit_opts=False,
|
||||
omit_master=False,
|
||||
omit_pillar=False):
|
||||
'''
|
||||
def merge(value, default="", omit_opts=False, omit_master=False, omit_pillar=False):
|
||||
"""
|
||||
Retrieves an option based on key, merging all matches.
|
||||
|
||||
Same as ``option()`` except that it merges all matches, rather than taking
|
||||
|
@ -147,7 +147,7 @@ def merge(value,
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.merge schedule
|
||||
'''
|
||||
"""
|
||||
ret = None
|
||||
if not omit_opts:
|
||||
if value in __opts__:
|
||||
|
@ -155,8 +155,8 @@ def merge(value,
|
|||
if isinstance(ret, six.string_types):
|
||||
return ret
|
||||
if not omit_master:
|
||||
if value in __pillar__.get('master', {}):
|
||||
tmp = __pillar__['master'][value]
|
||||
if value in __pillar__.get("master", {}):
|
||||
tmp = __pillar__["master"][value]
|
||||
if ret is None:
|
||||
ret = tmp
|
||||
if isinstance(ret, six.string_types):
|
||||
|
@ -164,8 +164,7 @@ def merge(value,
|
|||
elif isinstance(ret, dict) and isinstance(tmp, dict):
|
||||
tmp.update(ret)
|
||||
ret = tmp
|
||||
elif (isinstance(ret, (list, tuple)) and
|
||||
isinstance(tmp, (list, tuple))):
|
||||
elif isinstance(ret, (list, tuple)) and isinstance(tmp, (list, tuple)):
|
||||
ret = list(ret) + list(tmp)
|
||||
if not omit_pillar:
|
||||
if value in __pillar__:
|
||||
|
@ -177,16 +176,15 @@ def merge(value,
|
|||
elif isinstance(ret, dict) and isinstance(tmp, dict):
|
||||
tmp.update(ret)
|
||||
ret = tmp
|
||||
elif (isinstance(ret, (list, tuple)) and
|
||||
isinstance(tmp, (list, tuple))):
|
||||
elif isinstance(ret, (list, tuple)) and isinstance(tmp, (list, tuple)):
|
||||
ret = list(ret) + list(tmp)
|
||||
if ret is None and value in DEFAULTS:
|
||||
return DEFAULTS[value]
|
||||
return ret or default
|
||||
|
||||
|
||||
def get(key, default=''):
|
||||
'''
|
||||
def get(key, default=""):
|
||||
"""
|
||||
.. versionadded: 0.14.0
|
||||
|
||||
Attempt to retrieve the named value from opts, pillar, grains of the master
|
||||
|
@ -215,24 +213,26 @@ def get(key, default=''):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.get pkg:apache
|
||||
'''
|
||||
ret = salt.utils.data.traverse_dict_and_list(__opts__, key, '_|-')
|
||||
if ret != '_|-':
|
||||
"""
|
||||
ret = salt.utils.data.traverse_dict_and_list(__opts__, key, "_|-")
|
||||
if ret != "_|-":
|
||||
return ret
|
||||
ret = salt.utils.data.traverse_dict_and_list(__grains__, key, '_|-')
|
||||
if ret != '_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(__grains__, key, "_|-")
|
||||
if ret != "_|-":
|
||||
return ret
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__, key, '_|-')
|
||||
if ret != '_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__, key, "_|-")
|
||||
if ret != "_|-":
|
||||
return ret
|
||||
ret = salt.utils.data.traverse_dict_and_list(__pillar__.get('master', {}), key, '_|-')
|
||||
if ret != '_|-':
|
||||
ret = salt.utils.data.traverse_dict_and_list(
|
||||
__pillar__.get("master", {}), key, "_|-"
|
||||
)
|
||||
if ret != "_|-":
|
||||
return ret
|
||||
return default
|
||||
|
||||
|
||||
def dot_vals(value):
|
||||
'''
|
||||
"""
|
||||
Pass in a configuration value that should be preceded by the module name
|
||||
and a dot, this will return a list of all read key/value pairs
|
||||
|
||||
|
@ -241,12 +241,12 @@ def dot_vals(value):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' config.dot_vals host
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
for key, val in six.iteritems(__pillar__.get('master', {})):
|
||||
if key.startswith('{0}.'.format(value)):
|
||||
for key, val in six.iteritems(__pillar__.get("master", {})):
|
||||
if key.startswith("{0}.".format(value)):
|
||||
ret[key] = val
|
||||
for key, val in six.iteritems(__opts__):
|
||||
if key.startswith('{0}.'.format(value)):
|
||||
if key.startswith("{0}.".format(value)):
|
||||
ret[key] = val
|
||||
return ret
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Wrap the cp module allowing for managed ssh file transfers
|
||||
'''
|
||||
"""
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
|
@ -17,147 +18,125 @@ from salt.exceptions import CommandExecutionError
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_file(path,
|
||||
dest,
|
||||
saltenv='base',
|
||||
makedirs=False,
|
||||
template=None,
|
||||
gzip=None):
|
||||
'''
|
||||
def get_file(path, dest, saltenv="base", makedirs=False, template=None, gzip=None):
|
||||
"""
|
||||
Send a file from the master to the location in specified
|
||||
|
||||
.. note::
|
||||
|
||||
gzip compression is not supported in the salt-ssh version of
|
||||
cp.get_file. The argument is only accepted for interface compatibility.
|
||||
'''
|
||||
"""
|
||||
if gzip is not None:
|
||||
log.warning('The gzip argument to cp.get_file in salt-ssh is '
|
||||
'unsupported')
|
||||
log.warning("The gzip argument to cp.get_file in salt-ssh is " "unsupported")
|
||||
|
||||
if template is not None:
|
||||
(path, dest) = _render_filenames(path, dest, saltenv, template)
|
||||
|
||||
src = __context__['fileclient'].cache_file(
|
||||
path,
|
||||
saltenv,
|
||||
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
'',
|
||||
**__salt__.kwargs)
|
||||
src = __context__["fileclient"].cache_file(
|
||||
path, saltenv, cachedir=os.path.join("salt-ssh", __salt__.kwargs["id_"])
|
||||
)
|
||||
single = salt.client.ssh.Single(__opts__, "", **__salt__.kwargs)
|
||||
ret = single.shell.send(src, dest, makedirs)
|
||||
return not ret[2]
|
||||
|
||||
|
||||
def get_dir(path, dest, saltenv='base'):
|
||||
'''
|
||||
def get_dir(path, dest, saltenv="base"):
|
||||
"""
|
||||
Transfer a directory down
|
||||
'''
|
||||
src = __context__['fileclient'].cache_dir(
|
||||
path,
|
||||
saltenv,
|
||||
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
|
||||
src = ' '.join(src)
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
'',
|
||||
**__salt__.kwargs)
|
||||
"""
|
||||
src = __context__["fileclient"].cache_dir(
|
||||
path, saltenv, cachedir=os.path.join("salt-ssh", __salt__.kwargs["id_"])
|
||||
)
|
||||
src = " ".join(src)
|
||||
single = salt.client.ssh.Single(__opts__, "", **__salt__.kwargs)
|
||||
ret = single.shell.send(src, dest)
|
||||
return not ret[2]
|
||||
|
||||
|
||||
def get_url(path, dest, saltenv='base'):
|
||||
'''
|
||||
def get_url(path, dest, saltenv="base"):
|
||||
"""
|
||||
retrieve a URL
|
||||
'''
|
||||
src = __context__['fileclient'].cache_file(
|
||||
path,
|
||||
saltenv,
|
||||
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
|
||||
single = salt.client.ssh.Single(
|
||||
__opts__,
|
||||
'',
|
||||
**__salt__.kwargs)
|
||||
"""
|
||||
src = __context__["fileclient"].cache_file(
|
||||
path, saltenv, cachedir=os.path.join("salt-ssh", __salt__.kwargs["id_"])
|
||||
)
|
||||
single = salt.client.ssh.Single(__opts__, "", **__salt__.kwargs)
|
||||
ret = single.shell.send(src, dest)
|
||||
return not ret[2]
|
||||
|
||||
|
||||
def list_states(saltenv='base'):
|
||||
'''
|
||||
def list_states(saltenv="base"):
|
||||
"""
|
||||
List all the available state modules in an environment
|
||||
'''
|
||||
return __context__['fileclient'].list_states(saltenv)
|
||||
"""
|
||||
return __context__["fileclient"].list_states(saltenv)
|
||||
|
||||
|
||||
def list_master(saltenv='base', prefix=''):
|
||||
'''
|
||||
def list_master(saltenv="base", prefix=""):
|
||||
"""
|
||||
List all of the files stored on the master
|
||||
'''
|
||||
return __context__['fileclient'].file_list(saltenv, prefix)
|
||||
"""
|
||||
return __context__["fileclient"].file_list(saltenv, prefix)
|
||||
|
||||
|
||||
def list_master_dirs(saltenv='base', prefix=''):
|
||||
'''
|
||||
def list_master_dirs(saltenv="base", prefix=""):
|
||||
"""
|
||||
List all of the directories stored on the master
|
||||
'''
|
||||
return __context__['fileclient'].dir_list(saltenv, prefix)
|
||||
"""
|
||||
return __context__["fileclient"].dir_list(saltenv, prefix)
|
||||
|
||||
|
||||
def list_master_symlinks(saltenv='base', prefix=''):
|
||||
'''
|
||||
def list_master_symlinks(saltenv="base", prefix=""):
|
||||
"""
|
||||
List all of the symlinks stored on the master
|
||||
'''
|
||||
return __context__['fileclient'].symlink_list(saltenv, prefix)
|
||||
"""
|
||||
return __context__["fileclient"].symlink_list(saltenv, prefix)
|
||||
|
||||
|
||||
def _render_filenames(path, dest, saltenv, template):
|
||||
'''
|
||||
"""
|
||||
Process markup in the :param:`path` and :param:`dest` variables (NOT the
|
||||
files under the paths they ultimately point to) according to the markup
|
||||
format provided by :param:`template`.
|
||||
'''
|
||||
"""
|
||||
if not template:
|
||||
return (path, dest)
|
||||
|
||||
# render the path as a template using path_template_engine as the engine
|
||||
if template not in salt.utils.templates.TEMPLATE_REGISTRY:
|
||||
raise CommandExecutionError(
|
||||
'Attempted to render file paths with unavailable engine '
|
||||
'{0}'.format(template)
|
||||
"Attempted to render file paths with unavailable engine "
|
||||
"{0}".format(template)
|
||||
)
|
||||
|
||||
kwargs = {}
|
||||
kwargs['salt'] = __salt__
|
||||
kwargs['pillar'] = __pillar__
|
||||
kwargs['grains'] = __grains__
|
||||
kwargs['opts'] = __opts__
|
||||
kwargs['saltenv'] = saltenv
|
||||
kwargs["salt"] = __salt__
|
||||
kwargs["pillar"] = __pillar__
|
||||
kwargs["grains"] = __grains__
|
||||
kwargs["opts"] = __opts__
|
||||
kwargs["saltenv"] = saltenv
|
||||
|
||||
def _render(contents):
|
||||
'''
|
||||
"""
|
||||
Render :param:`contents` into a literal pathname by writing it to a
|
||||
temp file, rendering that file, and returning the result.
|
||||
'''
|
||||
"""
|
||||
# write out path to temp file
|
||||
tmp_path_fn = salt.utils.files.mkstemp()
|
||||
with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
|
||||
with salt.utils.files.fopen(tmp_path_fn, "w+") as fp_:
|
||||
fp_.write(salt.utils.stringutils.to_str(contents))
|
||||
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
|
||||
tmp_path_fn,
|
||||
to_str=True,
|
||||
**kwargs
|
||||
tmp_path_fn, to_str=True, **kwargs
|
||||
)
|
||||
salt.utils.files.safe_rm(tmp_path_fn)
|
||||
if not data['result']:
|
||||
if not data["result"]:
|
||||
# Failed to render the template
|
||||
raise CommandExecutionError(
|
||||
'Failed to render file path with error: {0}'.format(
|
||||
data['data']
|
||||
)
|
||||
"Failed to render file path with error: {0}".format(data["data"])
|
||||
)
|
||||
else:
|
||||
return data['data']
|
||||
return data["data"]
|
||||
|
||||
path = _render(path)
|
||||
dest = _render(dest)
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Return/control aspects of the grains data
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import collections
|
||||
import copy
|
||||
import math
|
||||
|
@ -24,34 +25,34 @@ __grains__ = {}
|
|||
|
||||
|
||||
def _serial_sanitizer(instr):
|
||||
'''
|
||||
"""
|
||||
Replaces the last 1/4 of a string with X's
|
||||
'''
|
||||
"""
|
||||
length = len(instr)
|
||||
index = int(math.floor(length * .75))
|
||||
return '{0}{1}'.format(instr[:index], 'X' * (length - index))
|
||||
index = int(math.floor(length * 0.75))
|
||||
return "{0}{1}".format(instr[:index], "X" * (length - index))
|
||||
|
||||
|
||||
_FQDN_SANITIZER = lambda x: 'MINION.DOMAINNAME'
|
||||
_HOSTNAME_SANITIZER = lambda x: 'MINION'
|
||||
_DOMAINNAME_SANITIZER = lambda x: 'DOMAINNAME'
|
||||
_FQDN_SANITIZER = lambda x: "MINION.DOMAINNAME"
|
||||
_HOSTNAME_SANITIZER = lambda x: "MINION"
|
||||
_DOMAINNAME_SANITIZER = lambda x: "DOMAINNAME"
|
||||
|
||||
|
||||
# A dictionary of grain -> function mappings for sanitizing grain output. This
|
||||
# is used when the 'sanitize' flag is given.
|
||||
_SANITIZERS = {
|
||||
'serialnumber': _serial_sanitizer,
|
||||
'domain': _DOMAINNAME_SANITIZER,
|
||||
'fqdn': _FQDN_SANITIZER,
|
||||
'id': _FQDN_SANITIZER,
|
||||
'host': _HOSTNAME_SANITIZER,
|
||||
'localhost': _HOSTNAME_SANITIZER,
|
||||
'nodename': _HOSTNAME_SANITIZER,
|
||||
"serialnumber": _serial_sanitizer,
|
||||
"domain": _DOMAINNAME_SANITIZER,
|
||||
"fqdn": _FQDN_SANITIZER,
|
||||
"id": _FQDN_SANITIZER,
|
||||
"host": _HOSTNAME_SANITIZER,
|
||||
"localhost": _HOSTNAME_SANITIZER,
|
||||
"nodename": _HOSTNAME_SANITIZER,
|
||||
}
|
||||
|
||||
|
||||
def get(key, default='', delimiter=DEFAULT_TARGET_DELIM, ordered=True):
|
||||
'''
|
||||
def get(key, default="", delimiter=DEFAULT_TARGET_DELIM, ordered=True):
|
||||
"""
|
||||
Attempt to retrieve the named value from grains, if the named value is not
|
||||
available return the passed default. The default return is an empty string.
|
||||
|
||||
|
@ -70,20 +71,16 @@ def get(key, default='', delimiter=DEFAULT_TARGET_DELIM, ordered=True):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' grains.get pkg:apache
|
||||
'''
|
||||
"""
|
||||
if ordered is True:
|
||||
grains = __grains__
|
||||
else:
|
||||
grains = salt.utils.json.loads(salt.utils.json.dumps(__grains__))
|
||||
return salt.utils.data.traverse_dict_and_list(
|
||||
__grains__,
|
||||
key,
|
||||
default,
|
||||
delimiter)
|
||||
return salt.utils.data.traverse_dict_and_list(__grains__, key, default, delimiter)
|
||||
|
||||
|
||||
def has_value(key):
|
||||
'''
|
||||
"""
|
||||
Determine whether a named value exists in the grains dictionary.
|
||||
|
||||
Given a grains dictionary that contains the following structure::
|
||||
|
@ -99,14 +96,16 @@ def has_value(key):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' grains.has_value pkg:apache
|
||||
'''
|
||||
return True \
|
||||
if salt.utils.data.traverse_dict_and_list(__grains__, key, False) \
|
||||
"""
|
||||
return (
|
||||
True
|
||||
if salt.utils.data.traverse_dict_and_list(__grains__, key, False)
|
||||
else False
|
||||
)
|
||||
|
||||
|
||||
def items(sanitize=False):
|
||||
'''
|
||||
"""
|
||||
Return all of the minion's grains
|
||||
|
||||
CLI Example:
|
||||
|
@ -120,7 +119,7 @@ def items(sanitize=False):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' grains.items sanitize=True
|
||||
'''
|
||||
"""
|
||||
if salt.utils.data.is_true(sanitize):
|
||||
out = dict(__grains__)
|
||||
for key, func in six.iteritems(_SANITIZERS):
|
||||
|
@ -132,7 +131,7 @@ def items(sanitize=False):
|
|||
|
||||
|
||||
def item(*args, **kwargs):
|
||||
'''
|
||||
"""
|
||||
Return one or more grains
|
||||
|
||||
CLI Example:
|
||||
|
@ -147,14 +146,14 @@ def item(*args, **kwargs):
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' grains.item host sanitize=True
|
||||
'''
|
||||
"""
|
||||
ret = {}
|
||||
for arg in args:
|
||||
try:
|
||||
ret[arg] = __grains__[arg]
|
||||
except KeyError:
|
||||
pass
|
||||
if salt.utils.data.is_true(kwargs.get('sanitize')):
|
||||
if salt.utils.data.is_true(kwargs.get("sanitize")):
|
||||
for arg, func in six.iteritems(_SANITIZERS):
|
||||
if arg in ret:
|
||||
ret[arg] = func(ret[arg])
|
||||
|
@ -162,7 +161,7 @@ def item(*args, **kwargs):
|
|||
|
||||
|
||||
def ls(): # pylint: disable=C0103
|
||||
'''
|
||||
"""
|
||||
Return a list of all available grains
|
||||
|
||||
CLI Example:
|
||||
|
@ -170,16 +169,12 @@ def ls(): # pylint: disable=C0103
|
|||
.. code-block:: bash
|
||||
|
||||
salt '*' grains.ls
|
||||
'''
|
||||
"""
|
||||
return sorted(__grains__)
|
||||
|
||||
|
||||
def filter_by(lookup_dict,
|
||||
grain='os_family',
|
||||
merge=None,
|
||||
default='default',
|
||||
base=None):
|
||||
'''
|
||||
def filter_by(lookup_dict, grain="os_family", merge=None, default="default", base=None):
|
||||
"""
|
||||
.. versionadded:: 0.17.0
|
||||
|
||||
Look up the given grain in a given dictionary for the current OS and return
|
||||
|
@ -256,13 +251,10 @@ def filter_by(lookup_dict,
|
|||
salt '*' grains.filter_by '{Debian: Debheads rule, RedHat: I love my hat}'
|
||||
# this one will render {D: {E: I, G: H}, J: K}
|
||||
salt '*' grains.filter_by '{A: B, C: {D: {E: F,G: H}}}' 'xxx' '{D: {E: I},J: K}' 'C'
|
||||
'''
|
||||
"""
|
||||
ret = lookup_dict.get(
|
||||
__grains__.get(
|
||||
grain, default),
|
||||
lookup_dict.get(
|
||||
default, None)
|
||||
)
|
||||
__grains__.get(grain, default), lookup_dict.get(default, None)
|
||||
)
|
||||
|
||||
if base and base in lookup_dict:
|
||||
base_values = lookup_dict[base]
|
||||
|
@ -271,12 +263,14 @@ def filter_by(lookup_dict,
|
|||
|
||||
elif isinstance(base_values, collections.Mapping):
|
||||
if not isinstance(ret, collections.Mapping):
|
||||
raise SaltException('filter_by default and look-up values must both be dictionaries.')
|
||||
raise SaltException(
|
||||
"filter_by default and look-up values must both be dictionaries."
|
||||
)
|
||||
ret = salt.utils.dictupdate.update(copy.deepcopy(base_values), ret)
|
||||
|
||||
if merge:
|
||||
if not isinstance(merge, collections.Mapping):
|
||||
raise SaltException('filter_by merge argument must be a dictionary.')
|
||||
raise SaltException("filter_by merge argument must be a dictionary.")
|
||||
else:
|
||||
if ret is None:
|
||||
ret = merge
|
||||
|
|
|
@ -1,21 +1,22 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
'''
|
||||
"""
|
||||
Wrapper function for mine operations for salt-ssh
|
||||
|
||||
.. versionadded:: 2015.5.0
|
||||
'''
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import copy
|
||||
|
||||
# Import salt libs
|
||||
import salt.client.ssh
|
||||
|
||||
|
||||
def get(tgt, fun, tgt_type='glob', roster='flat'):
|
||||
'''
|
||||
def get(tgt, fun, tgt_type="glob", roster="flat"):
|
||||
"""
|
||||
Get data from the mine based on the target, function and tgt_type
|
||||
|
||||
This will actually run the function on all targeted minions (like
|
||||
|
@ -34,17 +35,17 @@ def get(tgt, fun, tgt_type='glob', roster='flat'):
|
|||
salt-ssh '*' mine.get '*' network.interfaces
|
||||
salt-ssh '*' mine.get 'myminion' network.interfaces roster=flat
|
||||
salt-ssh '*' mine.get '192.168.5.0' network.ipaddrs roster=scan
|
||||
'''
|
||||
"""
|
||||
# Set up opts for the SSH object
|
||||
opts = copy.deepcopy(__context__['master_opts'])
|
||||
opts = copy.deepcopy(__context__["master_opts"])
|
||||
minopts = copy.deepcopy(__opts__)
|
||||
opts.update(minopts)
|
||||
if roster:
|
||||
opts['roster'] = roster
|
||||
opts['argv'] = [fun]
|
||||
opts['selected_target_option'] = tgt_type
|
||||
opts['tgt'] = tgt
|
||||
opts['arg'] = []
|
||||
opts["roster"] = roster
|
||||
opts["argv"] = [fun]
|
||||
opts["selected_target_option"] = tgt_type
|
||||
opts["tgt"] = tgt
|
||||
opts["arg"] = []
|
||||
|
||||
# Create the SSH object to handle the actual call
|
||||
ssh = salt.client.ssh.SSH(opts)
|
||||
|
@ -56,8 +57,8 @@ def get(tgt, fun, tgt_type='glob', roster='flat'):
|
|||
|
||||
cret = {}
|
||||
for host in rets:
|
||||
if 'return' in rets[host]:
|
||||
cret[host] = rets[host]['return']
|
||||
if "return" in rets[host]:
|
||||
cret[host] = rets[host]["return"]
|
||||
else:
|
||||
cret[host] = rets[host]
|
||||
return cret
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue