Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Update and remove obsolete pylint plugins

This commit is contained in:
  parent f3f54a6eb1
  commit b9be2dec1b

314 changed files with 2209 additions and 3342 deletions

.github/workflows/lint-action.yml (vendored, 2 lines changed)
@@ -23,7 +23,7 @@ jobs:
     if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}

     container:
-      image: ghcr.io/saltstack/salt-ci-containers/python:3.8
+      image: ghcr.io/saltstack/salt-ci-containers/python:3.9

     steps:
       - name: Install System Deps
@@ -1370,23 +1370,6 @@ repos:
       # <---- Doc CI Requirements ----------------------------------------------------------------------------------------

       # ----- Lint CI Requirements -------------------------------------------------------------------------------------->
-      - id: pip-tools-compile
-        alias: compile-ci-lint-3.7-requirements
-        name: Lint CI Py3.7 Requirements
-        files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.7/linux\.txt)))$
-        pass_filenames: false
-        args:
-          - -v
-          - --build-isolation
-          - --py-version=3.7
-          - --platform=linux
-          - --include=requirements/base.txt
-          - --include=requirements/zeromq.txt
-          - --include=requirements/static/pkg/linux.in
-          - --include=requirements/static/ci/linux.in
-          - --include=requirements/static/ci/common.in
-          - --no-emit-index-url
-          - requirements/static/ci/lint.in

       - id: pip-tools-compile
         alias: compile-ci-lint-3.8-requirements
@@ -1762,7 +1745,7 @@ repos:
           - types-attrs
           - types-pyyaml
           - types-requests
-          - python-tools-scripts>=0.20.0
+          - python-tools-scripts==0.20.0

   - repo: https://github.com/saltstack/mirrors-nox
     rev: v2021.6.12
@@ -1770,7 +1753,7 @@ repos:
       - id: nox
         alias: lint-salt
         name: Lint Salt
-        files: ^((setup|noxfile)|(salt|tasks|tools)/.*)\.py$
+        files: ^((setup|noxfile)|(salt|tools)/.*)\.py$
         exclude: >
           (?x)^(
             templates/.*|
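For reference, the effect of the tightened files pattern can be sanity-checked with the plain re module (a quick illustration, not part of the commit; pre-commit applies such patterns to repository-relative paths):

    import re

    # New pattern from the hunk above: tasks/ is no longer routed to the
    # lint-salt hook, while salt/, tools/, setup.py and noxfile.py still are.
    pattern = re.compile(r"^((setup|noxfile)|(salt|tools)/.*)\.py$")

    for path in ("salt/modules/cmdmod.py", "tools/pkg.py", "tasks/docs.py", "noxfile.py"):
        print(path, bool(pattern.search(path)))
    # salt/modules/cmdmod.py True
    # tools/pkg.py True
    # tasks/docs.py False
    # noxfile.py True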
noxfile.py (97 lines changed)
@@ -1300,7 +1300,7 @@ def decompress_dependencies(session):
             # Let's try to fix shebang's
             try:
                 fpath = pathlib.Path(path)
-                contents = fpath.read_text().splitlines()
+                contents = fpath.read_text(encoding="utf-8").splitlines()
                 if (
                     contents[0].startswith("#!")
                     and contents[0].endswith("python")
@@ -1310,7 +1310,9 @@ def decompress_dependencies(session):
                         "Fixing broken shebang in %r",
                         str(fpath.relative_to(REPO_ROOT)),
                     )
-                    fpath.write_text("\n".join([fixed_shebang] + contents[1:]))
+                    fpath.write_text(
+                        "\n".join([fixed_shebang] + contents[1:]), encoding="utf-8"
+                    )
             except UnicodeDecodeError:
                 pass
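Taken together, the two hunks above only make the shebang fix-up read and write with an explicit encoding instead of the locale default. A minimal standalone sketch of that pattern (hypothetical helper name and simplified condition, not the noxfile code itself):

    import pathlib

    def fix_shebang(path, fixed_shebang="#!/usr/bin/env python3"):
        """Rewrite a 'python' shebang line, always decoding/encoding as UTF-8."""
        fpath = pathlib.Path(path)
        try:
            contents = fpath.read_text(encoding="utf-8").splitlines()
        except UnicodeDecodeError:
            return  # not a text file, leave it alone
        if contents and contents[0].startswith("#!") and contents[0].endswith("python"):
            fpath.write_text("\n".join([fixed_shebang] + contents[1:]), encoding="utf-8")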
@@ -1467,48 +1469,26 @@ class Tee:
         return self._first.fileno()


-def _lint(
-    session, rcfile, flags, paths, tee_output=True, upgrade_setuptools_and_pip=True
-):
+def _lint(session, rcfile, flags, paths, upgrade_setuptools_and_pip=True):
     if _upgrade_pip_setuptools_and_wheel(session, upgrade=upgrade_setuptools_and_pip):
-        requirements_file = os.path.join(
+        base_requirements_file = os.path.join(
+            "requirements", "static", "ci", _get_pydir(session), "linux.txt"
+        )
+        lint_requirements_file = os.path.join(
             "requirements", "static", "ci", _get_pydir(session), "lint.txt"
         )
-        install_command = ["--progress-bar=off", "-r", requirements_file]
+        install_command = [
+            "--progress-bar=off",
+            "-r",
+            base_requirements_file,
+            "-r",
+            lint_requirements_file,
+        ]
         session.install(*install_command, silent=PIP_INSTALL_SILENT)

-    if tee_output:
-        session.run("pylint", "--version")
-        pylint_report_path = os.environ.get("PYLINT_REPORT")
-
     cmd_args = ["pylint", "--rcfile={}".format(rcfile)] + list(flags) + list(paths)
-
     cmd_kwargs = {"env": {"PYTHONUNBUFFERED": "1"}}
-
-    if tee_output:
-        stdout = tempfile.TemporaryFile(mode="w+b")
-        cmd_kwargs["stdout"] = Tee(stdout, sys.__stdout__)
-
-    lint_failed = False
-    try:
-        session.run(*cmd_args, **cmd_kwargs)
-    except CommandFailed:
-        lint_failed = True
-        raise
-    finally:
-        if tee_output:
-            stdout.seek(0)
-            contents = stdout.read()
-            if contents:
-                contents = contents.decode("utf-8")
-                sys.stdout.write(contents)
-                sys.stdout.flush()
-                if pylint_report_path:
-                    # Write report
-                    with open(pylint_report_path, "w") as wfh:
-                        wfh.write(contents)
-                    session.log("Report file written to %r", pylint_report_path)
-            stdout.close()
+    session.run(*cmd_args, **cmd_kwargs)


 def _lint_pre_commit(session, rcfile, flags, paths):
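Read from the added lines, the trimmed-down helper now simply installs the base and lint requirement files and runs pylint unbuffered; roughly as follows (a sketch pieced together from the hunk, relying on helpers such as _get_pydir and PIP_INSTALL_SILENT defined elsewhere in noxfile.py):

    def _lint(session, rcfile, flags, paths, upgrade_setuptools_and_pip=True):
        if _upgrade_pip_setuptools_and_wheel(session, upgrade=upgrade_setuptools_and_pip):
            pydir = _get_pydir(session)
            base_reqs = os.path.join("requirements", "static", "ci", pydir, "linux.txt")
            lint_reqs = os.path.join("requirements", "static", "ci", pydir, "lint.txt")
            session.install(
                "--progress-bar=off", "-r", base_reqs, "-r", lint_reqs,
                silent=PIP_INSTALL_SILENT,
            )
        cmd_args = ["pylint", "--rcfile={}".format(rcfile)] + list(flags) + list(paths)
        # PYTHONUNBUFFERED keeps pylint's output streaming instead of being buffered
        session.run(*cmd_args, env={"PYTHONUNBUFFERED": "1"})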
@@ -1527,26 +1507,17 @@ def _lint_pre_commit(session, rcfile, flags, paths):
     from nox.virtualenv import VirtualEnv

     # Let's patch nox to make it run inside the pre-commit virtualenv
-    try:
-        session._runner.venv = VirtualEnv(  # pylint: disable=unexpected-keyword-arg
-            os.environ["VIRTUAL_ENV"],
-            interpreter=session._runner.func.python,
-            reuse_existing=True,
-            venv=True,
-        )
-    except TypeError:
-        # This is still nox-py2
-        session._runner.venv = VirtualEnv(
-            os.environ["VIRTUAL_ENV"],
-            interpreter=session._runner.func.python,
-            reuse_existing=True,
-        )
+    session._runner.venv = VirtualEnv(
+        os.environ["VIRTUAL_ENV"],
+        interpreter=session._runner.func.python,
+        reuse_existing=True,
+        venv=True,
+    )
     _lint(
         session,
         rcfile,
         flags,
         paths,
-        tee_output=False,
         upgrade_setuptools_and_pip=False,
     )

@@ -1554,7 +1525,7 @@ def _lint_pre_commit(session, rcfile, flags, paths):
 @nox.session(python="3")
 def lint(session):
     """
-    Run PyLint against Salt and it's test suite. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt and it's test suite.
     """
     session.notify("lint-salt-{}".format(session.python))
     session.notify("lint-tests-{}".format(session.python))
@@ -1563,21 +1534,21 @@ def lint(session):
 @nox.session(python="3", name="lint-salt")
 def lint_salt(session):
     """
-    Run PyLint against Salt. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt.
     """
     flags = ["--disable=I"]
     if session.posargs:
         paths = session.posargs
     else:
         # TBD replace paths entries when implement pyproject.toml
-        paths = ["setup.py", "noxfile.py", "salt/"]
+        paths = ["setup.py", "noxfile.py", "salt/", "tools/"]
     _lint(session, ".pylintrc", flags, paths)


 @nox.session(python="3", name="lint-tests")
 def lint_tests(session):
     """
-    Run PyLint against Salt and it's test suite. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt and it's test suite.
     """
     flags = ["--disable=I"]
     if session.posargs:
@@ -1590,20 +1561,20 @@ def lint_tests(session):
 @nox.session(python=False, name="lint-salt-pre-commit")
 def lint_salt_pre_commit(session):
     """
-    Run PyLint against Salt. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt.
     """
     flags = ["--disable=I"]
     if session.posargs:
         paths = session.posargs
     else:
-        paths = ["setup.py", "noxfile.py", "salt/"]
+        paths = ["setup.py", "noxfile.py", "salt/", "tools/"]
     _lint_pre_commit(session, ".pylintrc", flags, paths)


 @nox.session(python=False, name="lint-tests-pre-commit")
 def lint_tests_pre_commit(session):
     """
-    Run PyLint against Salt and it's test suite. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt and it's test suite.
     """
     flags = ["--disable=I"]
     if session.posargs:
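With the PYLINT_REPORT/Tee plumbing gone, capturing a report is left to the caller. As an illustration of what the lint-salt session now executes for its default path list (assembled the same way _lint() builds cmd_args above; illustrative only):

    rcfile = ".pylintrc"
    flags = ["--disable=I"]                       # "I" = pylint's informational messages
    paths = ["setup.py", "noxfile.py", "salt/", "tools/"]
    cmd_args = ["pylint", "--rcfile={}".format(rcfile)] + list(flags) + list(paths)
    print(" ".join(cmd_args))
    # pylint --rcfile=.pylintrc --disable=I setup.py noxfile.py salt/ tools/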
@@ -1960,8 +1931,8 @@ def ci_test_onedir_pkgs(session):
             + cmd_args[:]
             + [
                 "--no-install",
-                f"--junitxml=artifacts/xml-unittests-output/test-results-install.xml",
-                f"--log-file=artifacts/logs/runtests-install.log",
+                "--junitxml=artifacts/xml-unittests-output/test-results-install.xml",
+                "--log-file=artifacts/logs/runtests-install.log",
             ]
             + session.posargs
         )
@@ -1978,8 +1949,8 @@ def ci_test_onedir_pkgs(session):
             + cmd_args[:]
             + [
                 "--no-install",
-                f"--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml",
-                f"--log-file=artifacts/logs/runtests-install-rerun.log",
+                "--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml",
+                "--log-file=artifacts/logs/runtests-install-rerun.log",
                 "--lf",
             ]
             + session.posargs
@@ -2,6 +2,6 @@
 --constraint=./py{py_version}/{platform}.txt

 docker
-pylint==2.4.4
-SaltPyLint>=2023.3.8
+pylint~=3.1.0
+SaltPyLint>=2024.2.2
 toml
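For reference (not part of the change itself), the loosened pins behave as follows under the standard packaging library, which implements the same specifier rules pip uses:

    from packaging.specifiers import SpecifierSet

    # "~=3.1.0" is a compatible-release pin: any 3.1.x patch release, but not 3.2.0.
    print(SpecifierSet("~=3.1.0").contains("3.1.4"))        # True
    print(SpecifierSet("~=3.1.0").contains("3.2.0"))        # False

    # The SaltPyLint floor moves forward, excluding releases older than 2024.2.2.
    print(SpecifierSet(">=2024.2.2").contains("2023.3.8"))  # False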
@@ -33,7 +33,7 @@ asn1crypto==1.3.0
     #   -c requirements/static/ci/py3.10/linux.txt
     #   certvalidator
     #   oscrypto
-astroid==2.3.3
+astroid==3.1.0
     # via pylint
 async-timeout==4.0.2
     # via
@@ -145,6 +145,8 @@ cryptography==42.0.3
     #   paramiko
     #   pyopenssl
     #   vcert
+dill==0.3.8
+    # via pylint
 distlib==0.3.2
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
@@ -285,8 +287,6 @@ kubernetes==3.0.0
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   -r requirements/static/ci/common.in
-lazy-object-proxy==1.4.3
-    # via astroid
 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
@@ -320,8 +320,6 @@ mercurial==6.0.1
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   -r requirements/static/ci/linux.in
-modernize==0.5
-    # via saltpylint
 more-itertools==5.0.0
     # via
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
@@ -385,6 +383,7 @@ pathtools==0.1.2
 platformdirs==2.2.0
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
+    #   pylint
     #   virtualenv
 portend==2.4
     # via
@@ -405,8 +404,6 @@ pyasn1==0.4.8
     #   -c requirements/static/ci/py3.10/linux.txt
     #   pyasn1-modules
     #   rsa
-pycodestyle==2.5.0
-    # via saltpylint
 pycparser==2.21 ; python_version >= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
@@ -435,7 +432,7 @@ pyjwt==2.4.0
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   twilio
-pylint==2.4.4
+pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
@@ -571,7 +568,7 @@ s3transfer==0.5.2
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   boto3
-saltpylint==2023.8.3
+saltpylint==2024.2.5
     # via -r requirements/static/ci/lint.in
 scp==0.13.2
     # via
@@ -591,7 +588,6 @@ six==1.16.0
     #   -c requirements/static/ci/../pkg/py3.10/linux.txt
     #   -c requirements/static/ci/py3.10/linux.txt
     #   apscheduler
-    #   astroid
     #   cassandra-driver
     #   cheroot
     #   etcd3-py
@@ -646,6 +642,12 @@ toml==0.10.2
     #   -c requirements/static/ci/py3.10/linux.txt
     #   -r requirements/static/ci/common.in
     #   -r requirements/static/ci/lint.in
+tomli==2.0.1
+    # via
+    #   -c requirements/static/ci/py3.10/linux.txt
+    #   pylint
+tomlkit==0.12.3
+    # via pylint
 tornado==6.1
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
@@ -658,6 +660,10 @@ twilio==7.9.2
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   -r requirements/static/ci/linux.in
+typing-extensions==4.8.0
+    # via
+    #   -c requirements/static/ci/py3.10/linux.txt
+    #   astroid
 tzlocal==3.0
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
@@ -696,8 +702,6 @@ werkzeug==3.0.1
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
     #   moto
-wrapt==1.11.1
-    # via astroid
 xmltodict==0.12.0
     # via
     #   -c requirements/static/ci/py3.10/linux.txt
@@ -33,7 +33,7 @@ asn1crypto==1.3.0
     #   -c requirements/static/ci/py3.11/linux.txt
     #   certvalidator
     #   oscrypto
-astroid==2.3.3
+astroid==3.1.0
     # via pylint
 attrs==23.1.0
     # via
@@ -141,6 +141,8 @@ cryptography==42.0.3
     #   paramiko
     #   pyopenssl
     #   vcert
+dill==0.3.8
+    # via pylint
 distlib==0.3.2
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
@@ -276,8 +278,6 @@ kubernetes==3.0.0
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   -r requirements/static/ci/common.in
-lazy-object-proxy==1.4.3
-    # via astroid
 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
@@ -306,8 +306,6 @@ mercurial==6.0.1
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   -r requirements/static/ci/linux.in
-modernize==0.5
-    # via saltpylint
 more-itertools==5.0.0
     # via
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
@@ -360,6 +358,7 @@ pathtools==0.1.2
 platformdirs==2.2.0
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
+    #   pylint
     #   virtualenv
 portend==2.4
     # via
@@ -380,8 +379,6 @@ pyasn1==0.4.8
     #   -c requirements/static/ci/py3.11/linux.txt
     #   pyasn1-modules
     #   rsa
-pycodestyle==2.5.0
-    # via saltpylint
 pycparser==2.21 ; python_version >= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
@@ -410,7 +407,7 @@ pyjwt==2.4.0
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   twilio
-pylint==2.4.4
+pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
@@ -536,7 +533,7 @@ s3transfer==0.5.2
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   boto3
-saltpylint==2023.8.3
+saltpylint==2024.2.5
     # via -r requirements/static/ci/lint.in
 semantic-version==2.9.0
     # via
@@ -552,7 +549,6 @@ six==1.16.0
     #   -c requirements/static/ci/../pkg/py3.11/linux.txt
     #   -c requirements/static/ci/py3.11/linux.txt
     #   apscheduler
-    #   astroid
     #   cassandra-driver
     #   cheroot
     #   etcd3-py
@@ -604,6 +600,8 @@ toml==0.10.2
     #   -c requirements/static/ci/py3.11/linux.txt
     #   -r requirements/static/ci/common.in
     #   -r requirements/static/ci/lint.in
+tomlkit==0.12.3
+    # via pylint
 tornado==6.1
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
@@ -650,8 +648,6 @@ werkzeug==3.0.1
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
     #   moto
-wrapt==1.11.1
-    # via astroid
 xmltodict==0.12.0
     # via
     #   -c requirements/static/ci/py3.11/linux.txt
@@ -33,7 +33,7 @@ asn1crypto==1.3.0
     #   -c requirements/static/ci/py3.12/linux.txt
     #   certvalidator
     #   oscrypto
-astroid==2.3.3
+astroid==3.1.0
     # via pylint
 attrs==23.1.0
     # via
@@ -141,6 +141,8 @@ cryptography==42.0.3
     #   paramiko
     #   pyopenssl
     #   vcert
+dill==0.3.8
+    # via pylint
 distlib==0.3.2
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
@@ -276,8 +278,6 @@ kubernetes==3.0.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/static/ci/common.in
-lazy-object-proxy==1.4.3
-    # via astroid
 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
@@ -306,8 +306,6 @@ mercurial==6.0.1
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/static/ci/linux.in
-modernize==0.5
-    # via saltpylint
 more-itertools==5.0.0
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
@@ -360,6 +358,7 @@ pathtools==0.1.2
 platformdirs==2.2.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
+    #   pylint
     #   virtualenv
 portend==2.4
     # via
@@ -380,8 +379,6 @@ pyasn1==0.4.8
     #   -c requirements/static/ci/py3.12/linux.txt
     #   pyasn1-modules
     #   rsa
-pycodestyle==2.5.0
-    # via saltpylint
 pycparser==2.21 ; python_version >= "3.9"
     # via
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
@@ -410,7 +407,7 @@ pyjwt==2.4.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   twilio
-pylint==2.4.4
+pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
@@ -536,7 +533,7 @@ s3transfer==0.5.2
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   boto3
-saltpylint==2023.8.3
+saltpylint==2024.2.5
     # via -r requirements/static/ci/lint.in
 semantic-version==2.9.0
     # via
@@ -552,7 +549,6 @@ six==1.16.0
     #   -c requirements/static/ci/../pkg/py3.12/linux.txt
     #   -c requirements/static/ci/py3.12/linux.txt
     #   apscheduler
-    #   astroid
     #   cassandra-driver
     #   cheroot
     #   etcd3-py
@@ -604,6 +600,8 @@ toml==0.10.2
     #   -c requirements/static/ci/py3.12/linux.txt
     #   -r requirements/static/ci/common.in
     #   -r requirements/static/ci/lint.in
+tomlkit==0.12.3
+    # via pylint
 tornado==6.1
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
@@ -650,8 +648,6 @@ werkzeug==3.0.1
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
     #   moto
-wrapt==1.11.1
-    # via astroid
 xmltodict==0.12.0
     # via
     #   -c requirements/static/ci/py3.12/linux.txt
@@ -1,796 +0,0 @@
-#
-# This file is autogenerated by pip-compile
-# To update, run:
-#
-#    pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt
-#
[790 more deleted lines not shown: the pinned Python 3.7 lint dependencies, from aiohttp==3.8.6 through zipp==3.5.0, each with its "# via" annotations, plus the trailing "# The following packages are considered to be unsafe in a requirements file: # setuptools" note; the whole autogenerated py3.7 lint lockfile is removed by this commit.]
@@ -25,7 +25,7 @@ asn1crypto==1.3.0
     #   -c requirements/static/ci/py3.8/linux.txt
     #   certvalidator
     #   oscrypto
-astroid==2.3.3
+astroid==3.1.0
     # via pylint
 async-timeout==4.0.2
     # via
@@ -149,6 +149,8 @@ cryptography==42.0.3
     #   paramiko
     #   pyopenssl
     #   vcert
+dill==0.3.8
+    # via pylint
 distlib==0.3.2
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
@@ -296,8 +298,6 @@ kubernetes==3.0.0
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   -r requirements/static/ci/common.in
-lazy-object-proxy==1.4.3
-    # via astroid
 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
@@ -332,8 +332,6 @@ mercurial==6.0.1
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   -r requirements/static/ci/linux.in
-modernize==0.5
-    # via saltpylint
 more-itertools==5.0.0
     # via
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
@@ -413,6 +411,7 @@ pathtools==0.1.2
 platformdirs==2.2.0
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
+    #   pylint
     #   virtualenv
 portend==2.4
     # via
@@ -433,8 +432,6 @@ pyasn1==0.4.8
     #   -c requirements/static/ci/py3.8/linux.txt
     #   pyasn1-modules
     #   rsa
-pycodestyle==2.5.0
-    # via saltpylint
 pycparser==2.17
     # via
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
@@ -465,7 +462,7 @@ pyjwt==2.4.0
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   twilio
-pylint==2.4.4
+pylint==3.1.0
     # via
     #   -r requirements/static/ci/lint.in
     #   saltpylint
@@ -599,7 +596,7 @@ s3transfer==0.5.2
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   boto3
-saltpylint==2023.8.3
+saltpylint==2024.2.5
     # via -r requirements/static/ci/lint.in
 scp==0.13.2
     # via
@@ -621,7 +618,6 @@ six==1.16.0
     #   -c requirements/static/ci/../pkg/py3.8/linux.txt
     #   -c requirements/static/ci/py3.8/linux.txt
     #   apscheduler
-    #   astroid
     #   cassandra-driver
     #   cheroot
     #   etcd3-py
@@ -687,6 +683,12 @@ toml==0.10.2
     #   -c requirements/static/ci/py3.8/linux.txt
     #   -r requirements/static/ci/common.in
     #   -r requirements/static/ci/lint.in
+tomli==2.0.1
+    # via
+    #   -c requirements/static/ci/py3.8/linux.txt
+    #   pylint
+tomlkit==0.12.3
+    # via pylint
 tornado==6.1
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
@@ -699,6 +701,11 @@ twilio==7.9.2
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
     #   -r requirements/static/ci/linux.in
+typing-extensions==4.8.0
+    # via
+    #   -c requirements/static/ci/py3.8/linux.txt
+    #   astroid
+    #   pylint
 tzlocal==3.0
     # via
     #   -c requirements/static/ci/py3.8/linux.txt
@@ -737,8 +744,6 @@ werkzeug==3.0.1
|
||||||
# via
|
# via
|
||||||
# -c requirements/static/ci/py3.8/linux.txt
|
# -c requirements/static/ci/py3.8/linux.txt
|
||||||
# moto
|
# moto
|
||||||
wrapt==1.11.1
|
|
||||||
# via astroid
|
|
||||||
xmltodict==0.12.0
|
xmltodict==0.12.0
|
||||||
# via
|
# via
|
||||||
# -c requirements/static/ci/py3.8/linux.txt
|
# -c requirements/static/ci/py3.8/linux.txt
|
||||||
|
@@ -25,7 +25,7 @@ asn1crypto==1.3.0
    # -c requirements/static/ci/py3.9/linux.txt
    # certvalidator
    # oscrypto
astroid==2.3.3
astroid==3.1.0
    # via pylint
async-timeout==4.0.2
    # via
@@ -145,6 +145,8 @@ cryptography==42.0.3
    # paramiko
    # pyopenssl
    # vcert
dill==0.3.8
    # via pylint
distlib==0.3.2
    # via
    # -c requirements/static/ci/py3.9/linux.txt
@@ -292,8 +294,6 @@ kubernetes==3.0.0
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # -r requirements/static/ci/common.in
lazy-object-proxy==1.4.3
    # via astroid
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
    # via
    # -c requirements/static/ci/py3.9/linux.txt
@@ -328,8 +328,6 @@ mercurial==6.0.1
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # -r requirements/static/ci/linux.in
modernize==0.5
    # via saltpylint
more-itertools==5.0.0
    # via
    # -c requirements/static/ci/../pkg/py3.9/linux.txt
@@ -409,6 +407,7 @@ pathtools==0.1.2
platformdirs==2.2.0
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # pylint
    # virtualenv
portend==2.4
    # via
@@ -429,8 +428,6 @@ pyasn1==0.4.8
    # -c requirements/static/ci/py3.9/linux.txt
    # pyasn1-modules
    # rsa
pycodestyle==2.5.0
    # via saltpylint
pycparser==2.21 ; python_version >= "3.9"
    # via
    # -c requirements/static/ci/../pkg/py3.9/linux.txt
@@ -463,7 +460,7 @@ pyjwt==2.4.0
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # twilio
pylint==2.4.4
pylint==3.1.0
    # via
    # -r requirements/static/ci/lint.in
    # saltpylint
@@ -597,7 +594,7 @@ s3transfer==0.5.2
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # boto3
saltpylint==2023.8.3
saltpylint==2024.2.5
    # via -r requirements/static/ci/lint.in
scp==0.13.2
    # via
@@ -619,7 +616,6 @@ six==1.16.0
    # -c requirements/static/ci/../pkg/py3.9/linux.txt
    # -c requirements/static/ci/py3.9/linux.txt
    # apscheduler
    # astroid
    # cassandra-driver
    # cheroot
    # etcd3-py
@@ -685,6 +681,12 @@ toml==0.10.2
    # -c requirements/static/ci/py3.9/linux.txt
    # -r requirements/static/ci/common.in
    # -r requirements/static/ci/lint.in
tomli==2.0.1
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # pylint
tomlkit==0.12.3
    # via pylint
tornado==6.1
    # via
    # -c requirements/static/ci/py3.9/linux.txt
@@ -697,6 +699,11 @@ twilio==7.9.2
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # -r requirements/static/ci/linux.in
typing-extensions==4.8.0
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # astroid
    # pylint
tzlocal==3.0
    # via
    # -c requirements/static/ci/py3.9/linux.txt
@@ -735,8 +742,6 @@ werkzeug==3.0.1
    # via
    # -c requirements/static/ci/py3.9/linux.txt
    # moto
wrapt==1.11.1
    # via astroid
xmltodict==0.12.0
    # via
    # -c requirements/static/ci/py3.9/linux.txt

@@ -169,7 +169,7 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
            logging.getLogger(__name__)

        """
        instance = super().__new__(cls)
        instance = super().__new__(cls)  # pylint: disable=no-value-for-parameter

        try:
            max_logger_length = len(

@@ -84,11 +84,10 @@ def auth(username, password, **kwargs):
            if cert.verify(cacert.get_pubkey()):
                log.info("Successfully authenticated certificate: %s", pem)
                return True
            else:
                log.info("Failed to authenticate certificate: %s", pem)
                return False
            log.info("Failed to authenticate certificate: %s", pem)
            return False

        c = OpenSSL.crypto
        c = OpenSSL.crypto  # pylint: disable=used-before-assignment
        cert = c.load_certificate(c.FILETYPE_PEM, pem)

        with salt.utils.files.fopen(cacert_file) as f:
@@ -101,7 +100,7 @@ def auth(username, password, **kwargs):
        cert_asn1 = c.dump_certificate(c.FILETYPE_ASN1, cert)

        # Decode the certificate
        der = asn1.DerSequence()
        der = asn1.DerSequence()  # pylint: disable=used-before-assignment
        der.decode(cert_asn1)

        # The certificate has three parts:

@@ -225,7 +225,7 @@ class AsyncReqChannel:
            if HAS_M2:
                aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding)
            else:
                cipher = PKCS1_OAEP.new(key)
                cipher = PKCS1_OAEP.new(key)  # pylint: disable=used-before-assignment
                aes = cipher.decrypt(ret["key"])

            # Decrypt using the public key.

@@ -206,7 +206,7 @@ class ReqServerChannel:
            if HAS_M2:
                pret["key"] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding)
            else:
                cipher = PKCS1_OAEP.new(pub)
                cipher = PKCS1_OAEP.new(pub)  # pylint: disable=used-before-assignment
                pret["key"] = cipher.encrypt(key)
            if ret is False:
                ret = {}

@@ -90,7 +90,10 @@ class Batch:
        """
        Return the active number of minions to maintain
        """
        partition = lambda x: float(x) / 100.0 * len(self.minions)

        def partition(x):
            return float(x) / 100.0 * len(self.minions)

        try:
            if isinstance(self.opts["batch"], str) and "%" in self.opts["batch"]:
                res = partition(float(self.opts["batch"].strip("%")))

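For reference, the lambda-to-def rewrites in this commit follow the same shape as the minimal sketch below. The names are illustrative only (they are not taken from the Salt sources); the point is that assigning a lambda to a name is flagged by newer pylint releases, while a named function is lint-clean and shows up by name in tracebacks.

    # Before (flagged by pylint):
    #     partition = lambda x: float(x) / 100.0 * total
    # After: an equivalent named function.
    def partition(x, total):
        return float(x) / 100.0 * total
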
@@ -68,7 +68,7 @@ class SaltCP:
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # Path does not exist
                sys.stderr.write("{} does not exist\n".format(path))
                sys.stderr.write(f"{path} does not exist\n")
                sys.exit(42)
            elif exc.errno in (errno.EINVAL, errno.ENOTDIR):
                # Path is a file (EINVAL on Windows, ENOTDIR otherwise)
@@ -97,7 +97,7 @@ class SaltCP:
        Take a path and return the contents of the file as a string
        """
        if not os.path.isfile(fn_):
            err = "The referenced file, {} is not available.".format(fn_)
            err = f"The referenced file, {fn_} is not available."
            sys.stderr.write(err + "\n")
            sys.exit(42)
        with salt.utils.files.fopen(fn_, "r") as fp_:
@@ -211,12 +211,10 @@ class SaltCP:
            log.debug(
                "Copying %s to %starget '%s' as %s%s",
                fn_,
                "{} ".format(selected_target_option)
                if selected_target_option
                else "",
                f"{selected_target_option} " if selected_target_option else "",
                tgt,
                remote_path,
                " (chunk #{})".format(index) if append else "",
                f" (chunk #{index})" if append else "",
            )
            args = [
                tgt,
@@ -261,11 +259,7 @@ class SaltCP:
            log.debug(
                "Creating empty dir %s on %starget '%s'",
                dirname,
                "{} ".format(
                    selected_target_option
                )  # pylint: disable=str-format-in-logging
                if selected_target_option
                else "",
                f"{selected_target_option} " if selected_target_option else "",
                tgt,
            )
            args = [tgt, "cp.recv_chunked", [remote_path, None], timeout]

@@ -8,7 +8,6 @@ The data structure needs to be:
    'key': '<read in the key file>'}
"""

import logging

# The components here are simple, and they need to be and stay simple, we
@@ -1296,7 +1295,7 @@ class LocalClient:
                except KeyError as exc:
                    # This is a safe pass. We're just using the try/except to
                    # avoid having to deep-check for keys.
                    missing_key = exc.__str__().strip("'\"")
                    missing_key = str(exc).strip("'\"")
                    if missing_key == "retcode":
                        log.debug("retcode missing from client return")
                    else:

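The str.format-to-f-string conversions above all reduce to the following pattern; the path used here is a made-up example, not one from the diff.

    path = "/srv/salt/top.sls"
    # Before: message = "{} does not exist\n".format(path)
    # After: an equivalent, shorter f-string.
    message = f"{path} does not exist\n"
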
@@ -559,7 +559,7 @@ class SSH(MultiprocessingStateMixin):
            try:
                retcode = int(retcode)
            except (TypeError, ValueError):
                log.warning(f"Got an invalid retcode for host '{host}': '{retcode}'")
                log.warning("Got an invalid retcode for host '%s': '%s'", host, retcode)
                retcode = 1
            # This job is done, yield
            try:
@@ -573,7 +573,9 @@ class SSH(MultiprocessingStateMixin):
                    retcode = int(remote_retcode)
                except (TypeError, ValueError):
                    log.warning(
                        f"Host '{host}' reported an invalid retcode: '{remote_retcode}'"
                        "Host '%s' reported an invalid retcode: '%s'",
                        host,
                        remote_retcode,
                    )
                    retcode = max(retcode, 1)
            except (KeyError, TypeError):
@@ -599,7 +601,7 @@ class SSH(MultiprocessingStateMixin):
        """
        que = multiprocessing.Queue()
        running = {}
        target_iter = self.targets.__iter__()
        target_iter = iter(self.targets)
        returned = set()
        rets = set()
        init = False
@@ -829,7 +831,7 @@ class SSH(MultiprocessingStateMixin):
        for ret, retcode in self.handle_ssh():
            host = next(iter(ret))
            if not isinstance(retcode, int):
                log.warning(f"Host '{host}' returned an invalid retcode: {retcode}")
                log.warning("Host '%s' returned an invalid retcode: %s", host, retcode)
                retcode = 1
            final_exit = max(final_exit, retcode)

@@ -1784,7 +1786,7 @@ def ssh_version():
        ["ssh", "-V"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ).communicate()
    try:
        version_parts = ret[1].split(b",")[0].split(b"_")[1]
        version_parts = ret[1].split(b",", maxsplit=1)[0].split(b"_")[1]
        parts = []
        for part in version_parts:
            try:

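The logging rewrites in these hunks swap f-strings for lazy %-style arguments. A minimal standalone sketch (values here are invented for illustration):

    import logging

    log = logging.getLogger(__name__)
    host, retcode = "web01", "oops"
    # f-string: interpolated even when WARNING is filtered, and flagged by pylint
    log.warning(f"Got an invalid retcode for host '{host}': '{retcode}'")
    # %-style arguments: interpolation deferred until the record is actually emitted
    log.warning("Got an invalid retcode for host '%s': '%s'", host, retcode)
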
@@ -41,7 +41,6 @@ ARGS = None
# The below line is where OPTIONS can be redefined with internal options
# (rather than cli arguments) when the shim is bundled by
# client.ssh.Single._cmd_str()
# pylint: disable=block-comment-should-start-with-cardinal-space
#%%OPTS


@@ -230,7 +229,9 @@ def get_executable():
    Find executable which matches supported python version in the thin
    """
    pymap = {}
    with open(os.path.join(OPTIONS.saltdir, "supported-versions")) as _fp:
    with open(
        os.path.join(OPTIONS.saltdir, "supported-versions"), encoding="utf-8"
    ) as _fp:
        for line in _fp.readlines():
            ns, v_maj, v_min = line.strip().split(":")
            pymap[ns] = (int(v_maj), int(v_min))
@@ -314,7 +315,7 @@ def main(argv):  # pylint: disable=W0613
            )
        )
        need_deployment()
    with open(code_checksum_path, "r") as vpo:
    with open(code_checksum_path, "r", encoding="utf-8") as vpo:
        cur_code_cs = vpo.readline().strip()
    if cur_code_cs != OPTIONS.code_checksum:
        sys.stderr.write(
@@ -330,7 +331,7 @@ def main(argv):  # pylint: disable=W0613
        sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
        need_deployment()

    with open(os.path.join(OPTIONS.saltdir, "minion"), "w") as config:
    with open(os.path.join(OPTIONS.saltdir, "minion"), "w", encoding="utf-8") as config:
        config.write(OPTIONS.config + "\n")
    if OPTIONS.ext_mods:
        ext_path = os.path.join(OPTIONS.saltdir, EXT_ARCHIVE)
@@ -340,7 +341,7 @@ def main(argv):  # pylint: disable=W0613
        version_path = os.path.join(OPTIONS.saltdir, "ext_version")
        if not os.path.exists(version_path) or not os.path.isfile(version_path):
            need_ext()
        with open(version_path, "r") as vpo:
        with open(version_path, "r", encoding="utf-8") as vpo:
            cur_version = vpo.readline().strip()
        if cur_version != OPTIONS.ext_mods:
            need_ext()

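The open() changes above all add an explicit encoding, which silences pylint's unspecified-encoding warning and avoids locale-dependent decoding. A minimal sketch with a hypothetical path:

    import os

    path = os.path.join("/var/tmp/salt-thin", "supported-versions")  # example path only
    with open(path, encoding="utf-8") as fh:
        lines = fh.read().splitlines()
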
@@ -23,21 +23,28 @@ def _serial_sanitizer(instr):
    return "{}{}".format(instr[:index], "X" * (length - index))


_FQDN_SANITIZER = lambda x: "MINION.DOMAINNAME"
_HOSTNAME_SANITIZER = lambda x: "MINION"
_DOMAINNAME_SANITIZER = lambda x: "DOMAINNAME"


def _fqdn_sanitizer(x):
    return "MINION.DOMAINNAME"


def _hostname_sanitizer(x):
    return "MINION"


def _domainname_sanitizer(x):
    return "DOMAINNAME"


# A dictionary of grain -> function mappings for sanitizing grain output. This
# is used when the 'sanitize' flag is given.
_SANITIZERS = {
    "serialnumber": _serial_sanitizer,
    "domain": _DOMAINNAME_SANITIZER,
    "domain": _domainname_sanitizer,
    "fqdn": _FQDN_SANITIZER,
    "fqdn": _fqdn_sanitizer,
    "id": _FQDN_SANITIZER,
    "id": _fqdn_sanitizer,
    "host": _HOSTNAME_SANITIZER,
    "host": _hostname_sanitizer,
    "localhost": _HOSTNAME_SANITIZER,
    "localhost": _hostname_sanitizer,
    "nodename": _HOSTNAME_SANITIZER,
    "nodename": _hostname_sanitizer,
}


@@ -12,7 +12,9 @@ try:
    from collections.abc import Mapping
except ImportError:
    # We still allow Py2 import because this could be executed in a machine with Py2.
    from collections import Mapping  # pylint: disable=no-name-in-module
    from collections import (  # pylint: disable=no-name-in-module,deprecated-class
        Mapping,
    )


def get(key, default="", merge=False, delimiter=DEFAULT_TARGET_DELIM):

@@ -174,11 +174,11 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            msg = "The following virtual machines are set to be destroyed:\n"
            names = set()
            for alias, drivers in matching.items():
                msg += " {}:\n".format(alias)
                msg += f" {alias}:\n"
                for driver, vms in drivers.items():
                    msg += " {}:\n".format(driver)
                    msg += f" {driver}:\n"
                    for name in vms:
                        msg += " {}\n".format(name)
                        msg += f" {name}\n"
                        names.add(name)
            try:
                if self.print_confirm(msg):
@@ -212,7 +212,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
                    key, value = name.split("=", 1)
                    kwargs[key] = value
                else:
                    msg += " {}\n".format(name)
                    msg += f" {name}\n"
                    machines.append(name)
            names = machines

@@ -255,7 +255,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):

        elif self.options.set_password:
            username = self.credential_username
            provider_name = "salt.cloud.provider.{}".format(self.credential_provider)
            provider_name = f"salt.cloud.provider.{self.credential_provider}"
            # TODO: check if provider is configured
            # set the password
            salt.utils.cloud.store_password_in_keyring(provider_name, username)
@@ -275,7 +275,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            # display profile errors
            msg += "Found the following errors:\n"
            for profile_name, error in dmap["errors"].items():
                msg += " {}: {}\n".format(profile_name, error)
                msg += f" {profile_name}: {error}\n"
            sys.stderr.write(msg)
            sys.stderr.flush()

@@ -283,17 +283,17 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
        if "existing" in dmap:
            msg += "The following virtual machines already exist:\n"
            for name in dmap["existing"]:
                msg += " {}\n".format(name)
                msg += f" {name}\n"

        if dmap["create"]:
            msg += "The following virtual machines are set to be created:\n"
            for name in dmap["create"]:
                msg += " {}\n".format(name)
                msg += f" {name}\n"

        if "destroy" in dmap:
            msg += "The following virtual machines are set to be destroyed:\n"
            for name in dmap["destroy"]:
                msg += " {}\n".format(name)
                msg += f" {name}\n"

        if not dmap["create"] and not dmap.get("destroy", None):
            if not dmap.get("existing", None):
@@ -382,19 +382,17 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
            # This is a salt cloud system exit
            if exc.exit_code > 0:
                # the exit code is bigger than 0, it's an error
                msg = "Error: {}".format(msg)
                msg = f"Error: {msg}"
                self.exit(exc.exit_code, msg.format(exc).rstrip() + "\n")
            # It's not a system exit but it's an error we can
            # handle
            self.error(msg.format(exc))
        # This is a generic exception, log it, include traceback if
        # debug logging is enabled and exit.
        # pylint: disable=str-format-in-logging
        log.error(
            msg.format(exc),
            # Show the traceback if the debug logging level is
            # enabled
            exc_info_on_loglevel=logging.DEBUG,
        )
        # pylint: enable=str-format-in-logging
        self.exit(salt.defaults.exitcodes.EX_GENERIC)

@@ -798,7 +798,7 @@ def query(params=None):
    signature = _compute_signature(parameters, access_key_secret)
    parameters["Signature"] = signature

    request = requests.get(path, params=parameters, verify=True)
    request = requests.get(path, params=parameters, verify=True, timeout=120)
    if request.status_code != 200:
        raise SaltCloudSystemExit(
            "An error occurred while querying aliyun ECS. HTTP Code: {} "

@@ -308,7 +308,7 @@ def get_build_status(req_id, nodename):
    counter = 0
    req_id = str(req_id)
    while counter < 10:
        queue = clc.v1.Blueprint.GetStatus(request_id=(req_id))
        queue = clc.v1.Blueprint.GetStatus(request_id=req_id)
        if queue["PercentComplete"] == 100:
            server_name = queue["Servers"][0]
            creds = get_creds()

@@ -474,9 +474,14 @@ def create(vm_):
                dns_hostname,
                dns_domain,
            )
            __add_dns_addr__ = lambda t, d: post_dns_record(
                dns_domain=dns_domain, name=dns_hostname, record_type=t, record_data=d
            )

            def __add_dns_addr__(t, d):
                return post_dns_record(
                    dns_domain=dns_domain,
                    name=dns_hostname,
                    record_type=t,
                    record_data=d,
                )

            log.debug("create_dns_record: %s", __add_dns_addr__)
        else:
@@ -639,6 +644,7 @@ def query(
            "Authorization": "Bearer " + personal_access_token,
            "Content-Type": "application/json",
        },
        timeout=120,
    )
    if request.status_code > 299:
        raise SaltCloudSystemExit(

@@ -403,7 +403,7 @@ def query(
    log.trace("EC2 Request Parameters: %s", params_with_headers)
    try:
        result = requests.get(
            requesturl, headers=headers, params=params_with_headers
            requesturl, headers=headers, params=params_with_headers, timeout=120
        )
        log.debug(
            "EC2 Response Status Code: %s",
@@ -1198,9 +1198,9 @@ def get_imageid(vm_):
        "Filter.0.Value.0": image,
    }
    # Query AWS, sort by 'creationDate' and get the last imageId
    _t = lambda x: datetime.datetime.strptime(
        x["creationDate"], "%Y-%m-%dT%H:%M:%S.%fZ"
    )
    def _t(x):
        return datetime.datetime.strptime(x["creationDate"], "%Y-%m-%dT%H:%M:%S.%fZ")

    image_id = sorted(
        aws.query(
            params,

@@ -1168,8 +1168,8 @@ def query(action=None, command=None, args=None, method="GET", location=None, dat
        digest = md.final()
        signed = rsa_key.sign(digest, algo="sha256")
    else:
        rsa_ = PKCS1_v1_5.new(rsa_key)
        rsa_ = PKCS1_v1_5.new(rsa_key)  # pylint: disable=used-before-assignment
        hash_ = SHA256.new()
        hash_ = SHA256.new()  # pylint: disable=used-before-assignment
        hash_.update(timestamp.encode(__salt_system_encoding__))
        signed = rsa_.sign(hash_)
        signed = base64.b64encode(signed)

@@ -530,7 +530,9 @@ class LinodeAPIv4(LinodeAPI):
        attempt = 0
        while True:
            try:
                result = requests.request(method, url, json=data, headers=headers)
                result = requests.request(
                    method, url, json=data, headers=headers, timeout=120
                )

                log.debug("Linode API response status code: %d", result.status_code)
                log.trace("Linode API response body: %s", result.text)
@@ -1092,7 +1094,9 @@ class LinodeAPIv4(LinodeAPI):
            "entity.type": entity,
        }
        last_event = None
        condition = lambda event: self._check_event_status(event, status)

        def condition(event):
            return self._check_event_status(event, status)

        while True:
            if last_event is not None:
@@ -1965,8 +1969,8 @@ class LinodeAPIv3(LinodeAPI):

        for key, val in ips.items():
            if key == linode_id:
                this_node["private_ips"] = val["private_ips"]
                this_node["private_ips"] = val[1]
                this_node["public_ips"] = val["public_ips"]
                this_node["public_ips"] = val[0]

        if full:
            this_node["extra"] = node

@@ -137,7 +137,9 @@ def _authenticate():
    connect_data = {"username": username, "password": passwd}
    full_url = "https://{}:{}/api2/json/access/ticket".format(url, port)

    response = requests.post(full_url, verify=verify_ssl, data=connect_data)
    response = requests.post(
        full_url, verify=verify_ssl, data=connect_data, timeout=120
    )
    response.raise_for_status()
    returned_data = response.json()

@@ -171,6 +173,7 @@ def query(conn_type, option, post_data=None):
            data=post_data,
            cookies=ticket,
            headers=httpheaders,
            timeout=120,
        )
    elif conn_type == "put":
        httpheaders["CSRFPreventionToken"] = csrf
@@ -180,6 +183,7 @@ def query(conn_type, option, post_data=None):
            data=post_data,
            cookies=ticket,
            headers=httpheaders,
            timeout=120,
        )
    elif conn_type == "delete":
        httpheaders["CSRFPreventionToken"] = csrf
@@ -189,9 +193,12 @@ def query(conn_type, option, post_data=None):
            data=post_data,
            cookies=ticket,
            headers=httpheaders,
            timeout=120,
        )
    elif conn_type == "get":
        response = requests.get(full_url, verify=verify_ssl, cookies=ticket)
        response = requests.get(
            full_url, verify=verify_ssl, cookies=ticket, timeout=120
        )

    try:
        response.raise_for_status()
@@ -862,7 +869,7 @@ def _import_api():
    """
    global api
    full_url = "https://{}:{}/pve-docs/api-viewer/apidoc.js".format(url, port)
    returned_data = requests.get(full_url, verify=verify_ssl)
    returned_data = requests.get(full_url, verify=verify_ssl, timeout=120)

    re_filter = re.compile(" (?:pveapi|apiSchema) = (.*)^;", re.DOTALL | re.MULTILINE)
    api_json = re_filter.findall(returned_data.text)[0]

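The recurring timeout additions in these cloud drivers guard against requests calls that would otherwise block indefinitely on an unresponsive endpoint; the commit settles on 120 seconds throughout. A minimal sketch against a placeholder URL:

    import requests

    # Without timeout=, requests.get() can hang forever; newer lint checks flag this.
    response = requests.get("https://example.com/api", timeout=120)
    response.raise_for_status()
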
@@ -179,7 +179,7 @@ def query(params=None):
    # print('parameters:')
    # pprint.pprint(real_parameters)

    request = requests.get(path, params=real_parameters, verify=verify_ssl)
    request = requests.get(path, params=real_parameters, verify=verify_ssl, timeout=120)

    # print('url:')
    # print(request.url)
@@ -439,7 +439,7 @@ def _get_size(vm_):
    if not vm_size:
        raise SaltCloudNotFound("No size specified for this instance.")

    if vm_size in sizes.keys():
    if vm_size in sizes:
        return vm_size

    raise SaltCloudNotFound(

@@ -1628,7 +1628,7 @@ def _get_snapshots(snapshot_list, current_snapshot=None, parent_snapshot_path=""
        snapshots[snapshot_path] = {
            "name": snapshot.name,
            "description": snapshot.description,
            "created": str(snapshot.createTime).split(".")[0],
            "created": str(snapshot.createTime).split(".", maxsplit=1)[0],
            "state": snapshot.state,
            "path": snapshot_path,
        }

@@ -47,7 +47,7 @@ class MinionDefaultInclude(DefaultIncludeConfig):

class MasterDefaultInclude(DefaultIncludeConfig):
    __target__ = "master"
    __confd_directory = "master.d"
    __confd_directory = "master.d"  # pylint: disable=unused-private-member


class IncludeConfig(Schema):

@@ -890,8 +890,6 @@ class SlackClient:
            if cmd in runner_functions:
                runner = salt.runner.RunnerClient(__opts__)
                log.debug("Command %s will run via runner_functions", cmd)
                # pylint is tripping
                # pylint: disable=missing-whitespace-after-comma
                job_id_dict = runner.asynchronous(cmd, {"arg": args, "kwarg": kwargs})
                job_id = job_id_dict["jid"]

@@ -995,8 +995,6 @@ class SlackClient:
            if cmd in runner_functions:
                runner = salt.runner.RunnerClient(__opts__)
                log.debug("Command %s will run via runner_functions", cmd)
                # pylint is tripping
                # pylint: disable=missing-whitespace-after-comma
                job_id_dict = runner.asynchronous(cmd, {"arg": args, "kwarg": kwargs})
                job_id = job_id_dict["jid"]

@@ -87,7 +87,7 @@ def mk_gen():
            return True
        return NotImplemented

    generator = type((lambda: (yield))())
    generator = type((lambda: (yield))())  # pylint: disable=unnecessary-direct-lambda-call
    Generator.register(generator)
    return Generator

@@ -2162,8 +2162,7 @@ class IPv6Interface(IPv6Address):
        return x

    def __str__(self):
        return '%s/%d' % (super().__str__(),
                          self._prefixlen)
        return '%s/%d' % (super(), self._prefixlen)

    def __eq__(self, other):
        address_equal = IPv6Address.__eq__(self, other)

@@ -1,4 +1,4 @@
# auto.py is full of patterns mypy doesn't like, so for type checking
# purposes we replace it with interface.py.

from .interface import *
from .interface import *  # pylint: disable=unused-wildcard-import,wildcard-import

@@ -726,7 +726,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
                if cached_md5 == file_md5:
                    return
                else:
                    log.info(f"found different hash for file {path}, updating...")
                    log.info("found different hash for file %s, updating...", path)
            else:
                cached_file_stat = os.stat(cached_file_path)
                cached_file_size = cached_file_stat.st_size
@@ -762,6 +762,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
                    https_enable=https_enable,
                )
                if ret is not None:
                    s3_file_mtime = s3_file_size = None
                    for header_name, header_value in ret["headers"].items():
                        name = header_name.strip()
                        value = header_value.strip()
@@ -771,9 +772,8 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
                            )
                        elif str(name).lower() == "content-length":
                            s3_file_size = int(value)
                    if (
                        cached_file_size == s3_file_size
                        and cached_file_mtime > s3_file_mtime
                    if (s3_file_size and cached_file_size == s3_file_size) and (
                        s3_file_mtime and cached_file_mtime > s3_file_mtime
                    ):
                        log.info(
                            "%s - %s : %s skipped download since cached file size "

@@ -2910,8 +2910,8 @@ def ip_fqdn():
        if not ret["ipv" + ipv_num]:
            ret[key] = []
        else:
            start_time = datetime.datetime.utcnow()
            try:
                start_time = datetime.datetime.utcnow()
                info = socket.getaddrinfo(_fqdn, None, socket_type)
                ret[key] = list({item[4][0] for item in info})
            except (OSError, UnicodeError):

@@ -15,7 +15,7 @@ import time
import traceback
import types
from collections.abc import MutableMapping
from zipimport import zipimporter
from zipimport import zipimporter  # pylint: disable=no-name-in-module

import salt.config
import salt.defaults.events

@@ -769,7 +769,9 @@ class Master(SMaster):
                    mod = ".".join(proc.split(".")[:-1])
                    cls = proc.split(".")[-1]
                    _tmp = __import__(mod, globals(), locals(), [cls], -1)
                    cls = _tmp.__getattribute__(cls)
                    cls = _tmp.__getattribute__(  # pylint: disable=unnecessary-dunder-call
                        cls
                    )
                    name = "ExtProcess({})".format(cls.__qualname__)
                    self.process_manager.add_process(cls, args=(self.opts,), name=name)
                except Exception:  # pylint: disable=broad-except

@@ -239,7 +239,7 @@ if not HAS_APT:
            opts = _get_opts(self.line)
            self.architectures.extend(opts["arch"]["value"])
            self.signedby = opts["signedby"]["value"]
            for opt in opts.keys():
            for opt in opts:
                opt = opts[opt]["full"]
                if opt:
                    try:
@@ -1609,9 +1609,11 @@ def _get_upgradable(dist_upgrade=True, **kwargs):

    # rexp parses lines that look like the following:
    # Conf libxfont1 (1:1.4.5-1 Debian:testing [i386])
    rexp = re.compile("(?m)^Conf " "([^ ]+) " r"\(([^ ]+)")  # Package name # Version
    rexp = re.compile(r"(?m)^Conf ([^ ]+) \(([^ ]+)")  # Package name # Version
    keys = ["name", "version"]
    _get = lambda l, k: l[keys.index(k)]

    def _get(line, k):
        return line[keys.index(k)]

    upgrades = rexp.findall(out)

@@ -1685,7 +1687,10 @@ def version_cmp(pkg1, pkg2, ignore_epoch=False, **kwargs):

        salt '*' pkg.version_cmp '0.2.4-0ubuntu1' '0.2.4.1-0ubuntu1'
    """
    normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)

    def normalize(x):
        return str(x).split(":", 1)[-1] if ignore_epoch else str(x)

    # both apt_pkg.version_compare and _cmd_quote need string arguments.
    pkg1 = normalize(pkg1)
    pkg2 = normalize(pkg2)

@@ -380,7 +380,7 @@ def list_(
        dirs, files, links = func(name, cached, *args)
    except OSError as exc:
        raise CommandExecutionError(
            "Failed to list contents of {}: {}".format(name, exc.__str__())
            "Failed to list contents of {}: {}".format(name, exc)
        )
    except CommandExecutionError as exc:
        raise
@@ -395,9 +395,7 @@ def list_(
            log.debug("Cleaned cached archive %s", cached)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                log.warning(
                    "Failed to clean cached archive %s: %s", cached, exc.__str__()
                )
                log.warning("Failed to clean cached archive %s: %s", cached, exc)

    if strip_components:
        for item in (dirs, files, links):
@@ -796,8 +794,8 @@ def zip_(zip_file, sources, template=None, cwd=None, runas=None, zip64=False):
        os.setegid(uinfo["gid"])
        os.seteuid(uinfo["uid"])

    exc = None
    try:
        exc = None
        archived_files = []
        with contextlib.closing(
            zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED, zip64)
@@ -1203,7 +1201,7 @@ def is_encrypted(name, clean=False, saltenv="base", source_hash=None, use_etag=F
                "{} is not a ZIP file".format(name), info=archive_info
            )
        except Exception as exc:  # pylint: disable=broad-except
            raise CommandExecutionError(exc.__str__(), info=archive_info)
            raise CommandExecutionError(exc, info=archive_info)
        else:
            ret = False

@@ -1213,9 +1211,7 @@ def is_encrypted(name, clean=False, saltenv="base", source_hash=None, use_etag=F
            log.debug("Cleaned cached archive %s", cached)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                log.warning(
                    "Failed to clean cached archive %s: %s", cached, exc.__str__()
                )
                log.warning("Failed to clean cached archive %s: %s", cached, exc)
    return ret


@@ -946,7 +946,7 @@ def _aws_decode(x):
    if "\\" in x:
        return x.decode("unicode_escape")

    if type(x) == bytes:
    if isinstance(x, bytes):
        return x.decode("idna")

    return x

@@ -137,7 +137,7 @@ def topic_exists(name, region=None, key=None, keyid=None, profile=None):
        salt myminion boto3_sns.topic_exists mytopic region=us-east-1
    """
    topics = list_topics(region=region, key=key, keyid=keyid, profile=profile)
    return name in list(topics.values() + topics.keys())
    return name in list(topics.values()) + list(topics)


def create_topic(Name, region=None, key=None, keyid=None, profile=None):

@@ -223,7 +223,10 @@ def _usage_specific(raw):
    """
    Parse usage/specific.
    """
    get_key = lambda val: dict([tuple(val.split(":"))])

    def get_key(val):
        return dict([tuple(val.split(":"))])

    raw = raw.split("\n")
    section, size, used = raw[0].split(" ")
    section = section.replace(",", "_").replace(":", "").lower()

@@ -75,7 +75,7 @@ def _query(
        data = None
    else:
        if data is not None:
            if type(data) != str:
            if not isinstance(data, str):
                data = salt.utils.json.dumps(data)
        else:
            data = salt.utils.json.dumps({})

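The type checks rewritten above follow pylint's unidiomatic-typecheck guidance: isinstance() accepts subclasses and reads more clearly than comparing type() results. A minimal illustration with invented values:

    data = b"example"
    # Before: if type(data) == bytes:
    if isinstance(data, bytes):
        data = data.decode("utf-8")
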
@@ -115,7 +115,7 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
            if os.path.isfile(dest):
                return "Path exists and is a file"
            else:
                return _error(exc.__str__())
                return _error(str(exc))
        return True

    chunk = base64.b64decode(chunk)
@@ -126,12 +126,12 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            # Parent dir does not exist, we need to create it
            return _error(exc.__str__())
            return _error(str(exc))
        try:
            os.makedirs(os.path.dirname(dest))
        except OSError as makedirs_exc:
            # Failed to make directory
            return _error(makedirs_exc.__str__())
            return _error(str(makedirs_exc))
        fh_ = salt.utils.files.fopen(dest, open_mode)  # pylint: disable=W8470

    try:
@@ -139,7 +139,7 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
        fh_.write(salt.utils.gzip_util.uncompress(chunk) if compressed else chunk)
    except OSError as exc:
        # Write failed
        return _error(exc.__str__())
        return _error(str(exc))
    else:
        # Write successful
        if not append and mode is not None:
@@ -149,7 +149,7 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
            try:
                os.chmod(dest, mode)
            except OSError:
                return _error(exc.__str__())
                return _error(str(exc))
            return True
    finally:
        try:

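The exc.__str__() replacements seen throughout the commit are the fix for pylint's unnecessary-dunder-call check; str(exc) is the equivalent, idiomatic spelling. A self-contained sketch:

    try:
        raise OSError("disk full")  # placeholder error for illustration
    except OSError as exc:
        # Before: message = exc.__str__()
        message = str(exc)  # same result, preferred form
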
|
|
@@ -263,7 +263,10 @@ def set_crypttab(
             criteria = entry.pick(match_on)

         except KeyError:
-            filterFn = lambda key: key not in _crypttab_entry.crypttab_keys
+
+            def filterFn(key):
+                return key not in _crypttab_entry.crypttab_keys
+
             invalid_keys = filter(filterFn, match_on)

             msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
@@ -29,7 +29,7 @@ def _temp_exists(method, ip):
     """
     _type = method.replace("temp", "").upper()
     cmd = (
-        "csf -t | awk -v code=1 -v type=_type -v ip=ip '$1==type && $2==ip {{code=0}}"
+        "csf -t | awk -v code=1 -v type={_type} -v ip={ip} '$1==type && $2==ip {{code=0}}"
         " END {{exit code}}'".format(_type=_type, ip=ip)
     )
     exists = __salt__["cmd.run_all"](cmd)
@@ -157,7 +157,9 @@ def cancel_downtime(api_key=None, app_key=None, scope=None, id=None):
     elif scope:
         params = {"api_key": api_key, "application_key": app_key, "scope": scope}
         response = requests.post(
-            "https://app.datadoghq.com/api/v1/downtime/cancel/by_scope", params=params
+            "https://app.datadoghq.com/api/v1/downtime/cancel/by_scope",
+            params=params,
+            timeout=120,
         )
         if response.status_code == 200:
             ret["result"] = True
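The downtime hunk above is the first of many in this commit that pass an explicit `timeout=120` to `requests` calls; without a timeout, `requests` can block indefinitely, and recent pylint versions warn about such calls (`missing-timeout`). A hedged sketch of the pattern, with a placeholder URL standing in for the real API endpoints:

```python
import requests

# Placeholder URL; the point is the explicit timeout keyword added throughout
# this commit so that a hung server cannot block the caller forever.
try:
    response = requests.post(
        "https://example.invalid/api/v1/downtime/cancel/by_scope",
        params={"scope": "env:prod"},
        timeout=120,
    )
    print(response.status_code)
except requests.exceptions.RequestException as exc:
    print(f"request failed: {exc}")
```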
@@ -314,7 +314,7 @@ def MX(domain, resolve=False, nameserver=None):
     stdout = [x.split() for x in cmd["stdout"].split("\n")]

     if resolve:
-        return [(lambda x: [x[0], A(x[1], nameserver)[0]])(x) for x in stdout]
+        return [[x[0], A(x[1], nameserver)[0]] for x in stdout]

     return stdout

@@ -49,7 +49,7 @@ def _parse_numbers(text):
         "Z": "10E21",
         "Y": "10E24",
     }
-    if text[-1] in postPrefixes.keys():
+    if text[-1] in postPrefixes:
         v = decimal.Decimal(text[:-1])
         v = v * decimal.Decimal(postPrefixes[text[-1]])
         return v
@@ -399,7 +399,6 @@ def _get_client(timeout=NOTSET, **kwargs):
                     docker_machine_tls["ClientKeyPath"],
                 ),
                 ca_cert=docker_machine_tls["CaCertPath"],
-                assert_hostname=False,
                 verify=True,
             )
         except Exception as exc:  # pylint: disable=broad-except
@@ -690,9 +689,9 @@ def _client_wrapper(attr, *args, **kwargs):
         raise
     except docker.errors.DockerException as exc:
         # More general docker exception (catches InvalidVersion, etc.)
-        raise CommandExecutionError(exc.__str__())
+        raise CommandExecutionError(str(exc))
     except Exception as exc:  # pylint: disable=broad-except
-        err = exc.__str__()
+        err = str(exc)
     else:
         return ret

@@ -1333,7 +1332,10 @@ def compare_networks(first, second, ignore="Name,Id,Created,Containers"):
             if bool(subval1) is bool(subval2) is False:
                 continue
             elif subkey == "Config":
-                kvsort = lambda x: (list(x.keys()), list(x.values()))
+
+                def kvsort(x):
+                    return (list(x.keys()), list(x.values()))
+
                 config1 = sorted(val1["Config"], key=kvsort)
                 config2 = sorted(val2.get("Config", []), key=kvsort)
                 if config1 != config2:
@@ -3312,7 +3314,7 @@ def create(
         except CommandExecutionError as exc:
             raise CommandExecutionError(
                 "Failed to start container after creation",
-                info={"response": response, "error": exc.__str__()},
+                info={"response": response, "error": str(exc)},
             )
         else:
             response["Started"] = True
@@ -3502,7 +3504,7 @@ def run_container(
                     f"Failed to auto_remove container: {rm_exc}"
                 )
        # Raise original exception with additional info
-        raise CommandExecutionError(exc.__str__(), info=exc_info)
+        raise CommandExecutionError(str(exc), info=exc_info)

     # Start the container
     output = []
@@ -3554,7 +3556,7 @@ def run_container(
                 # it to other_errors as a fallback.
                 exc_info.setdefault("other_errors", []).append(exc.info)
         # Re-raise with all of the available additional info
-        raise CommandExecutionError(exc.__str__(), info=exc_info)
+        raise CommandExecutionError(str(exc), info=exc_info)

     return ret

@@ -4286,7 +4288,7 @@ def dangling(prune=False, force=False):
         try:
             ret.setdefault(image, {})["Removed"] = rmi(image, force=force)
         except Exception as exc:  # pylint: disable=broad-except
-            err = exc.__str__()
+            err = str(exc)
             log.error(err)
             ret.setdefault(image, {})["Comment"] = err
             ret[image]["Removed"] = False
@@ -4606,7 +4608,7 @@ def pull(
         except Exception as exc:  # pylint: disable=broad-except
             raise CommandExecutionError(
                 f"Unable to interpret API event: '{event}'",
-                info={"Error": exc.__str__()},
+                info={"Error": str(exc)},
             )
         try:
             event_type = next(iter(event))
@@ -4700,7 +4702,7 @@ def push(
         except Exception as exc:  # pylint: disable=broad-except
             raise CommandExecutionError(
                 f"Unable to interpret API event: '{event}'",
-                info={"Error": exc.__str__()},
+                info={"Error": str(exc)},
             )
         try:
             event_type = next(iter(event))
@@ -5496,7 +5498,7 @@ def disconnect_all_containers_from_network(network_id):
             disconnect_container_from_network(cname, network_id)
             ret.append(cname)
         except CommandExecutionError as exc:
-            msg = exc.__str__()
+            msg = str(exc)
             if "404" not in msg:
                 # If 404 was in the error, then the container no longer exists,
                 # so to avoid a race condition we won't consider 404 errors to
@@ -148,9 +148,9 @@ def _process_emerge_err(stdout, stderr):
     if slot_conflicts:
         ret["slot conflicts"] = slot_conflicts

-    blocked = re.compile(
-        r"(?m)^\[blocks .+\] " r"([^ ]+/[^ ]+-[0-9]+[^ ]+)" r".*$"
-    ).findall(stdout)
+    blocked = re.compile(r"(?m)^\[blocks .+\] ([^ ]+/[^ ]+-[0-9]+[^ ]+).*$").findall(
+        stdout
+    )

     unsatisfied = re.compile(r"Error: The above package list contains").findall(stderr)

@@ -331,7 +331,9 @@ def _get_upgradable(backtrack=3):
         r".*$"
     )
     keys = ["name", "version"]
-    _get = lambda l, k: l[keys.index(k)]
+
+    def _get(line, k):
+        return line[keys.index(k)]

     upgrades = rexp.findall(out)

@@ -4027,7 +4027,7 @@ def readlink(path, canonicalize=False):
     except OSError as exc:
         if exc.errno == errno.EINVAL:
             raise CommandExecutionError("Not a symbolic link: {}".format(path))
-        raise CommandExecutionError(exc.__str__())
+        raise CommandExecutionError(str(exc))


 def readdir(path):
@@ -5927,7 +5927,7 @@ def get_diff(
                     continue
                 paths.append(cached_path)
         except MinionError as exc:
-            errors.append(salt.utils.stringutils.to_unicode(exc.__str__()))
+            errors.append(salt.utils.stringutils.to_unicode(str(exc)))
             continue

     if errors:
@@ -38,7 +38,7 @@ def start(jail=""):

         salt '*' jail.start [<jail name>]
     """
-    cmd = "service jail onestart {}".format(jail)
+    cmd = f"service jail onestart {jail}"
     return not __salt__["cmd.retcode"](cmd)


@@ -52,7 +52,7 @@ def stop(jail=""):

         salt '*' jail.stop [<jail name>]
     """
-    cmd = "service jail onestop {}".format(jail)
+    cmd = f"service jail onestop {jail}"
     return not __salt__["cmd.retcode"](cmd)


@@ -66,7 +66,7 @@ def restart(jail=""):

         salt '*' jail.restart [<jail name>]
     """
-    cmd = "service jail onerestart {}".format(jail)
+    cmd = f"service jail onerestart {jail}"
     return not __salt__["cmd.retcode"](cmd)


@@ -126,9 +126,7 @@ def show_config(jail):
     """
     ret = {}
     if subprocess.call(["jls", "-nq", "-j", jail]) == 0:
-        jls = subprocess.check_output(
-            ["jls", "-nq", "-j", jail]
-        )  # pylint: disable=minimum-python-version
+        jls = subprocess.check_output(["jls", "-nq", "-j", jail])
         jailopts = salt.utils.args.shlex_split(salt.utils.stringutils.to_unicode(jls))
         for jailopt in jailopts:
             if "=" not in jailopt:
@@ -145,7 +143,7 @@ def show_config(jail):
             line = salt.utils.stringutils.to_unicode(line)
             if not line.strip():
                 continue
-            if not line.startswith("jail_{}_".format(jail)):
+            if not line.startswith(f"jail_{jail}_"):
                 continue
             key, value = line.split("=")
             ret[key.split("_", 2)[2]] = value.split('"')[1]
@@ -1067,7 +1067,7 @@ def clone(
                 url, https_user, https_pass, https_only=True
             )
         except ValueError as exc:
-            raise SaltInvocationError(exc.__str__())
+            raise SaltInvocationError(str(exc))

     command = ["git"] + _format_git_opts(git_opts)
     command.append("clone")
@@ -3044,7 +3044,7 @@ def ls_remote(
                 remote, https_user, https_pass, https_only=True
             )
         except ValueError as exc:
-            raise SaltInvocationError(exc.__str__())
+            raise SaltInvocationError(str(exc))
     command = ["git"] + _format_git_opts(git_opts)
     command.append("ls-remote")
     command.extend(_format_opts(opts))
@@ -4051,7 +4051,7 @@ def remote_refs(
             )
         )
     except ValueError as exc:
-        raise SaltInvocationError(exc.__str__())
+        raise SaltInvocationError(str(exc))
     if filter_:
         command.append(filter_)
     output = _git_run(
@@ -4185,7 +4185,7 @@ def remote_set(
                 url, https_user, https_pass, https_only=True
             )
         except ValueError as exc:
-            raise SaltInvocationError(exc.__str__())
+            raise SaltInvocationError(str(exc))
     command = ["git", "remote", "add", remote, url]
     _git_run(
         command,
@@ -126,6 +126,7 @@ def _api_get(path, server=None):
         auth=_get_auth(server["user"], server["password"]),
         headers=_get_headers(),
         verify=True,
+        timeout=120,
     )
     return _api_response(response)

@@ -141,6 +142,7 @@ def _api_post(path, data, server=None):
         headers=_get_headers(),
         data=salt.utils.json.dumps(data),
         verify=True,
+        timeout=120,
     )
     return _api_response(response)

@@ -156,6 +158,7 @@ def _api_delete(path, data, server=None):
         headers=_get_headers(),
         params=data,
         verify=True,
+        timeout=120,
     )
     return _api_response(response)

@@ -40,7 +40,9 @@ __outputter__ = {
 }

 # http://stackoverflow.com/a/12414913/127816
-_infinitedict = lambda: collections.defaultdict(_infinitedict)
+def _infinitedict():
+    return collections.defaultdict(_infinitedict)
+

 _non_existent_key = "NonExistentValueMagicNumberSpK3hnufdHfeBUXCfqVK"

@@ -54,21 +56,28 @@ def _serial_sanitizer(instr):
     return "{}{}".format(instr[:index], "X" * (length - index))


-_FQDN_SANITIZER = lambda x: "MINION.DOMAINNAME"
-_HOSTNAME_SANITIZER = lambda x: "MINION"
-_DOMAINNAME_SANITIZER = lambda x: "DOMAINNAME"
+def _fqdn_sanitizer(x):
+    return "MINION.DOMAINNAME"
+
+
+def _hostname_sanitizer(x):
+    return "MINION"
+
+
+def _domainname_sanitizer(x):
+    return "DOMAINNAME"
+

 # A dictionary of grain -> function mappings for sanitizing grain output. This
 # is used when the 'sanitize' flag is given.
 _SANITIZERS = {
     "serialnumber": _serial_sanitizer,
-    "domain": _DOMAINNAME_SANITIZER,
-    "fqdn": _FQDN_SANITIZER,
-    "id": _FQDN_SANITIZER,
-    "host": _HOSTNAME_SANITIZER,
-    "localhost": _HOSTNAME_SANITIZER,
-    "nodename": _HOSTNAME_SANITIZER,
+    "domain": _domainname_sanitizer,
+    "fqdn": _fqdn_sanitizer,
+    "id": _fqdn_sanitizer,
+    "host": _hostname_sanitizer,
+    "localhost": _hostname_sanitizer,
+    "nodename": _hostname_sanitizer,
 }

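The grains hunk above shows the other refactor this commit applies repeatedly: lambdas assigned to names are rewritten as small `def` functions, a pattern newer pylint flags (`unnecessary-lambda-assignment`) and which also gives the callable a real `__name__` for tracebacks. A small before/after sketch with made-up sanitizer names, not taken from Salt:

```python
# Old style: a lambda bound to a name; newer pylint flags this pattern.
_redact_old = lambda value: "REDACTED"  # noqa: E731


# New style used throughout this commit: a plain function, same behaviour,
# but with a real __name__ for tracebacks and profiling.
def _redact(value):
    return "REDACTED"


assert _redact_old("secret") == _redact("secret") == "REDACTED"
```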
@@ -241,9 +241,15 @@ def _poll_for_events(
     """
     if action:
         stop_status = ("{}_FAILED".format(action), "{}_COMPLETE".format(action))
-        stop_check = lambda a: a in stop_status
+
+        def stop_check(a):
+            return a in stop_status
+
     else:
-        stop_check = lambda a: a.endswith("_COMPLETE") or a.endswith("_FAILED")
+
+        def stop_check(a):
+            return a.endswith("_COMPLETE") or a.endswith("_FAILED")
+
     timeout_sec = timeout * 60
     no_event_polls = 0
     msg_template = "\n Stack %(name)s %(status)s \n"
@@ -16,15 +16,12 @@
 """
 :codeauthor: Bo Maryniuk <bo@suse.de>
 """

-
 import csv
 import datetime
 import gzip
 import os
 import re
 import shutil
-import sys
-
 from salt.utils.odict import OrderedDict

@@ -182,12 +179,15 @@ class CsvDB:
         :param obj:
         :return:
         """
-        get_type = lambda item: str(type(item)).split("'")[1]
+
+        def get_type(item):
+            return str(type(item)).split("'")[1]
+
         if not os.path.exists(os.path.join(self.db_path, obj._TABLE)):
             with gzip.open(os.path.join(self.db_path, obj._TABLE), "wt") as table_file:
                 csv.writer(table_file).writerow(
                     [
-                        "{col}:{type}".format(col=elm[0], type=get_type(elm[1]))
+                        f"{elm[0]}:{get_type(elm[1])}"
                         for elm in tuple(obj.__dict__.items())
                     ]
                 )
@@ -270,7 +270,7 @@ class CsvDB:
     def _validate_object(self, obj):
         descr = self._tables.get(obj._TABLE)
         if descr is None:
-            raise Exception("Table {} not found.".format(obj._TABLE))
+            raise Exception(f"Table {obj._TABLE} not found.")
         return obj._serialize(self._tables[obj._TABLE])

     def __criteria(self, obj, matches=None, mt=None, lt=None, eq=None):
@@ -333,14 +333,10 @@ class CsvDB:
         return objects

     def _to_type(self, data, type):
-        if type == "int":
+        if type in ("int", "long"):
             data = int(data)
         elif type == "float":
             data = float(data)
-        elif type == "long":
-            # pylint: disable=undefined-variable,incompatible-py3-code
-            data = sys.version_info[0] == 2 and long(data) or int(data)
-            # pylint: enable=undefined-variable,incompatible-py3-code
         else:
             data = str(data)
         return data
@@ -480,11 +480,13 @@ class Query(EnvLoader):
             raise InspectorQueryException(
                 'Unknown "{}" value for parameter "time"'.format(timeformat)
             )
-        tfmt = (
-            lambda param: timeformat == "tz"
-            and time.strftime("%b %d %Y %H:%M:%S", time.gmtime(param))
-            or int(param)
-        )
+
+        def tfmt(param):
+            return (
+                timeformat == "tz"
+                and time.strftime("%b %d %Y %H:%M:%S", time.gmtime(param))
+                or int(param)
+            )

         size_fmt = kwargs.get("size")
         if size_fmt is not None and size_fmt.lower() not in ["b", "kb", "mb", "gb"]:
@@ -525,9 +527,9 @@ class Query(EnvLoader):
                 pld_files.append(pld_data.path)
             else:
                 pld_files[pld_data.path] = {
-                    "uid": self._id_resolv(pld_data.uid, named=(owners == "id")),
+                    "uid": self._id_resolv(pld_data.uid, named=owners == "id"),
                     "gid": self._id_resolv(
-                        pld_data.gid, named=(owners == "id"), uid=False
+                        pld_data.gid, named=owners == "id", uid=False
                     ),
                     "size": _size_format(pld_data.p_size, fmt=size_fmt),
                     "mode": oct(pld_data.mode),
@@ -25,11 +25,11 @@ master config. The configuration is read using :py:func:`config.get
         - "-A FORWARD"
 """

+import argparse
 import logging
 import os
 import re
 import string
-import sys
 import uuid

 import salt.utils.args
@@ -73,7 +73,7 @@ def _has_option(option, family="ipv4"):
         _has_option('--wait')
         _has_option('--check', family='ipv6')
     """
-    cmd = "{} --help".format(_iptables_cmd(family))
+    cmd = f"{_iptables_cmd(family)} --help"
     if option in __salt__["cmd.run_stdout"](cmd, output_loglevel="quiet"):
         return True
     return False
@@ -192,7 +192,7 @@ def version(family="ipv4"):
     IPv6:
         salt '*' iptables.version family=ipv6
     """
-    cmd = "{} --version".format(_iptables_cmd(family))
+    cmd = f"{_iptables_cmd(family)} --version"
     out = __salt__["cmd.run_stdout"](cmd).split()
     return out[1]

@@ -204,7 +204,7 @@ def build_rule(
     position="",
     full=None,
     family="ipv4",
-    **kwargs
+    **kwargs,
 ):
     """
     Build a well-formatted iptables rule based on kwargs. A `table` and `chain`
@@ -316,7 +316,7 @@ def build_rule(
         if not isinstance(match_value, list):
             match_value = match_value.split(",")
         for match in match_value:
-            rule.append("-m {}".format(match))
+            rule.append(f"-m {match}")
             if "name_" in kwargs and match.strip() in ("pknock", "quota2", "recent"):
                 rule.append("--name {}".format(kwargs["name_"]))
                 del kwargs["name_"]
@@ -335,7 +335,7 @@ def build_rule(
         if match_set.startswith("!") or match_set.startswith("not"):
             negative_match_set = "! "
             match_set = re.sub(bang_not_pat, "", match_set)
-        rule.append("-m set {}--match-set {}".format(negative_match_set, match_set))
+        rule.append(f"-m set {negative_match_set}--match-set {match_set}")
         del kwargs["match-set"]

     if "connstate" in kwargs:
@@ -382,7 +382,7 @@ def build_rule(
         else:
             dports = mp_value

-        rule.append("--{} {}".format(multiport_arg, dports))
+        rule.append(f"--{multiport_arg} {dports}")
         del kwargs[multiport_arg]

     if "comment" in kwargs:
@@ -526,11 +526,11 @@ def build_rule(
         if after_jump_argument in kwargs:
             value = kwargs[after_jump_argument]
             if value in (None, ""):  # options without arguments
-                after_jump.append("--{}".format(after_jump_argument))
+                after_jump.append(f"--{after_jump_argument}")
             elif any(ws_char in str(value) for ws_char in string.whitespace):
-                after_jump.append('--{} "{}"'.format(after_jump_argument, value))
+                after_jump.append(f'--{after_jump_argument} "{value}"')
             else:
-                after_jump.append("--{} {}".format(after_jump_argument, value))
+                after_jump.append(f"--{after_jump_argument} {value}")
             del kwargs[after_jump_argument]

     for key in kwargs:
@@ -539,8 +539,8 @@ def build_rule(
         # the value in the kwargs, thus we need to fetch it after that has run
         value = kwargs[key]
         flag = "-" if len(key) == 1 else "--"
-        value = "" if value in (None, "") else " {}".format(value)
-        rule.append("{}{}{}{}".format(negation, flag, key, value))
+        value = "" if value in (None, "") else f" {value}"
+        rule.append(f"{negation}{flag}{key}{value}")

     rule += after_jump

@@ -704,7 +704,7 @@ def save(filename=None, family="ipv4"):
     parent_dir = os.path.dirname(filename)
     if not os.path.isdir(parent_dir):
         os.makedirs(parent_dir)
-    cmd = "{}-save".format(_iptables_cmd(family))
+    cmd = f"{_iptables_cmd(family)}-save"
     ipt = __salt__["cmd.run_stdout"](cmd)

     # regex out the output if configured with filters
@@ -743,26 +743,24 @@ def check(table="filter", chain=None, rule=None, family="ipv4"):
     ipt_cmd = _iptables_cmd(family)

     if _has_option("--check", family):
-        cmd = "{} -t {} -C {} {}".format(ipt_cmd, table, chain, rule)
+        cmd = f"{ipt_cmd} -t {table} -C {chain} {rule}"
         __salt__["cmd.run_stderr"](cmd, output_loglevel="quiet")
         return not __context__["retcode"]
     else:
         _chain_name = hex(uuid.getnode())

         # Create temporary table
-        __salt__["cmd.run"]("{} -t {} -N {}".format(ipt_cmd, table, _chain_name))
-        __salt__["cmd.run"](
-            "{} -t {} -A {} {}".format(ipt_cmd, table, _chain_name, rule)
-        )
+        __salt__["cmd.run"](f"{ipt_cmd} -t {table} -N {_chain_name}")
+        __salt__["cmd.run"](f"{ipt_cmd} -t {table} -A {_chain_name} {rule}")

-        out = __salt__["cmd.run_stdout"]("{}-save".format(ipt_cmd))
+        out = __salt__["cmd.run_stdout"](f"{ipt_cmd}-save")

         # Clean up temporary table
-        __salt__["cmd.run"]("{} -t {} -F {}".format(ipt_cmd, table, _chain_name))
-        __salt__["cmd.run"]("{} -t {} -X {}".format(ipt_cmd, table, _chain_name))
+        __salt__["cmd.run"](f"{ipt_cmd} -t {table} -F {_chain_name}")
+        __salt__["cmd.run"](f"{ipt_cmd} -t {table} -X {_chain_name}")

         for i in out.splitlines():
-            if i.startswith("-A {}".format(_chain_name)):
+            if i.startswith(f"-A {_chain_name}"):
                 if i.replace(_chain_name, chain) in out.splitlines():
                     return True

@@ -792,8 +790,8 @@ def check_chain(table="filter", chain=None, family="ipv4"):
     if not chain:
         return "Error: Chain needs to be specified"

-    cmd = "{}-save -t {}".format(_iptables_cmd(family), table)
-    out = __salt__["cmd.run_stdout"](cmd).find(":{} ".format(chain))
+    cmd = f"{_iptables_cmd(family)}-save -t {table}"
+    out = __salt__["cmd.run_stdout"](cmd).find(f":{chain} ")

     if out != -1:
         out = True
@@ -823,7 +821,7 @@ def new_chain(table="filter", chain=None, family="ipv4"):
         return "Error: Chain needs to be specified"

     wait = "--wait" if _has_option("--wait", family) else ""
-    cmd = "{} {} -t {} -N {}".format(_iptables_cmd(family), wait, table, chain)
+    cmd = f"{_iptables_cmd(family)} {wait} -t {table} -N {chain}"
     out = __salt__["cmd.run_stderr"](cmd)

     if not out:
@@ -851,7 +849,7 @@ def delete_chain(table="filter", chain=None, family="ipv4"):
         return "Error: Chain needs to be specified"

     wait = "--wait" if _has_option("--wait", family) else ""
-    cmd = "{} {} -t {} -X {}".format(_iptables_cmd(family), wait, table, chain)
+    cmd = f"{_iptables_cmd(family)} {wait} -t {table} -X {chain}"
     out = __salt__["cmd.run_stderr"](cmd)

     if not out:
@@ -889,7 +887,7 @@ def append(table="filter", chain=None, rule=None, family="ipv4"):
     returnCheck = check(table, chain, rule, family)
     if isinstance(returnCheck, bool) and returnCheck:
         return False
-    cmd = "{} {} -t {} -A {} {}".format(_iptables_cmd(family), wait, table, chain, rule)
+    cmd = f"{_iptables_cmd(family)} {wait} -t {table} -A {chain} {rule}"
     out = __salt__["cmd.run_stderr"](cmd)
     return not out

@@ -977,7 +975,7 @@ def delete(table, chain=None, position=None, rule=None, family="ipv4"):
         rule = position

     wait = "--wait" if _has_option("--wait", family) else ""
-    cmd = "{} {} -t {} -D {} {}".format(_iptables_cmd(family), wait, table, chain, rule)
+    cmd = f"{_iptables_cmd(family)} {wait} -t {table} -D {chain} {rule}"
     out = __salt__["cmd.run_stderr"](cmd)
     return out

@@ -998,7 +996,7 @@ def flush(table="filter", chain="", family="ipv4"):
     """

     wait = "--wait" if _has_option("--wait", family) else ""
-    cmd = "{} {} -t {} -F {}".format(_iptables_cmd(family), wait, table, chain)
+    cmd = f"{_iptables_cmd(family)} {wait} -t {table} -F {chain}"
     out = __salt__["cmd.run_stderr"](cmd)
     return out

@@ -1016,7 +1014,7 @@ def _parse_conf(conf_file=None, in_mem=False, family="ipv4"):
         with salt.utils.files.fopen(conf_file, "r") as ifile:
             rules = ifile.read()
     elif in_mem:
-        cmd = "{}-save".format(_iptables_cmd(family))
+        cmd = f"{_iptables_cmd(family)}-save"
         rules = __salt__["cmd.run_stdout"](cmd)
     else:
         raise SaltException("A file was not found to parse")
@@ -1057,7 +1055,7 @@ def _parse_conf(conf_file=None, in_mem=False, family="ipv4"):
                     and args[index + 1] != "!"
                     and not args[index + 1].startswith("-")
                 ):
-                    args[index] += " {}".format(args.pop(index + 1))
+                    args[index] += f" {args.pop(index + 1)}"
                 index += 1
             if args[-1].startswith("-"):
                 args.append("")
@@ -1082,17 +1080,8 @@ def _parser():
     iptables(8) and iptables-extensions(8) man pages. They will not all be
     used by all parts of the module; use them intelligently and appropriately.
     """
-    add_arg = None
-    if sys.version.startswith("2.6"):
-        import optparse
-
-        parser = optparse.OptionParser()
-        add_arg = parser.add_option
-    else:
-        import argparse  # pylint: disable=minimum-python-version
-
-        parser = argparse.ArgumentParser()
-        add_arg = parser.add_argument
+    parser = argparse.ArgumentParser()
+    add_arg = parser.add_argument

     # COMMANDS
     add_arg("-A", "--append", dest="append", action="append")
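Most of the iptables hunks above are mechanical `str.format()`-to-f-string conversions; the resulting command strings are identical. A short sketch of the equivalence, with a placeholder path standing in for the real `_iptables_cmd()` output:

```python
table, chain = "filter", "INPUT"
ipt_cmd = "/usr/sbin/iptables"  # placeholder for _iptables_cmd(family)

old_style = "{} -t {} -N {}".format(ipt_cmd, table, chain)
new_style = f"{ipt_cmd} -t {table} -N {chain}"
assert old_style == new_style == "/usr/sbin/iptables -t filter -N INPUT"
```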
|
@ -112,7 +112,7 @@ def _valid_iface(iface):
|
||||||
Validate the specified interface
|
Validate the specified interface
|
||||||
"""
|
"""
|
||||||
ifaces = list_interfaces()
|
ifaces = list_interfaces()
|
||||||
if iface in ifaces.keys():
|
if iface in ifaces:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
|
@ -581,7 +581,7 @@ def _source_encode(source, saltenv):
|
||||||
try:
|
try:
|
||||||
source_url = urllib.parse.urlparse(source)
|
source_url = urllib.parse.urlparse(source)
|
||||||
except TypeError:
|
except TypeError:
|
||||||
return "", {}, "Invalid format for source parameter"
|
return "", {}
|
||||||
|
|
||||||
protos = ("salt", "http", "https", "ftp", "swift", "s3", "file")
|
protos = ("salt", "http", "https", "ftp", "swift", "s3", "file")
|
||||||
|
|
||||||
|
|
|
@ -154,7 +154,7 @@ def _setup_conn_old(**kwargs):
|
||||||
or kubernetes.client.configuration.password != password
|
or kubernetes.client.configuration.password != password
|
||||||
):
|
):
|
||||||
# Recreates API connection if settings are changed
|
# Recreates API connection if settings are changed
|
||||||
kubernetes.client.configuration.__init__()
|
kubernetes.client.configuration.__init__() # pylint: disable=unnecessary-dunder-call
|
||||||
|
|
||||||
kubernetes.client.configuration.host = host
|
kubernetes.client.configuration.host = host
|
||||||
kubernetes.client.configuration.user = username
|
kubernetes.client.configuration.user = username
|
||||||
|
|
|
@ -32,7 +32,9 @@ log = logging.getLogger(__name__)
|
||||||
|
|
||||||
def __virtual__():
|
def __virtual__():
|
||||||
"""Only load this module if the Python ldap module is present"""
|
"""Only load this module if the Python ldap module is present"""
|
||||||
return bool(len(available_backends))
|
if available_backends:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
class LDAPError(Exception):
|
class LDAPError(Exception):
|
||||||
|
|
|
@ -176,7 +176,7 @@ def list_ignored():
|
||||||
# "Safari6.1.2MountainLion-6.1.2",
|
# "Safari6.1.2MountainLion-6.1.2",
|
||||||
# or:
|
# or:
|
||||||
# Safari6.1.2MountainLion-6.1.2
|
# Safari6.1.2MountainLion-6.1.2
|
||||||
rexp = re.compile('(?m)^ ["]?' r'([^,|\s].*[^"|\n|,])[,|"]?')
|
rexp = re.compile(r'(?m)^ ["]?([^,|\s].*[^"|\n|,])[,|"]?')
|
||||||
|
|
||||||
return rexp.findall(out)
|
return rexp.findall(out)
|
||||||
|
|
||||||
|
|
|
@ -186,7 +186,7 @@ def update(clear=False, mine_functions=None):
|
||||||
res = salt.utils.functools.call_function(
|
res = salt.utils.functools.call_function(
|
||||||
__salt__[function_name or function_alias],
|
__salt__[function_name or function_alias],
|
||||||
*function_args,
|
*function_args,
|
||||||
**function_kwargs
|
**function_kwargs,
|
||||||
)
|
)
|
||||||
except Exception: # pylint: disable=broad-except
|
except Exception: # pylint: disable=broad-except
|
||||||
trace = traceback.format_exc()
|
trace = traceback.format_exc()
|
||||||
|
@ -309,17 +309,8 @@ def get(tgt, fun, tgt_type="glob", exclude_minion=False):
|
||||||
# Load from local minion's cache
|
# Load from local minion's cache
|
||||||
if __opts__["file_client"] == "local":
|
if __opts__["file_client"] == "local":
|
||||||
ret = {}
|
ret = {}
|
||||||
is_target = {
|
|
||||||
"glob": __salt__["match.glob"],
|
is_target = __salt__[f"match.{tgt_type}"](tgt)
|
||||||
"pcre": __salt__["match.pcre"],
|
|
||||||
"list": __salt__["match.list"],
|
|
||||||
"grain": __salt__["match.grain"],
|
|
||||||
"grain_pcre": __salt__["match.grain_pcre"],
|
|
||||||
"ipcidr": __salt__["match.ipcidr"],
|
|
||||||
"compound": __salt__["match.compound"],
|
|
||||||
"pillar": __salt__["match.pillar"],
|
|
||||||
"pillar_pcre": __salt__["match.pillar_pcre"],
|
|
||||||
}[tgt_type](tgt)
|
|
||||||
if not is_target:
|
if not is_target:
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
|
@ -866,7 +866,10 @@ def set_fstab(
|
||||||
criteria = entry.pick(match_on)
|
criteria = entry.pick(match_on)
|
||||||
|
|
||||||
except KeyError:
|
except KeyError:
|
||||||
filterFn = lambda key: key not in _fstab_entry.fstab_keys
|
|
||||||
|
def filterFn(key):
|
||||||
|
return key not in _fstab_entry.fstab_keys
|
||||||
|
|
||||||
invalid_keys = filter(filterFn, match_on)
|
invalid_keys = filter(filterFn, match_on)
|
||||||
|
|
||||||
msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
|
msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
|
||||||
|
@ -996,7 +999,10 @@ def set_vfstab(
|
||||||
criteria = entry.pick(match_on)
|
criteria = entry.pick(match_on)
|
||||||
|
|
||||||
except KeyError:
|
except KeyError:
|
||||||
filterFn = lambda key: key not in _vfstab_entry.vfstab_keys
|
|
||||||
|
def filterFn(key):
|
||||||
|
return key not in _vfstab_entry.vfstab_keys
|
||||||
|
|
||||||
invalid_keys = filter(filterFn, match_on)
|
invalid_keys = filter(filterFn, match_on)
|
||||||
|
|
||||||
msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
|
msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
|
||||||
|
@ -1878,7 +1884,10 @@ def set_filesystems(
|
||||||
criteria = entry_ip.pick(match_on)
|
criteria = entry_ip.pick(match_on)
|
||||||
|
|
||||||
except KeyError:
|
except KeyError:
|
||||||
filterFn = lambda key: key not in _FileSystemsEntry.compatibility_keys
|
|
||||||
|
def filterFn(key):
|
||||||
|
return key not in _FileSystemsEntry.compatibility_keys
|
||||||
|
|
||||||
invalid_keys = filter(filterFn, match_on)
|
invalid_keys = filter(filterFn, match_on)
|
||||||
raise CommandExecutionError(
|
raise CommandExecutionError(
|
||||||
'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
|
'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
|
||||||
|
|
|
@ -2394,7 +2394,7 @@ def __grant_generate(
|
||||||
if dbc != "*":
|
if dbc != "*":
|
||||||
# _ and % are authorized on GRANT queries and should get escaped
|
# _ and % are authorized on GRANT queries and should get escaped
|
||||||
# on the db name, but only if not requesting a table level grant
|
# on the db name, but only if not requesting a table level grant
|
||||||
dbc = quote_identifier(dbc, for_grants=(table == "*"))
|
dbc = quote_identifier(dbc, for_grants=table == "*")
|
||||||
if table != "*":
|
if table != "*":
|
||||||
table = quote_identifier(table)
|
table = quote_identifier(table)
|
||||||
# identifiers cannot be used as values, and same thing for grants
|
# identifiers cannot be used as values, and same thing for grants
|
||||||
|
@ -2663,7 +2663,7 @@ def grant_revoke(
|
||||||
if dbc != "*":
|
if dbc != "*":
|
||||||
# _ and % are authorized on GRANT queries and should get escaped
|
# _ and % are authorized on GRANT queries and should get escaped
|
||||||
# on the db name, but only if not requesting a table level grant
|
# on the db name, but only if not requesting a table level grant
|
||||||
s_database = quote_identifier(dbc, for_grants=(table == "*"))
|
s_database = quote_identifier(dbc, for_grants=table == "*")
|
||||||
if dbc == "*":
|
if dbc == "*":
|
||||||
# add revoke for *.*
|
# add revoke for *.*
|
||||||
# before the modification query send to mysql will looks like
|
# before the modification query send to mysql will looks like
|
||||||
|
@ -2764,11 +2764,13 @@ def __do_query_into_hash(conn, sql_str):
|
||||||
|
|
||||||
rtn_results = []
|
rtn_results = []
|
||||||
|
|
||||||
|
cursor = None
|
||||||
try:
|
try:
|
||||||
cursor = conn.cursor()
|
cursor = conn.cursor()
|
||||||
except MySQLdb.MySQLError:
|
except MySQLdb.MySQLError:
|
||||||
log.error("%s: Can't get cursor for SQL->%s", mod, sql_str)
|
log.error("%s: Can't get cursor for SQL->%s", mod, sql_str)
|
||||||
cursor.close()
|
if cursor:
|
||||||
|
cursor.close()
|
||||||
log.debug("%s-->", mod)
|
log.debug("%s-->", mod)
|
||||||
return rtn_results
|
return rtn_results
|
||||||
|
|
||||||
|
|
|
@ -2015,7 +2015,7 @@ def iphexval(ip):
|
||||||
salt '*' network.iphexval 10.0.0.1
|
salt '*' network.iphexval 10.0.0.1
|
||||||
"""
|
"""
|
||||||
a = ip.split(".")
|
a = ip.split(".")
|
||||||
hexval = ["%02X" % int(x) for x in a] # pylint: disable=E1321
|
hexval = ["%02X" % int(x) for x in a]
|
||||||
return "".join(hexval)
|
return "".join(hexval)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -2,24 +2,12 @@
|
||||||
Module for OpenSCAP Management
|
Module for OpenSCAP Management
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
import argparse
|
||||||
|
|
||||||
import shlex
|
import shlex
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
from subprocess import PIPE, Popen
|
from subprocess import PIPE, Popen
|
||||||
|
|
||||||
ArgumentParser = object
|
|
||||||
|
|
||||||
try:
|
|
||||||
import argparse # pylint: disable=minimum-python-version
|
|
||||||
|
|
||||||
ArgumentParser = argparse.ArgumentParser
|
|
||||||
HAS_ARGPARSE = True
|
|
||||||
except ImportError: # python 2.6
|
|
||||||
HAS_ARGPARSE = False
|
|
||||||
|
|
||||||
|
|
||||||
_XCCDF_MAP = {
|
_XCCDF_MAP = {
|
||||||
"eval": {
|
"eval": {
|
||||||
"parser_arguments": [(("--profile",), {"required": True})],
|
"parser_arguments": [(("--profile",), {"required": True})],
|
||||||
|
@ -32,15 +20,10 @@ _XCCDF_MAP = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def __virtual__():
|
class _ArgumentParser(argparse.ArgumentParser):
|
||||||
return HAS_ARGPARSE, "argparse module is required."
|
|
||||||
|
|
||||||
|
|
||||||
class _ArgumentParser(ArgumentParser):
|
|
||||||
def __init__(self, action=None, *args, **kwargs):
|
def __init__(self, action=None, *args, **kwargs):
|
||||||
super().__init__(*args, prog="oscap", **kwargs)
|
super().__init__(*args, prog="oscap", **kwargs)
|
||||||
self.add_argument("action", choices=["eval"])
|
self.add_argument("action", choices=["eval"])
|
||||||
add_arg = None
|
|
||||||
for params, kwparams in _XCCDF_MAP["eval"]["parser_arguments"]:
|
for params, kwparams in _XCCDF_MAP["eval"]["parser_arguments"]:
|
||||||
self.add_argument(*params, **kwparams)
|
self.add_argument(*params, **kwparams)
|
||||||
|
|
||||||
|
|
|
@ -1222,7 +1222,10 @@ def version_cmp(
|
||||||
|
|
||||||
salt '*' pkg.version_cmp '0.2.4-0' '0.2.4.1-0'
|
salt '*' pkg.version_cmp '0.2.4-0' '0.2.4.1-0'
|
||||||
"""
|
"""
|
||||||
normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)
|
|
||||||
|
def normalize(x):
|
||||||
|
return str(x).split(":", 1)[-1] if ignore_epoch else str(x)
|
||||||
|
|
||||||
pkg1 = normalize(pkg1)
|
pkg1 = normalize(pkg1)
|
||||||
pkg2 = normalize(pkg2)
|
pkg2 = normalize(pkg2)
|
||||||
|
|
||||||
|
|
|
@ -99,6 +99,7 @@ def post_data(
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"Authorization": "GenieKey " + api_key,
|
"Authorization": "GenieKey " + api_key,
|
||||||
},
|
},
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
response = requests.post(
|
response = requests.post(
|
||||||
|
@ -108,6 +109,7 @@ def post_data(
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"Authorization": "GenieKey " + api_key,
|
"Authorization": "GenieKey " + api_key,
|
||||||
},
|
},
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
|
|
||||||
return response.status_code, response.text
|
return response.status_code, response.text
|
||||||
|
|
|
@ -180,9 +180,12 @@ def version(*dbs):
|
||||||
salt '*' oracle.version my_db
|
salt '*' oracle.version my_db
|
||||||
"""
|
"""
|
||||||
pillar_dbs = __salt__["pillar.get"]("oracle:dbs")
|
pillar_dbs = __salt__["pillar.get"]("oracle:dbs")
|
||||||
get_version = lambda x: [
|
|
||||||
r[0] for r in run_query(x, "select banner from v$version order by banner")
|
def get_version(x):
|
||||||
]
|
return [
|
||||||
|
r[0] for r in run_query(x, "select banner from v$version order by banner")
|
||||||
|
]
|
||||||
|
|
||||||
result = {}
|
result = {}
|
||||||
if dbs:
|
if dbs:
|
||||||
log.debug("get db versions for: %s", dbs)
|
log.debug("get db versions for: %s", dbs)
|
||||||
|
|
|
@ -174,6 +174,7 @@ def _query(
|
||||||
params=params,
|
params=params,
|
||||||
data=salt.utils.json.dumps(data),
|
data=salt.utils.json.dumps(data),
|
||||||
verify=verify_ssl,
|
verify=verify_ssl,
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
|
|
||||||
if result.text is None or result.text == "":
|
if result.text is None or result.text == "":
|
||||||
|
@ -196,6 +197,7 @@ def _query(
|
||||||
params=params,
|
params=params,
|
||||||
data=data, # Already serialized above, don't do it again
|
data=data, # Already serialized above, don't do it again
|
||||||
verify=verify_ssl,
|
verify=verify_ssl,
|
||||||
|
timeout=120,
|
||||||
).json()
|
).json()
|
||||||
offset = next_page_results["offset"]
|
offset = next_page_results["offset"]
|
||||||
limit = next_page_results["limit"]
|
limit = next_page_results["limit"]
|
||||||
|
|
|
@ -26,7 +26,9 @@ def _repack_pkgs(pkgs, normalize=True):
|
||||||
if normalize and "pkg.normalize_name" in __salt__:
|
if normalize and "pkg.normalize_name" in __salt__:
|
||||||
_normalize_name = __salt__["pkg.normalize_name"]
|
_normalize_name = __salt__["pkg.normalize_name"]
|
||||||
else:
|
else:
|
||||||
_normalize_name = lambda pkgname: pkgname
|
|
||||||
|
def _normalize_name(pkgname):
|
||||||
|
return pkgname
|
||||||
|
|
||||||
repacked_pkgs = {
|
repacked_pkgs = {
|
||||||
_normalize_name(str(x)): str(y) if y is not None else y
|
_normalize_name(str(x)): str(y) if y is not None else y
|
||||||
|
@ -71,7 +73,9 @@ def pack_sources(sources, normalize=True):
|
||||||
if normalize and "pkg.normalize_name" in __salt__:
|
if normalize and "pkg.normalize_name" in __salt__:
|
||||||
_normalize_name = __salt__["pkg.normalize_name"]
|
_normalize_name = __salt__["pkg.normalize_name"]
|
||||||
else:
|
else:
|
||||||
_normalize_name = lambda pkgname: pkgname
|
|
||||||
|
def _normalize_name(pkgname):
|
||||||
|
return pkgname
|
||||||
|
|
||||||
if isinstance(sources, str):
|
if isinstance(sources, str):
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -88,7 +88,7 @@ def _get_config_file(conf, atom):
|
||||||
# parts.repo will be empty if there is no repo part
|
# parts.repo will be empty if there is no repo part
|
||||||
relative_path = parts.repo or "gentoo"
|
relative_path = parts.repo or "gentoo"
|
||||||
elif str(parts.cp).endswith("/*"):
|
elif str(parts.cp).endswith("/*"):
|
||||||
relative_path = str(parts.cp).split("/")[0] + "_"
|
relative_path = str(parts.cp).split("/", maxsplit=1)[0] + "_"
|
||||||
else:
|
else:
|
||||||
relative_path = os.path.join(
|
relative_path = os.path.join(
|
||||||
*[x for x in os.path.split(parts.cp) if x != "*"]
|
*[x for x in os.path.split(parts.cp) if x != "*"]
|
||||||
|
|
|
@ -35,10 +35,6 @@ To prevent Postgres commands from running arbitrarily long, a timeout (in second
|
||||||
postgres.bins_dir: '/usr/pgsql-9.5/bin/'
|
postgres.bins_dir: '/usr/pgsql-9.5/bin/'
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# This pylint error is popping up where there are no colons?
|
|
||||||
# pylint: disable=E8203
|
|
||||||
|
|
||||||
|
|
||||||
import base64
|
import base64
|
||||||
import datetime
|
import datetime
|
||||||
import hashlib
|
import hashlib
|
||||||
|
@ -1007,7 +1003,8 @@ def user_list(
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# will return empty string if return_password = False
|
# will return empty string if return_password = False
|
||||||
_x = lambda s: s if return_password else ""
|
def _x(s):
|
||||||
|
return s if return_password else ""
|
||||||
|
|
||||||
query = "".join(
|
query = "".join(
|
||||||
[
|
[
|
||||||
|
|
|
@ -105,7 +105,7 @@ def set_(device, **kwargs):
|
||||||
"file-hard-limit": 0,
|
"file-hard-limit": 0,
|
||||||
}
|
}
|
||||||
|
|
||||||
current = None
|
current = ret = None
|
||||||
cmd = "setquota"
|
cmd = "setquota"
|
||||||
if "user" in kwargs:
|
if "user" in kwargs:
|
||||||
cmd += " -u {} ".format(kwargs["user"])
|
cmd += " -u {} ".format(kwargs["user"])
|
||||||
|
|
|
@ -165,7 +165,9 @@ def _output_to_dict(cmdoutput, values_mapper=None):
|
||||||
|
|
||||||
ret = {}
|
ret = {}
|
||||||
if values_mapper is None:
|
if values_mapper is None:
|
||||||
values_mapper = lambda string: string.split("\t")
|
|
||||||
|
def values_mapper(string):
|
||||||
|
return string.split("\t")
|
||||||
|
|
||||||
# remove first and last line: Listing ... - ...done
|
# remove first and last line: Listing ... - ...done
|
||||||
data_rows = _strip_listing_to_done(cmdoutput.splitlines())
|
data_rows = _strip_listing_to_done(cmdoutput.splitlines())
|
||||||
|
@ -237,11 +239,11 @@ def list_users(runas=None):
|
||||||
)
|
)
|
||||||
|
|
||||||
# func to get tags from string such as "[admin, monitoring]"
|
# func to get tags from string such as "[admin, monitoring]"
|
||||||
func = (
|
def func(string):
|
||||||
lambda string: [x.strip() for x in string[1:-1].split(",")]
|
if "," in string:
|
||||||
if "," in string
|
return [x.strip() for x in string[1:-1].split(",")]
|
||||||
else [x for x in string[1:-1].split(" ")]
|
return [x for x in string[1:-1].split(" ")]
|
||||||
)
|
|
||||||
return _output_to_dict(res, func)
|
return _output_to_dict(res, func)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -710,7 +710,10 @@ def version_cmp(ver1, ver2, ignore_epoch=False):
|
||||||
|
|
||||||
salt '*' pkg.version_cmp '0.2-001' '0.2.0.1-002'
|
salt '*' pkg.version_cmp '0.2-001' '0.2.0.1-002'
|
||||||
"""
|
"""
|
||||||
normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)
|
|
||||||
|
def normalize(x):
|
||||||
|
return str(x).split(":", 1)[-1] if ignore_epoch else str(x)
|
||||||
|
|
||||||
ver1 = normalize(ver1)
|
ver1 = normalize(ver1)
|
||||||
ver2 = normalize(ver2)
|
ver2 = normalize(ver2)
|
||||||
|
|
||||||
|
|
|
@ -476,7 +476,7 @@ def run_state_tests(state, saltenv=None, check_all=False, only_fails=False):
|
||||||
|
|
||||||
# Check for situations to disable parallization
|
# Check for situations to disable parallization
|
||||||
if parallel:
|
if parallel:
|
||||||
if type(num_proc) == float:
|
if isinstance(num_proc, float):
|
||||||
num_proc = int(num_proc)
|
num_proc = int(num_proc)
|
||||||
|
|
||||||
if multiprocessing.cpu_count() < 2:
|
if multiprocessing.cpu_count() < 2:
|
||||||
|
|
|
@ -375,7 +375,7 @@ def _validate_filetype(filetype):
|
||||||
Checks if the given filetype is a valid SELinux filetype
|
Checks if the given filetype is a valid SELinux filetype
|
||||||
specification. Throws an SaltInvocationError if it isn't.
|
specification. Throws an SaltInvocationError if it isn't.
|
||||||
"""
|
"""
|
||||||
if filetype not in _SELINUX_FILETYPES.keys():
|
if filetype not in _SELINUX_FILETYPES:
|
||||||
raise SaltInvocationError("Invalid filetype given: {}".format(filetype))
|
raise SaltInvocationError("Invalid filetype given: {}".format(filetype))
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
|
@ -39,6 +39,7 @@ def __virtual__():
|
||||||
"""
|
"""
|
||||||
if has_sense_hat:
|
if has_sense_hat:
|
||||||
try:
|
try:
|
||||||
|
global _sensehat
|
||||||
_sensehat = SenseHat()
|
_sensehat = SenseHat()
|
||||||
except OSError:
|
except OSError:
|
||||||
return (
|
return (
|
||||||
|
|
|
@ -89,6 +89,7 @@ def create(name, **params):
|
||||||
"https://api.serverdensity.io/inventory/devices/",
|
"https://api.serverdensity.io/inventory/devices/",
|
||||||
params={"token": get_sd_auth("api_token")},
|
params={"token": get_sd_auth("api_token")},
|
||||||
data=params,
|
data=params,
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
log.debug("Server Density API Response: %s", api_response)
|
log.debug("Server Density API Response: %s", api_response)
|
||||||
log.debug("Server Density API Response content: %s", api_response.content)
|
log.debug("Server Density API Response content: %s", api_response.content)
|
||||||
|
@ -120,6 +121,7 @@ def delete(device_id):
|
||||||
api_response = requests.delete(
|
api_response = requests.delete(
|
||||||
"https://api.serverdensity.io/inventory/devices/" + device_id,
|
"https://api.serverdensity.io/inventory/devices/" + device_id,
|
||||||
params={"token": get_sd_auth("api_token")},
|
params={"token": get_sd_auth("api_token")},
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
log.debug("Server Density API Response: %s", api_response)
|
log.debug("Server Density API Response: %s", api_response)
|
||||||
log.debug("Server Density API Response content: %s", api_response.content)
|
log.debug("Server Density API Response content: %s", api_response.content)
|
||||||
|
@ -171,6 +173,7 @@ def ls(**params):
|
||||||
"token": get_sd_auth("api_token"),
|
"token": get_sd_auth("api_token"),
|
||||||
"filter": salt.utils.json.dumps(params),
|
"filter": salt.utils.json.dumps(params),
|
||||||
},
|
},
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
log.debug("Server Density API Response: %s", api_response)
|
log.debug("Server Density API Response: %s", api_response)
|
||||||
log.debug("Server Density API Response content: %s", api_response.content)
|
log.debug("Server Density API Response content: %s", api_response.content)
|
||||||
|
@ -209,6 +212,7 @@ def update(device_id, **params):
|
||||||
"https://api.serverdensity.io/inventory/devices/" + device_id,
|
"https://api.serverdensity.io/inventory/devices/" + device_id,
|
||||||
params={"token": get_sd_auth("api_token")},
|
params={"token": get_sd_auth("api_token")},
|
||||||
data=params,
|
data=params,
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
log.debug("Server Density API Response: %s", api_response)
|
log.debug("Server Density API Response: %s", api_response)
|
||||||
log.debug("Server Density API Response content: %s", api_response.content)
|
log.debug("Server Density API Response content: %s", api_response.content)
|
||||||
|
|
|
@@ -37,13 +37,16 @@ def _exit_status(retcode, stderr=None):
     """
     Translate exit status of imgadm
     """
-    ret = {
-        0: "Successful completion.",
-        1: "An error occurred." if not stderr else stderr,
-        2: "Usage error.",
-        3: "Image not installed.",
-    }[retcode]
-    return ret
+    if retcode == 0:
+        return "Successful completion."
+    if retcode == 1:
+        if stderr:
+            return stderr
+        return "An error occurred."
+    if retcode == 2:
+        return "Usage error."
+    if retcode == 3:
+        return "Image not installed."
 
 
 def _parse_image_meta(image=None, detail=False):

@@ -43,10 +43,12 @@ def _exit_status(retcode):
     """
     Translate exit status of vmadm
     """
-    ret = {0: "Successful completion.", 1: "An error occurred.", 2: "Usage error."}[
-        retcode
-    ]
-    return ret
+    if retcode == 0:
+        return "Successful completion."
+    if retcode == 1:
+        return "An error occurred."
+    if retcode == 2:
+        return "Usage error."
 
 
 def _create_update_from_file(mode="create", uuid=None, path=None):

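Both `_exit_status` rewrites above trade a dict lookup for an explicit if-chain. One behavioural difference worth noting (my reading of the code, not something the diff states): the old `{...}[retcode]` raised `KeyError` for an unexpected exit code, while the new form falls through and returns `None`. A small illustrative sketch:

    def exit_status_dict(retcode):
        # old shape: unknown codes raise KeyError
        return {
            0: "Successful completion.",
            1: "An error occurred.",
            2: "Usage error.",
        }[retcode]


    def exit_status_if(retcode):
        # new shape: unknown codes fall through and return None
        if retcode == 0:
            return "Successful completion."
        if retcode == 1:
            return "An error occurred."
        if retcode == 2:
            return "Usage error."


    print(exit_status_if(2))   # Usage error.
    print(exit_status_if(42))  # None
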
@@ -114,7 +114,7 @@ def info(name):
     }
 
     try:
-        data = pwd.getpwnam(name)
+        data = pwd.getpwnam(name)  # pylint: disable=used-before-assignment
         ret.update({"name": name})
     except KeyError:
         return ret

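The inline `# pylint: disable=used-before-assignment` above is the usual workaround when a module such as `pwd` is only imported inside a guard (it does not exist on Windows), so pylint cannot prove the name is bound at the point of use. A minimal sketch of that pattern; the module layout here is hypothetical, not the actual Salt module:

    try:
        import pwd  # POSIX-only module; missing on Windows

        HAS_PWD = True
    except ImportError:
        HAS_PWD = False


    def info(name):
        if not HAS_PWD:
            return {}
        # pylint cannot prove that the guard above keeps this line from running
        # when the import failed, so it reports used-before-assignment here.
        data = pwd.getpwnam(name)  # pylint: disable=used-before-assignment
        return {"name": data.pw_name, "uid": data.pw_uid}
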
@ -162,7 +162,7 @@ def create(name, profile="splunk", **kwargs):
|
||||||
_req_url = "{}/servicesNS/{}/search/saved/searches/{}/acl".format(
|
_req_url = "{}/servicesNS/{}/search/saved/searches/{}/acl".format(
|
||||||
url, config.get("username"), urllib.parse.quote(name)
|
url, config.get("username"), urllib.parse.quote(name)
|
||||||
)
|
)
|
||||||
requests.post(_req_url, auth=auth, verify=True, data=data)
|
requests.post(_req_url, auth=auth, verify=True, data=data, timeout=120)
|
||||||
return _get_splunk_search_props(search)
|
return _get_splunk_search_props(search)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@@ -186,10 +186,10 @@ def custom():
             try:
                 ret[item] = vals[item]
             except KeyError:
-                log.warning(f"val {item} not in return of {func}")
+                log.warning("val %s not in return of %s", item, func)
                 ret[item] = "UNKNOWN"
         except KeyError:
-            log.warning(f"custom status {func} isn't loaded")
+            log.warning("custom status %s isn't loaded", func)
 
     return ret
 

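The logging changes above replace f-strings with %-style arguments, which is what pylint's `logging-fstring-interpolation` warning asks for: the `logging` module defers formatting until a handler actually emits the record, so the arguments should be passed separately instead of pre-formatted. A short sketch:

    import logging

    logging.basicConfig(level=logging.ERROR)
    log = logging.getLogger(__name__)
    item, func = "cpu", "custom_status()"

    # Eager: the f-string is built even though WARNING records are filtered out.
    log.warning(f"val {item} not in return of {func}")

    # Lazy: the message is only formatted if a handler actually emits the record.
    log.warning("val %s not in return of %s", item, func)
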
@@ -1361,7 +1361,10 @@ def netdev():
         """
         freebsd specific implementation of netdev
         """
-        _dict_tree = lambda: collections.defaultdict(_dict_tree)
+
+        def _dict_tree():
+            return collections.defaultdict(_dict_tree)
+
         ret = _dict_tree()
         netstat = __salt__["cmd.run"]("netstat -i -n -4 -b -d").splitlines()
         netstat += __salt__["cmd.run"]("netstat -i -n -6 -b -d").splitlines()[1:]

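The lambda-to-def change above keeps the same self-referential `defaultdict` trick (an autovivifying tree where missing keys create nested dicts on access) and only switches to a named function, in line with pylint's preference for `def` over assigned lambdas. A quick sketch of what `_dict_tree` builds:

    import collections


    def _dict_tree():
        return collections.defaultdict(_dict_tree)


    ret = _dict_tree()
    ret["em0"]["IPv4"]["recv-bytes"] = 42  # intermediate levels are created on access
    print(ret["em0"]["IPv4"]["recv-bytes"])  # 42
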
@ -110,7 +110,7 @@ def _http_request(url, method="GET", headers=None, data=None):
|
||||||
"""
|
"""
|
||||||
Make the HTTP request and return the body as python object.
|
Make the HTTP request and return the body as python object.
|
||||||
"""
|
"""
|
||||||
req = requests.request(method, url, headers=headers, data=data)
|
req = requests.request(method, url, headers=headers, data=data, timeout=120)
|
||||||
ret = _default_ret()
|
ret = _default_ret()
|
||||||
ok_status = METHOD_OK_STATUS.get(method, 200)
|
ok_status = METHOD_OK_STATUS.get(method, 200)
|
||||||
if req.status_code != ok_status:
|
if req.status_code != ok_status:
|
||||||
|
|
|
@@ -374,7 +374,7 @@ def node_ls(server=str):
     try:
         salt_return = {}
         client = docker.APIClient(base_url="unix://var/run/docker.sock")
-        service = client.nodes(filters=({"name": server}))
+        service = client.nodes(filters={"name": server})
         getdata = salt.utils.json.dumps(service)
         dump = salt.utils.json.loads(getdata)
         for items in dump:

@@ -93,14 +93,14 @@ def set_(name, value, **kwargs):
     # YES, NO, Yes, No, True, False, etc. to boolean types. However, in this case,
     # we will check to see if that happened and replace it with "YES" or "NO" because
     # those items are accepted in sysrc.
-    if type(value) == bool:
+    if isinstance(value, bool):
         if value:
             value = "YES"
         else:
             value = "NO"
 
     # This is here for the same reason, except for numbers
-    if type(value) == int:
+    if isinstance(value, int):
         value = str(value)
 
     cmd += " " + name + '="' + value + '"'

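The `type(value) == bool` to `isinstance(value, bool)` changes above follow pylint's `unidiomatic-typecheck` warning; `isinstance` also accepts subclasses, which is normally why it is preferred. One subtlety worth keeping in mind (not raised by the diff itself): since `bool` is a subclass of `int`, the boolean check must come before the integer check, as the original code already does. A small sketch:

    def normalize(value):
        # bool must be tested before int: isinstance(True, int) is also True
        if isinstance(value, bool):
            return "YES" if value else "NO"
        if isinstance(value, int):
            return str(value)
        return value


    print(normalize(True))   # YES
    print(normalize(7))      # 7
    print(normalize("off"))  # off
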
@@ -55,9 +55,6 @@ VALID_UNIT_TYPES = (
 # Define the module's virtual name
 __virtualname__ = "service"
 
-# Disable check for string substitution
-# pylint: disable=E1321
-
 
 def __virtual__():
     """

@@ -113,7 +113,7 @@ def _post_message(message, chat_id, token):
     parameters["text"] = message
 
     try:
-        response = requests.post(url, data=parameters)
+        response = requests.post(url, data=parameters, timeout=120)
         result = response.json()
 
         log.debug("Raw response of the telegram request is %s", response)

@ -101,7 +101,7 @@ def _retrieve_channel_id(email, profile="telemetry"):
|
||||||
_get_telemetry_base(profile)
|
_get_telemetry_base(profile)
|
||||||
+ "/notification-channels?_type=EmailNotificationChannel"
|
+ "/notification-channels?_type=EmailNotificationChannel"
|
||||||
)
|
)
|
||||||
response = requests.get(get_url, headers=auth)
|
response = requests.get(get_url, headers=auth, timeout=120)
|
||||||
|
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
cache_result = {}
|
cache_result = {}
|
||||||
|
@@ -140,7 +140,7 @@ def get_alert_config(
         get_url = _get_telemetry_base(profile) + "/alerts?deployment={}".format(
             deployment_id
         )
-        response = requests.get(get_url, headers=auth)
+        response = requests.get(get_url, headers=auth, timeout=120)
     except requests.exceptions.RequestException as e:
         log.error(str(e))
         return False

@ -197,7 +197,7 @@ def get_notification_channel_id(notify_channel, profile="telemetry"):
|
||||||
"email": notify_channel,
|
"email": notify_channel,
|
||||||
}
|
}
|
||||||
response = requests.post(
|
response = requests.post(
|
||||||
post_url, data=salt.utils.json.dumps(data), headers=auth
|
post_url, data=salt.utils.json.dumps(data), headers=auth, timeout=120
|
||||||
)
|
)
|
||||||
if response.status_code == 200:
|
if response.status_code == 200:
|
||||||
log.info(
|
log.info(
|
||||||
|
@ -236,6 +236,7 @@ def get_alarms(deployment_id, profile="telemetry"):
|
||||||
_get_telemetry_base(profile)
|
_get_telemetry_base(profile)
|
||||||
+ "/alerts?deployment={}".format(deployment_id),
|
+ "/alerts?deployment={}".format(deployment_id),
|
||||||
headers=auth,
|
headers=auth,
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
except requests.exceptions.RequestException as e:
|
except requests.exceptions.RequestException as e:
|
||||||
log.error(str(e))
|
log.error(str(e))
|
||||||
|
@ -293,7 +294,10 @@ def create_alarm(deployment_id, metric_name, data, api_key=None, profile="teleme
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response = requests.post(
|
response = requests.post(
|
||||||
request_uri, data=salt.utils.json.dumps(post_body), headers=auth
|
request_uri,
|
||||||
|
data=salt.utils.json.dumps(post_body),
|
||||||
|
headers=auth,
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
except requests.exceptions.RequestException as e:
|
except requests.exceptions.RequestException as e:
|
||||||
# TODO: May be we should retry?
|
# TODO: May be we should retry?
|
||||||
|
@ -364,7 +368,10 @@ def update_alarm(deployment_id, metric_name, data, api_key=None, profile="teleme
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response = requests.put(
|
response = requests.put(
|
||||||
request_uri, data=salt.utils.json.dumps(post_body), headers=auth
|
request_uri,
|
||||||
|
data=salt.utils.json.dumps(post_body),
|
||||||
|
headers=auth,
|
||||||
|
timeout=120,
|
||||||
)
|
)
|
||||||
except requests.exceptions.RequestException as e:
|
except requests.exceptions.RequestException as e:
|
||||||
log.error("Update failed: %s", e)
|
log.error("Update failed: %s", e)
|
||||||
|
@@ -429,7 +436,7 @@ def delete_alarms(
         delete_url = _get_telemetry_base(profile) + "/alerts/{}".format(id)
 
         try:
-            response = requests.delete(delete_url, headers=auth)
+            response = requests.delete(delete_url, headers=auth, timeout=120)
             if metric_name:
                 log.debug(
                     "updating cache and delete %s key from %s",

Some files were not shown because too many files have changed in this diff.