Update and remove obsolete pylint plugins

commit b9be2dec1b (parent f3f54a6eb1)
314 changed files with 2209 additions and 3342 deletions
.github/workflows/lint-action.yml (vendored) | 2 lines changed

@@ -23,7 +23,7 @@ jobs:
     if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}

     container:
-      image: ghcr.io/saltstack/salt-ci-containers/python:3.8
+      image: ghcr.io/saltstack/salt-ci-containers/python:3.9

     steps:
       - name: Install System Deps
@@ -1370,23 +1370,6 @@ repos:
       # <---- Doc CI Requirements ----------------------------------------------------------------------------------------

       # ----- Lint CI Requirements -------------------------------------------------------------------------------------->
-      - id: pip-tools-compile
-        alias: compile-ci-lint-3.7-requirements
-        name: Lint CI Py3.7 Requirements
-        files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.7/linux\.txt)))$
-        pass_filenames: false
-        args:
-          - -v
-          - --build-isolation
-          - --py-version=3.7
-          - --platform=linux
-          - --include=requirements/base.txt
-          - --include=requirements/zeromq.txt
-          - --include=requirements/static/pkg/linux.in
-          - --include=requirements/static/ci/linux.in
-          - --include=requirements/static/ci/common.in
-          - --no-emit-index-url
-          - requirements/static/ci/lint.in

       - id: pip-tools-compile
         alias: compile-ci-lint-3.8-requirements
@@ -1762,7 +1745,7 @@ repos:
           - types-attrs
           - types-pyyaml
           - types-requests
-          - python-tools-scripts>=0.20.0
+          - python-tools-scripts==0.20.0

   - repo: https://github.com/saltstack/mirrors-nox
     rev: v2021.6.12
@@ -1770,7 +1753,7 @@ repos:
       - id: nox
         alias: lint-salt
         name: Lint Salt
-        files: ^((setup|noxfile)|(salt|tasks|tools)/.*)\.py$
+        files: ^((setup|noxfile)|(salt|tools)/.*)\.py$
         exclude: >
           (?x)^(
             templates/.*|

noxfile.py | 97 lines changed
@@ -1300,7 +1300,7 @@ def decompress_dependencies(session):
             # Let's try to fix shebang's
             try:
                 fpath = pathlib.Path(path)
-                contents = fpath.read_text().splitlines()
+                contents = fpath.read_text(encoding="utf-8").splitlines()
                 if (
                     contents[0].startswith("#!")
                     and contents[0].endswith("python")
@@ -1310,7 +1310,9 @@ def decompress_dependencies(session):
                         "Fixing broken shebang in %r",
                         str(fpath.relative_to(REPO_ROOT)),
                     )
-                    fpath.write_text("\n".join([fixed_shebang] + contents[1:]))
+                    fpath.write_text(
+                        "\n".join([fixed_shebang] + contents[1:]), encoding="utf-8"
+                    )
             except UnicodeDecodeError:
                 pass

@@ -1467,48 +1469,26 @@ class Tee:
         return self._first.fileno()


-def _lint(
-    session, rcfile, flags, paths, tee_output=True, upgrade_setuptools_and_pip=True
-):
+def _lint(session, rcfile, flags, paths, upgrade_setuptools_and_pip=True):
     if _upgrade_pip_setuptools_and_wheel(session, upgrade=upgrade_setuptools_and_pip):
-        requirements_file = os.path.join(
+        base_requirements_file = os.path.join(
             "requirements", "static", "ci", _get_pydir(session), "linux.txt"
         )
+        lint_requirements_file = os.path.join(
+            "requirements", "static", "ci", _get_pydir(session), "lint.txt"
+        )
-        install_command = ["--progress-bar=off", "-r", requirements_file]
+        install_command = [
+            "--progress-bar=off",
+            "-r",
+            base_requirements_file,
+            "-r",
+            lint_requirements_file,
+        ]
         session.install(*install_command, silent=PIP_INSTALL_SILENT)

-    if tee_output:
-        session.run("pylint", "--version")
-        pylint_report_path = os.environ.get("PYLINT_REPORT")
-
     cmd_args = ["pylint", "--rcfile={}".format(rcfile)] + list(flags) + list(paths)

     cmd_kwargs = {"env": {"PYTHONUNBUFFERED": "1"}}

-    if tee_output:
-        stdout = tempfile.TemporaryFile(mode="w+b")
-        cmd_kwargs["stdout"] = Tee(stdout, sys.__stdout__)
-
-    lint_failed = False
-    try:
-        session.run(*cmd_args, **cmd_kwargs)
-    except CommandFailed:
-        lint_failed = True
-        raise
-    finally:
-        if tee_output:
-            stdout.seek(0)
-            contents = stdout.read()
-            if contents:
-                contents = contents.decode("utf-8")
-                sys.stdout.write(contents)
-                sys.stdout.flush()
-                if pylint_report_path:
-                    # Write report
-                    with open(pylint_report_path, "w") as wfh:
-                        wfh.write(contents)
-                    session.log("Report file written to %r", pylint_report_path)
-            stdout.close()
+    session.run(*cmd_args, **cmd_kwargs)


 def _lint_pre_commit(session, rcfile, flags, paths):
@@ -1527,26 +1507,17 @@ def _lint_pre_commit(session, rcfile, flags, paths):
     from nox.virtualenv import VirtualEnv

     # Let's patch nox to make it run inside the pre-commit virtualenv
-    try:
-        session._runner.venv = VirtualEnv(  # pylint: disable=unexpected-keyword-arg
-            os.environ["VIRTUAL_ENV"],
-            interpreter=session._runner.func.python,
-            reuse_existing=True,
-            venv=True,
-        )
-    except TypeError:
-        # This is still nox-py2
-        session._runner.venv = VirtualEnv(
-            os.environ["VIRTUAL_ENV"],
-            interpreter=session._runner.func.python,
-            reuse_existing=True,
-        )
+    session._runner.venv = VirtualEnv(
+        os.environ["VIRTUAL_ENV"],
+        interpreter=session._runner.func.python,
+        reuse_existing=True,
+        venv=True,
+    )
     _lint(
         session,
         rcfile,
         flags,
         paths,
-        tee_output=False,
         upgrade_setuptools_and_pip=False,
     )
@@ -1554,7 +1525,7 @@ def _lint_pre_commit(session, rcfile, flags, paths):
 @nox.session(python="3")
 def lint(session):
     """
-    Run PyLint against Salt and it's test suite. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt and it's test suite.
     """
     session.notify("lint-salt-{}".format(session.python))
     session.notify("lint-tests-{}".format(session.python))
@@ -1563,21 +1534,21 @@ def lint(session):
 @nox.session(python="3", name="lint-salt")
 def lint_salt(session):
     """
-    Run PyLint against Salt. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt.
     """
     flags = ["--disable=I"]
     if session.posargs:
         paths = session.posargs
     else:
         # TBD replace paths entries when implement pyproject.toml
-        paths = ["setup.py", "noxfile.py", "salt/"]
+        paths = ["setup.py", "noxfile.py", "salt/", "tools/"]
     _lint(session, ".pylintrc", flags, paths)


 @nox.session(python="3", name="lint-tests")
 def lint_tests(session):
     """
-    Run PyLint against Salt and it's test suite. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt and it's test suite.
     """
     flags = ["--disable=I"]
     if session.posargs:
@@ -1590,20 +1561,20 @@ def lint_tests(session):
 @nox.session(python=False, name="lint-salt-pre-commit")
 def lint_salt_pre_commit(session):
     """
-    Run PyLint against Salt. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt.
     """
     flags = ["--disable=I"]
     if session.posargs:
         paths = session.posargs
     else:
-        paths = ["setup.py", "noxfile.py", "salt/"]
+        paths = ["setup.py", "noxfile.py", "salt/", "tools/"]
     _lint_pre_commit(session, ".pylintrc", flags, paths)


 @nox.session(python=False, name="lint-tests-pre-commit")
 def lint_tests_pre_commit(session):
     """
-    Run PyLint against Salt and it's test suite. Set PYLINT_REPORT to a path to capture output.
+    Run PyLint against Salt and it's test suite.
     """
     flags = ["--disable=I"]
     if session.posargs:
@@ -1960,8 +1931,8 @@ def ci_test_onedir_pkgs(session):
             + cmd_args[:]
             + [
                 "--no-install",
-                f"--junitxml=artifacts/xml-unittests-output/test-results-install.xml",
-                f"--log-file=artifacts/logs/runtests-install.log",
+                "--junitxml=artifacts/xml-unittests-output/test-results-install.xml",
+                "--log-file=artifacts/logs/runtests-install.log",
             ]
             + session.posargs
         )
@@ -1978,8 +1949,8 @@ def ci_test_onedir_pkgs(session):
             + cmd_args[:]
             + [
                 "--no-install",
-                f"--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml",
-                f"--log-file=artifacts/logs/runtests-install-rerun.log",
+                "--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml",
+                "--log-file=artifacts/logs/runtests-install-rerun.log",
                 "--lf",
             ]
             + session.posargs
@@ -2,6 +2,6 @@
 --constraint=./py{py_version}/{platform}.txt

 docker
-pylint==2.4.4
-SaltPyLint>=2023.3.8
+pylint~=3.1.0
+SaltPyLint>=2024.2.2
 toml
|
|
@ -33,7 +33,7 @@ asn1crypto==1.3.0
|
|||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# certvalidator
|
||||
# oscrypto
|
||||
astroid==2.3.3
|
||||
astroid==3.1.0
|
||||
# via pylint
|
||||
async-timeout==4.0.2
|
||||
# via
|
||||
|
@ -145,6 +145,8 @@ cryptography==42.0.3
|
|||
# paramiko
|
||||
# pyopenssl
|
||||
# vcert
|
||||
dill==0.3.8
|
||||
# via pylint
|
||||
distlib==0.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
|
@ -285,8 +287,6 @@ kubernetes==3.0.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
lazy-object-proxy==1.4.3
|
||||
# via astroid
|
||||
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
|
@ -320,8 +320,6 @@ mercurial==6.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
modernize==0.5
|
||||
# via saltpylint
|
||||
more-itertools==5.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/linux.txt
|
||||
|
@ -385,6 +383,7 @@ pathtools==0.1.2
|
|||
platformdirs==2.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# pylint
|
||||
# virtualenv
|
||||
portend==2.4
|
||||
# via
|
||||
|
@ -405,8 +404,6 @@ pyasn1==0.4.8
|
|||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
pycodestyle==2.5.0
|
||||
# via saltpylint
|
||||
pycparser==2.21 ; python_version >= "3.9"
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/linux.txt
|
||||
|
@ -435,7 +432,7 @@ pyjwt==2.4.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# twilio
|
||||
pylint==2.4.4
|
||||
pylint==3.1.0
|
||||
# via
|
||||
# -r requirements/static/ci/lint.in
|
||||
# saltpylint
|
||||
|
@ -571,7 +568,7 @@ s3transfer==0.5.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# boto3
|
||||
saltpylint==2023.8.3
|
||||
saltpylint==2024.2.5
|
||||
# via -r requirements/static/ci/lint.in
|
||||
scp==0.13.2
|
||||
# via
|
||||
|
@ -591,7 +588,6 @@ six==1.16.0
|
|||
# -c requirements/static/ci/../pkg/py3.10/linux.txt
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# apscheduler
|
||||
# astroid
|
||||
# cassandra-driver
|
||||
# cheroot
|
||||
# etcd3-py
|
||||
|
@ -646,6 +642,12 @@ toml==0.10.2
|
|||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/ci/lint.in
|
||||
tomli==2.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# pylint
|
||||
tomlkit==0.12.3
|
||||
# via pylint
|
||||
tornado==6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
|
@ -658,6 +660,10 @@ twilio==7.9.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
typing-extensions==4.8.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# astroid
|
||||
tzlocal==3.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
|
@ -696,8 +702,6 @@ werkzeug==3.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
# moto
|
||||
wrapt==1.11.1
|
||||
# via astroid
|
||||
xmltodict==0.12.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.10/linux.txt
|
||||
|
|
|
@ -33,7 +33,7 @@ asn1crypto==1.3.0
|
|||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# certvalidator
|
||||
# oscrypto
|
||||
astroid==2.3.3
|
||||
astroid==3.1.0
|
||||
# via pylint
|
||||
attrs==23.1.0
|
||||
# via
|
||||
|
@ -141,6 +141,8 @@ cryptography==42.0.3
|
|||
# paramiko
|
||||
# pyopenssl
|
||||
# vcert
|
||||
dill==0.3.8
|
||||
# via pylint
|
||||
distlib==0.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
|
@ -276,8 +278,6 @@ kubernetes==3.0.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
lazy-object-proxy==1.4.3
|
||||
# via astroid
|
||||
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
|
@ -306,8 +306,6 @@ mercurial==6.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
modernize==0.5
|
||||
# via saltpylint
|
||||
more-itertools==5.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/linux.txt
|
||||
|
@ -360,6 +358,7 @@ pathtools==0.1.2
|
|||
platformdirs==2.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# pylint
|
||||
# virtualenv
|
||||
portend==2.4
|
||||
# via
|
||||
|
@ -380,8 +379,6 @@ pyasn1==0.4.8
|
|||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
pycodestyle==2.5.0
|
||||
# via saltpylint
|
||||
pycparser==2.21 ; python_version >= "3.9"
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/linux.txt
|
||||
|
@ -410,7 +407,7 @@ pyjwt==2.4.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# twilio
|
||||
pylint==2.4.4
|
||||
pylint==3.1.0
|
||||
# via
|
||||
# -r requirements/static/ci/lint.in
|
||||
# saltpylint
|
||||
|
@ -536,7 +533,7 @@ s3transfer==0.5.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# boto3
|
||||
saltpylint==2023.8.3
|
||||
saltpylint==2024.2.5
|
||||
# via -r requirements/static/ci/lint.in
|
||||
semantic-version==2.9.0
|
||||
# via
|
||||
|
@ -552,7 +549,6 @@ six==1.16.0
|
|||
# -c requirements/static/ci/../pkg/py3.11/linux.txt
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# apscheduler
|
||||
# astroid
|
||||
# cassandra-driver
|
||||
# cheroot
|
||||
# etcd3-py
|
||||
|
@ -604,6 +600,8 @@ toml==0.10.2
|
|||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/ci/lint.in
|
||||
tomlkit==0.12.3
|
||||
# via pylint
|
||||
tornado==6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
|
@ -650,8 +648,6 @@ werkzeug==3.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
# moto
|
||||
wrapt==1.11.1
|
||||
# via astroid
|
||||
xmltodict==0.12.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.11/linux.txt
|
||||
|
|
|
@ -33,7 +33,7 @@ asn1crypto==1.3.0
|
|||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# certvalidator
|
||||
# oscrypto
|
||||
astroid==2.3.3
|
||||
astroid==3.1.0
|
||||
# via pylint
|
||||
attrs==23.1.0
|
||||
# via
|
||||
|
@ -141,6 +141,8 @@ cryptography==42.0.3
|
|||
# paramiko
|
||||
# pyopenssl
|
||||
# vcert
|
||||
dill==0.3.8
|
||||
# via pylint
|
||||
distlib==0.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
@ -276,8 +278,6 @@ kubernetes==3.0.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
lazy-object-proxy==1.4.3
|
||||
# via astroid
|
||||
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
@ -306,8 +306,6 @@ mercurial==6.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
modernize==0.5
|
||||
# via saltpylint
|
||||
more-itertools==5.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/linux.txt
|
||||
|
@ -360,6 +358,7 @@ pathtools==0.1.2
|
|||
platformdirs==2.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# pylint
|
||||
# virtualenv
|
||||
portend==2.4
|
||||
# via
|
||||
|
@ -380,8 +379,6 @@ pyasn1==0.4.8
|
|||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
pycodestyle==2.5.0
|
||||
# via saltpylint
|
||||
pycparser==2.21 ; python_version >= "3.9"
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/linux.txt
|
||||
|
@ -410,7 +407,7 @@ pyjwt==2.4.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# twilio
|
||||
pylint==2.4.4
|
||||
pylint==3.1.0
|
||||
# via
|
||||
# -r requirements/static/ci/lint.in
|
||||
# saltpylint
|
||||
|
@ -536,7 +533,7 @@ s3transfer==0.5.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# boto3
|
||||
saltpylint==2023.8.3
|
||||
saltpylint==2024.2.5
|
||||
# via -r requirements/static/ci/lint.in
|
||||
semantic-version==2.9.0
|
||||
# via
|
||||
|
@ -552,7 +549,6 @@ six==1.16.0
|
|||
# -c requirements/static/ci/../pkg/py3.12/linux.txt
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# apscheduler
|
||||
# astroid
|
||||
# cassandra-driver
|
||||
# cheroot
|
||||
# etcd3-py
|
||||
|
@ -604,6 +600,8 @@ toml==0.10.2
|
|||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/ci/lint.in
|
||||
tomlkit==0.12.3
|
||||
# via pylint
|
||||
tornado==6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
@ -650,8 +648,6 @@ werkzeug==3.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# moto
|
||||
wrapt==1.11.1
|
||||
# via astroid
|
||||
xmltodict==0.12.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
|
|
@ -1,796 +0,0 @@
|
|||
#
|
||||
# This file is autogenerated by pip-compile
|
||||
# To update, run:
|
||||
#
|
||||
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt
|
||||
#
|
||||
aiohttp==3.8.6
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# etcd3-py
|
||||
aiosignal==1.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
apache-libcloud==2.5.0 ; sys_platform != "win32"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
apscheduler==3.6.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# python-telegram-bot
|
||||
asn1crypto==1.3.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# certvalidator
|
||||
# oscrypto
|
||||
astroid==2.3.3
|
||||
# via pylint
|
||||
async-timeout==4.0.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
asynctest==0.13.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
attrs==23.1.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
# jsonschema
|
||||
backports.entry-points-selectable==1.1.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# virtualenv
|
||||
backports.zoneinfo==0.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# tzlocal
|
||||
bcrypt==4.1.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# paramiko
|
||||
boto3==1.21.46
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# moto
|
||||
boto==2.49.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
botocore==1.24.46
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# boto3
|
||||
# moto
|
||||
# s3transfer
|
||||
cached-property==1.5.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# pygit2
|
||||
cachetools==4.2.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# google-auth
|
||||
# python-telegram-bot
|
||||
cassandra-driver==3.23.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
certifi==2023.07.22
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# kubernetes
|
||||
# python-telegram-bot
|
||||
# requests
|
||||
certvalidator==0.11.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# vcert
|
||||
cffi==1.14.6
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# cryptography
|
||||
# napalm
|
||||
# pygit2
|
||||
# pynacl
|
||||
charset-normalizer==3.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
# requests
|
||||
cheetah3==3.2.6.post2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
cheroot==8.5.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cherrypy
|
||||
cherrypy==18.6.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/pkg/linux.in
|
||||
ciscoconfparse==1.5.19
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# napalm
|
||||
click==7.1.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# geomet
|
||||
clustershell==1.8.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
colorama==0.4.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# ciscoconfparse
|
||||
contextvars==2.4
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
croniter==0.3.29 ; sys_platform != "win32"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
cryptography==42.0.3
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
# etcd3-py
|
||||
# moto
|
||||
# paramiko
|
||||
# pyopenssl
|
||||
# vcert
|
||||
distlib==0.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# virtualenv
|
||||
distro==1.5.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
dnspython==1.16.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# ciscoconfparse
|
||||
# python-etcd
|
||||
docker==6.1.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/lint.in
|
||||
etcd3-py==0.1.6
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
filelock==3.0.12
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# virtualenv
|
||||
frozenlist==1.3.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
# aiosignal
|
||||
future==0.18.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# napalm
|
||||
# textfsm
|
||||
genshi==0.7.5
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
geomet==0.1.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cassandra-driver
|
||||
gitdb==4.0.7
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# gitpython
|
||||
gitpython==3.1.41
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
google-auth==2.1.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# kubernetes
|
||||
hglib==2.6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
idna==3.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# etcd3-py
|
||||
# requests
|
||||
# yarl
|
||||
immutables==0.15
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# contextvars
|
||||
importlib-metadata==4.6.4
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
# attrs
|
||||
# backports.entry-points-selectable
|
||||
# jsonschema
|
||||
# mako
|
||||
# moto
|
||||
# virtualenv
|
||||
ipaddress==1.0.22
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# kubernetes
|
||||
isort==4.3.21
|
||||
# via pylint
|
||||
jaraco.classes==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# jaraco.collections
|
||||
jaraco.collections==3.4.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cherrypy
|
||||
jaraco.functools==2.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cheroot
|
||||
# jaraco.text
|
||||
# tempora
|
||||
jaraco.text==3.5.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# jaraco.collections
|
||||
jinja2==3.1.3
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
# junos-eznc
|
||||
# moto
|
||||
# napalm
|
||||
jmespath==1.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# boto3
|
||||
# botocore
|
||||
jsonschema==3.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# napalm
|
||||
jxmlease==1.0.1 ; sys_platform != "win32"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
keyring==5.7.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
kubernetes==3.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
lazy-object-proxy==1.4.3
|
||||
# via astroid
|
||||
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
looseversion==1.0.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
lxml==4.9.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
# napalm
|
||||
# ncclient
|
||||
mako==1.2.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
markupsafe==2.1.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
# jinja2
|
||||
# mako
|
||||
# moto
|
||||
# werkzeug
|
||||
mccabe==0.6.1
|
||||
# via pylint
|
||||
mercurial==6.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
modernize==0.5
|
||||
# via saltpylint
|
||||
more-itertools==5.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cheroot
|
||||
# cherrypy
|
||||
# jaraco.classes
|
||||
# jaraco.functools
|
||||
moto==3.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
msgpack==1.0.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
multidict==6.0.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
# yarl
|
||||
napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
ncclient==0.6.4
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
netaddr==0.7.19
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
# napalm
|
||||
# pyeapi
|
||||
netmiko==3.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# napalm
|
||||
ntc-templates==1.4.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
oscrypto==1.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# certvalidator
|
||||
packaging==22.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
# docker
|
||||
paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# junos-eznc
|
||||
# napalm
|
||||
# ncclient
|
||||
# netmiko
|
||||
# scp
|
||||
passlib==1.7.4
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# ciscoconfparse
|
||||
pathspec==0.9.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# yamllint
|
||||
pathtools==0.1.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# watchdog
|
||||
platformdirs==2.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# virtualenv
|
||||
portend==2.4
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cherrypy
|
||||
psutil==5.8.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
pyasn1-modules==0.2.4
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# google-auth
|
||||
pyasn1==0.4.8
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
pycodestyle==2.5.0
|
||||
# via saltpylint
|
||||
pycparser==2.17
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cffi
|
||||
pycryptodomex==3.19.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/crypto.txt
|
||||
pyeapi==0.8.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# napalm
|
||||
pygit2==1.10.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
pyiface==0.0.11
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
pyjwt==2.4.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# twilio
|
||||
pylint==2.4.4
|
||||
# via
|
||||
# -r requirements/static/ci/lint.in
|
||||
# saltpylint
|
||||
pymysql==1.0.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
pynacl==1.5.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# paramiko
|
||||
pyopenssl==24.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
# etcd3-py
|
||||
pyparsing==3.0.9
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
pyrsistent==0.17.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# jsonschema
|
||||
pyserial==3.4
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
# netmiko
|
||||
python-consul==1.1.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
python-dateutil==2.8.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
# botocore
|
||||
# croniter
|
||||
# kubernetes
|
||||
# moto
|
||||
# vcert
|
||||
python-etcd==0.4.5
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
python-gnupg==0.4.8
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
python-telegram-bot==13.7
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
pytz==2022.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# apscheduler
|
||||
# moto
|
||||
# python-telegram-bot
|
||||
# tempora
|
||||
# twilio
|
||||
pyvmomi==6.7.1.2018.12
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
pyyaml==6.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
# clustershell
|
||||
# junos-eznc
|
||||
# kubernetes
|
||||
# napalm
|
||||
# yamllint
|
||||
# yamlordereddictloader
|
||||
pyzmq==23.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/zeromq.txt
|
||||
redis-py-cluster==2.1.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
redis==3.5.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# redis-py-cluster
|
||||
requests==2.31.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/base.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# apache-libcloud
|
||||
# docker
|
||||
# etcd3-py
|
||||
# kubernetes
|
||||
# moto
|
||||
# napalm
|
||||
# python-consul
|
||||
# pyvmomi
|
||||
# responses
|
||||
# twilio
|
||||
# vcert
|
||||
responses==0.10.6
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# moto
|
||||
rfc3987==1.3.8
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
rpm-vercmp==0.1.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
rsa==4.7.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# google-auth
|
||||
s3transfer==0.5.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# boto3
|
||||
saltpylint==2023.8.3
|
||||
# via -r requirements/static/ci/lint.in
|
||||
scp==0.13.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
# napalm
|
||||
# netmiko
|
||||
semantic-version==2.9.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# etcd3-py
|
||||
setproctitle==1.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
six==1.16.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# apscheduler
|
||||
# astroid
|
||||
# cassandra-driver
|
||||
# cheroot
|
||||
# etcd3-py
|
||||
# genshi
|
||||
# geomet
|
||||
# jsonschema
|
||||
# junos-eznc
|
||||
# kazoo
|
||||
# kubernetes
|
||||
# more-itertools
|
||||
# ncclient
|
||||
# python-consul
|
||||
# python-dateutil
|
||||
# pyvmomi
|
||||
# responses
|
||||
# textfsm
|
||||
# transitions
|
||||
# vcert
|
||||
# virtualenv
|
||||
# websocket-client
|
||||
slack-bolt==1.15.5
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
slack-sdk==3.19.5
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# slack-bolt
|
||||
smmap==4.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# gitdb
|
||||
sqlparse==0.4.4
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
strict-rfc3339==0.7
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
tempora==4.1.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# portend
|
||||
terminal==0.4.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# ntc-templates
|
||||
textfsm==1.1.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# napalm
|
||||
# netmiko
|
||||
# ntc-templates
|
||||
timelib==0.2.5
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/pkg/linux.in
|
||||
toml==0.10.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/ci/lint.in
|
||||
tornado==6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# python-telegram-bot
|
||||
transitions==0.8.9
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
twilio==7.9.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
typed-ast==1.4.1
|
||||
# via astroid
|
||||
typing-extensions==3.10.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
# async-timeout
|
||||
# gitpython
|
||||
# importlib-metadata
|
||||
# yarl
|
||||
tzlocal==3.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# apscheduler
|
||||
urllib3==1.26.18
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# botocore
|
||||
# docker
|
||||
# kubernetes
|
||||
# python-etcd
|
||||
# requests
|
||||
vcert==0.7.4 ; sys_platform != "win32"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
virtualenv==20.7.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
watchdog==0.10.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
websocket-client==0.40.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# docker
|
||||
# kubernetes
|
||||
wempy==0.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
werkzeug==2.2.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# moto
|
||||
wrapt==1.11.1
|
||||
# via astroid
|
||||
xmltodict==0.12.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# moto
|
||||
yamllint==1.26.3
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
yamlordereddictloader==0.4.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# junos-eznc
|
||||
yarl==1.7.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# aiohttp
|
||||
zc.lockfile==1.4
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# cherrypy
|
||||
zipp==3.5.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.7/linux.txt
|
||||
# -c requirements/static/ci/py3.7/linux.txt
|
||||
# importlib-metadata
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
# setuptools
|
|
@ -25,7 +25,7 @@ asn1crypto==1.3.0
|
|||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# certvalidator
|
||||
# oscrypto
|
||||
astroid==2.3.3
|
||||
astroid==3.1.0
|
||||
# via pylint
|
||||
async-timeout==4.0.2
|
||||
# via
|
||||
|
@ -149,6 +149,8 @@ cryptography==42.0.3
|
|||
# paramiko
|
||||
# pyopenssl
|
||||
# vcert
|
||||
dill==0.3.8
|
||||
# via pylint
|
||||
distlib==0.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
|
@ -296,8 +298,6 @@ kubernetes==3.0.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
lazy-object-proxy==1.4.3
|
||||
# via astroid
|
||||
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
|
@ -332,8 +332,6 @@ mercurial==6.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
modernize==0.5
|
||||
# via saltpylint
|
||||
more-itertools==5.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.8/linux.txt
|
||||
|
@ -413,6 +411,7 @@ pathtools==0.1.2
|
|||
platformdirs==2.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# pylint
|
||||
# virtualenv
|
||||
portend==2.4
|
||||
# via
|
||||
|
@ -433,8 +432,6 @@ pyasn1==0.4.8
|
|||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
pycodestyle==2.5.0
|
||||
# via saltpylint
|
||||
pycparser==2.17
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.8/linux.txt
|
||||
|
@ -465,7 +462,7 @@ pyjwt==2.4.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# twilio
|
||||
pylint==2.4.4
|
||||
pylint==3.1.0
|
||||
# via
|
||||
# -r requirements/static/ci/lint.in
|
||||
# saltpylint
|
||||
|
@ -599,7 +596,7 @@ s3transfer==0.5.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# boto3
|
||||
saltpylint==2023.8.3
|
||||
saltpylint==2024.2.5
|
||||
# via -r requirements/static/ci/lint.in
|
||||
scp==0.13.2
|
||||
# via
|
||||
|
@ -621,7 +618,6 @@ six==1.16.0
|
|||
# -c requirements/static/ci/../pkg/py3.8/linux.txt
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# apscheduler
|
||||
# astroid
|
||||
# cassandra-driver
|
||||
# cheroot
|
||||
# etcd3-py
|
||||
|
@ -687,6 +683,12 @@ toml==0.10.2
|
|||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/ci/lint.in
|
||||
tomli==2.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# pylint
|
||||
tomlkit==0.12.3
|
||||
# via pylint
|
||||
tornado==6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
|
@ -699,6 +701,11 @@ twilio==7.9.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
typing-extensions==4.8.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# astroid
|
||||
# pylint
|
||||
tzlocal==3.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
|
@ -737,8 +744,6 @@ werkzeug==3.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
# moto
|
||||
wrapt==1.11.1
|
||||
# via astroid
|
||||
xmltodict==0.12.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.8/linux.txt
|
||||
|
|
|
@ -25,7 +25,7 @@ asn1crypto==1.3.0
|
|||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# certvalidator
|
||||
# oscrypto
|
||||
astroid==2.3.3
|
||||
astroid==3.1.0
|
||||
# via pylint
|
||||
async-timeout==4.0.2
|
||||
# via
|
||||
|
@ -145,6 +145,8 @@ cryptography==42.0.3
|
|||
# paramiko
|
||||
# pyopenssl
|
||||
# vcert
|
||||
dill==0.3.8
|
||||
# via pylint
|
||||
distlib==0.3.2
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
|
@ -292,8 +294,6 @@ kubernetes==3.0.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
lazy-object-proxy==1.4.3
|
||||
# via astroid
|
||||
libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin"
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
|
@ -328,8 +328,6 @@ mercurial==6.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
modernize==0.5
|
||||
# via saltpylint
|
||||
more-itertools==5.0.0
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/linux.txt
|
||||
|
@ -409,6 +407,7 @@ pathtools==0.1.2
|
|||
platformdirs==2.2.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# pylint
|
||||
# virtualenv
|
||||
portend==2.4
|
||||
# via
|
||||
|
@ -429,8 +428,6 @@ pyasn1==0.4.8
|
|||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
pycodestyle==2.5.0
|
||||
# via saltpylint
|
||||
pycparser==2.21 ; python_version >= "3.9"
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/linux.txt
|
||||
|
@ -463,7 +460,7 @@ pyjwt==2.4.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# twilio
|
||||
pylint==2.4.4
|
||||
pylint==3.1.0
|
||||
# via
|
||||
# -r requirements/static/ci/lint.in
|
||||
# saltpylint
|
||||
|
@ -597,7 +594,7 @@ s3transfer==0.5.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# boto3
|
||||
saltpylint==2023.8.3
|
||||
saltpylint==2024.2.5
|
||||
# via -r requirements/static/ci/lint.in
|
||||
scp==0.13.2
|
||||
# via
|
||||
|
@ -619,7 +616,6 @@ six==1.16.0
|
|||
# -c requirements/static/ci/../pkg/py3.9/linux.txt
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# apscheduler
|
||||
# astroid
|
||||
# cassandra-driver
|
||||
# cheroot
|
||||
# etcd3-py
|
||||
|
@ -685,6 +681,12 @@ toml==0.10.2
|
|||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# -r requirements/static/ci/common.in
|
||||
# -r requirements/static/ci/lint.in
|
||||
tomli==2.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# pylint
|
||||
tomlkit==0.12.3
|
||||
# via pylint
|
||||
tornado==6.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
|
@ -697,6 +699,11 @@ twilio==7.9.2
|
|||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# -r requirements/static/ci/linux.in
|
||||
typing-extensions==4.8.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# astroid
|
||||
# pylint
|
||||
tzlocal==3.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
|
@ -735,8 +742,6 @@ werkzeug==3.0.1
|
|||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
# moto
|
||||
wrapt==1.11.1
|
||||
# via astroid
|
||||
xmltodict==0.12.0
|
||||
# via
|
||||
# -c requirements/static/ci/py3.9/linux.txt
|
||||
|
|
|
@@ -169,7 +169,7 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
             logging.getLogger(__name__)

         """
-        instance = super().__new__(cls)
+        instance = super().__new__(cls)  # pylint: disable=no-value-for-parameter

         try:
             max_logger_length = len(
@@ -84,11 +84,10 @@ def auth(username, password, **kwargs):
     if cert.verify(cacert.get_pubkey()):
         log.info("Successfully authenticated certificate: %s", pem)
         return True
-    else:
-        log.info("Failed to authenticate certificate: %s", pem)
-        return False
+    log.info("Failed to authenticate certificate: %s", pem)
+    return False

-    c = OpenSSL.crypto
+    c = OpenSSL.crypto  # pylint: disable=used-before-assignment
     cert = c.load_certificate(c.FILETYPE_PEM, pem)

     with salt.utils.files.fopen(cacert_file) as f:
@@ -101,7 +100,7 @@ def auth(username, password, **kwargs):
     cert_asn1 = c.dump_certificate(c.FILETYPE_ASN1, cert)

     # Decode the certificate
-    der = asn1.DerSequence()
+    der = asn1.DerSequence()  # pylint: disable=used-before-assignment
     der.decode(cert_asn1)

     # The certificate has three parts:
@@ -225,7 +225,7 @@ class AsyncReqChannel:
         if HAS_M2:
             aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding)
         else:
-            cipher = PKCS1_OAEP.new(key)
+            cipher = PKCS1_OAEP.new(key)  # pylint: disable=used-before-assignment
             aes = cipher.decrypt(ret["key"])

         # Decrypt using the public key.
@@ -206,7 +206,7 @@ class ReqServerChannel:
         if HAS_M2:
             pret["key"] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding)
         else:
-            cipher = PKCS1_OAEP.new(pub)
+            cipher = PKCS1_OAEP.new(pub)  # pylint: disable=used-before-assignment
             pret["key"] = cipher.encrypt(key)
         if ret is False:
             ret = {}
@@ -90,7 +90,10 @@ class Batch:
         """
         Return the active number of minions to maintain
         """
-        partition = lambda x: float(x) / 100.0 * len(self.minions)
+
+        def partition(x):
+            return float(x) / 100.0 * len(self.minions)
+
         try:
             if isinstance(self.opts["batch"], str) and "%" in self.opts["batch"]:
                 res = partition(float(self.opts["batch"].strip("%")))
@@ -68,7 +68,7 @@ class SaltCP:
         except OSError as exc:
             if exc.errno == errno.ENOENT:
                 # Path does not exist
-                sys.stderr.write("{} does not exist\n".format(path))
+                sys.stderr.write(f"{path} does not exist\n")
                 sys.exit(42)
             elif exc.errno in (errno.EINVAL, errno.ENOTDIR):
                 # Path is a file (EINVAL on Windows, ENOTDIR otherwise)
@@ -97,7 +97,7 @@ class SaltCP:
         Take a path and return the contents of the file as a string
         """
         if not os.path.isfile(fn_):
-            err = "The referenced file, {} is not available.".format(fn_)
+            err = f"The referenced file, {fn_} is not available."
             sys.stderr.write(err + "\n")
             sys.exit(42)
         with salt.utils.files.fopen(fn_, "r") as fp_:
@@ -211,12 +211,10 @@ class SaltCP:
             log.debug(
                 "Copying %s to %starget '%s' as %s%s",
                 fn_,
-                "{} ".format(selected_target_option)
-                if selected_target_option
-                else "",
+                f"{selected_target_option} " if selected_target_option else "",
                 tgt,
                 remote_path,
-                " (chunk #{})".format(index) if append else "",
+                f" (chunk #{index})" if append else "",
             )
             args = [
                 tgt,
@@ -261,11 +259,7 @@ class SaltCP:
             log.debug(
                 "Creating empty dir %s on %starget '%s'",
                 dirname,
-                "{} ".format(
-                    selected_target_option
-                )  # pylint: disable=str-format-in-logging
-                if selected_target_option
-                else "",
+                f"{selected_target_option} " if selected_target_option else "",
                 tgt,
             )
             args = [tgt, "cp.recv_chunked", [remote_path, None], timeout]
@@ -8,7 +8,6 @@ The data structure needs to be:
     'key': '<read in the key file>'}
 """

-
 import logging

 # The components here are simple, and they need to be and stay simple, we
@@ -1296,7 +1295,7 @@ class LocalClient:
         except KeyError as exc:
             # This is a safe pass. We're just using the try/except to
             # avoid having to deep-check for keys.
-            missing_key = exc.__str__().strip("'\"")
+            missing_key = str(exc).strip("'\"")
             if missing_key == "retcode":
                 log.debug("retcode missing from client return")
             else:
@@ -559,7 +559,7 @@ class SSH(MultiprocessingStateMixin):
         try:
             retcode = int(retcode)
         except (TypeError, ValueError):
-            log.warning(f"Got an invalid retcode for host '{host}': '{retcode}'")
+            log.warning("Got an invalid retcode for host '%s': '%s'", host, retcode)
             retcode = 1
         # This job is done, yield
         try:
@@ -573,7 +573,9 @@ class SSH(MultiprocessingStateMixin):
                 retcode = int(remote_retcode)
             except (TypeError, ValueError):
                 log.warning(
-                    f"Host '{host}' reported an invalid retcode: '{remote_retcode}'"
+                    "Host '%s' reported an invalid retcode: '%s'",
+                    host,
+                    remote_retcode,
                 )
                 retcode = max(retcode, 1)
         except (KeyError, TypeError):
@@ -599,7 +601,7 @@ class SSH(MultiprocessingStateMixin):
         """
         que = multiprocessing.Queue()
         running = {}
-        target_iter = self.targets.__iter__()
+        target_iter = iter(self.targets)
         returned = set()
         rets = set()
         init = False
@@ -829,7 +831,7 @@ class SSH(MultiprocessingStateMixin):
         for ret, retcode in self.handle_ssh():
             host = next(iter(ret))
             if not isinstance(retcode, int):
-                log.warning(f"Host '{host}' returned an invalid retcode: {retcode}")
+                log.warning("Host '%s' returned an invalid retcode: %s", host, retcode)
                 retcode = 1
             final_exit = max(final_exit, retcode)

@@ -1784,7 +1786,7 @@ def ssh_version():
         ["ssh", "-V"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
     ).communicate()
     try:
-        version_parts = ret[1].split(b",")[0].split(b"_")[1]
+        version_parts = ret[1].split(b",", maxsplit=1)[0].split(b"_")[1]
         parts = []
         for part in version_parts:
             try:
@@ -41,7 +41,6 @@ ARGS = None
 # The below line is where OPTIONS can be redefined with internal options
 # (rather than cli arguments) when the shim is bundled by
 # client.ssh.Single._cmd_str()
-# pylint: disable=block-comment-should-start-with-cardinal-space
 #%%OPTS


@@ -230,7 +229,9 @@ def get_executable():
     Find executable which matches supported python version in the thin
     """
     pymap = {}
-    with open(os.path.join(OPTIONS.saltdir, "supported-versions")) as _fp:
+    with open(
+        os.path.join(OPTIONS.saltdir, "supported-versions"), encoding="utf-8"
+    ) as _fp:
         for line in _fp.readlines():
             ns, v_maj, v_min = line.strip().split(":")
             pymap[ns] = (int(v_maj), int(v_min))
@ -314,7 +315,7 @@ def main(argv): # pylint: disable=W0613
|
|||
)
|
||||
)
|
||||
need_deployment()
|
||||
with open(code_checksum_path, "r") as vpo:
|
||||
with open(code_checksum_path, "r", encoding="utf-8") as vpo:
|
||||
cur_code_cs = vpo.readline().strip()
|
||||
if cur_code_cs != OPTIONS.code_checksum:
|
||||
sys.stderr.write(
|
||||
|
@ -330,7 +331,7 @@ def main(argv): # pylint: disable=W0613
|
|||
sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path))
|
||||
need_deployment()
|
||||
|
||||
with open(os.path.join(OPTIONS.saltdir, "minion"), "w") as config:
|
||||
with open(os.path.join(OPTIONS.saltdir, "minion"), "w", encoding="utf-8") as config:
|
||||
config.write(OPTIONS.config + "\n")
|
||||
if OPTIONS.ext_mods:
|
||||
ext_path = os.path.join(OPTIONS.saltdir, EXT_ARCHIVE)
|
||||
|
@ -340,7 +341,7 @@ def main(argv): # pylint: disable=W0613
|
|||
version_path = os.path.join(OPTIONS.saltdir, "ext_version")
|
||||
if not os.path.exists(version_path) or not os.path.isfile(version_path):
|
||||
need_ext()
|
||||
with open(version_path, "r") as vpo:
|
||||
with open(version_path, "r", encoding="utf-8") as vpo:
|
||||
cur_version = vpo.readline().strip()
|
||||
if cur_version != OPTIONS.ext_mods:
|
||||
need_ext()
|
||||
|
|
|
@ -23,21 +23,28 @@ def _serial_sanitizer(instr):
return "{}{}".format(instr[:index], "X" * (length - index))

_FQDN_SANITIZER = lambda x: "MINION.DOMAINNAME"
_HOSTNAME_SANITIZER = lambda x: "MINION"
_DOMAINNAME_SANITIZER = lambda x: "DOMAINNAME"
def _fqdn_sanitizer(x):
return "MINION.DOMAINNAME"

def _hostname_sanitizer(x):
return "MINION"

def _domainname_sanitizer(x):
return "DOMAINNAME"

# A dictionary of grain -> function mappings for sanitizing grain output. This
# is used when the 'sanitize' flag is given.
_SANITIZERS = {
"serialnumber": _serial_sanitizer,
"domain": _DOMAINNAME_SANITIZER,
"fqdn": _FQDN_SANITIZER,
"id": _FQDN_SANITIZER,
"host": _HOSTNAME_SANITIZER,
"localhost": _HOSTNAME_SANITIZER,
"nodename": _HOSTNAME_SANITIZER,
"domain": _domainname_sanitizer,
"fqdn": _fqdn_sanitizer,
"id": _fqdn_sanitizer,
"host": _hostname_sanitizer,
"localhost": _hostname_sanitizer,
"nodename": _hostname_sanitizer,
}

@ -12,7 +12,9 @@ try:
from collections.abc import Mapping
except ImportError:
# We still allow Py2 import because this could be executed in a machine with Py2.
from collections import Mapping # pylint: disable=no-name-in-module
from collections import ( # pylint: disable=no-name-in-module,deprecated-class
Mapping,
)

def get(key, default="", merge=False, delimiter=DEFAULT_TARGET_DELIM):
@ -174,11 +174,11 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
msg = "The following virtual machines are set to be destroyed:\n"
names = set()
for alias, drivers in matching.items():
msg += "  {}:\n".format(alias)
msg += f"  {alias}:\n"
for driver, vms in drivers.items():
msg += "    {}:\n".format(driver)
msg += f"    {driver}:\n"
for name in vms:
msg += "      {}\n".format(name)
msg += f"      {name}\n"
names.add(name)
try:
if self.print_confirm(msg):

@ -212,7 +212,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
key, value = name.split("=", 1)
kwargs[key] = value
else:
msg += "  {}\n".format(name)
msg += f"  {name}\n"
machines.append(name)
names = machines

@ -255,7 +255,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
elif self.options.set_password:
username = self.credential_username
provider_name = "salt.cloud.provider.{}".format(self.credential_provider)
provider_name = f"salt.cloud.provider.{self.credential_provider}"
# TODO: check if provider is configured
# set the password
salt.utils.cloud.store_password_in_keyring(provider_name, username)

@ -275,7 +275,7 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
# display profile errors
msg += "Found the following errors:\n"
for profile_name, error in dmap["errors"].items():
msg += "  {}: {}\n".format(profile_name, error)
msg += f"  {profile_name}: {error}\n"
sys.stderr.write(msg)
sys.stderr.flush()

@ -283,17 +283,17 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
if "existing" in dmap:
msg += "The following virtual machines already exist:\n"
for name in dmap["existing"]:
msg += "  {}\n".format(name)
msg += f"  {name}\n"

if dmap["create"]:
msg += "The following virtual machines are set to be created:\n"
for name in dmap["create"]:
msg += "  {}\n".format(name)
msg += f"  {name}\n"

if "destroy" in dmap:
msg += "The following virtual machines are set to be destroyed:\n"
for name in dmap["destroy"]:
msg += "  {}\n".format(name)
msg += f"  {name}\n"

if not dmap["create"] and not dmap.get("destroy", None):
if not dmap.get("existing", None):

@ -382,19 +382,17 @@ class SaltCloud(salt.utils.parsers.SaltCloudParser):
# This is a salt cloud system exit
if exc.exit_code > 0:
# the exit code is bigger than 0, it's an error
msg = "Error: {}".format(msg)
msg = f"Error: {msg}"
self.exit(exc.exit_code, msg.format(exc).rstrip() + "\n")
# It's not a system exit but it's an error we can
# handle
self.error(msg.format(exc))
# This is a generic exception, log it, include traceback if
# debug logging is enabled and exit.
# pylint: disable=str-format-in-logging
log.error(
msg.format(exc),
# Show the traceback if the debug logging level is
# enabled
exc_info_on_loglevel=logging.DEBUG,
)
# pylint: enable=str-format-in-logging
self.exit(salt.defaults.exitcodes.EX_GENERIC)
@ -798,7 +798,7 @@ def query(params=None):
signature = _compute_signature(parameters, access_key_secret)
parameters["Signature"] = signature

request = requests.get(path, params=parameters, verify=True)
request = requests.get(path, params=parameters, verify=True, timeout=120)
if request.status_code != 200:
raise SaltCloudSystemExit(
"An error occurred while querying aliyun ECS. HTTP Code: {} "

@ -308,7 +308,7 @@ def get_build_status(req_id, nodename):
counter = 0
req_id = str(req_id)
while counter < 10:
queue = clc.v1.Blueprint.GetStatus(request_id=(req_id))
queue = clc.v1.Blueprint.GetStatus(request_id=req_id)
if queue["PercentComplete"] == 100:
server_name = queue["Servers"][0]
creds = get_creds()

@ -474,9 +474,14 @@ def create(vm_):
dns_hostname,
dns_domain,
)
__add_dns_addr__ = lambda t, d: post_dns_record(
dns_domain=dns_domain, name=dns_hostname, record_type=t, record_data=d
)

def __add_dns_addr__(t, d):
return post_dns_record(
dns_domain=dns_domain,
name=dns_hostname,
record_type=t,
record_data=d,
)

log.debug("create_dns_record: %s", __add_dns_addr__)
else:

@ -639,6 +644,7 @@ def query(
"Authorization": "Bearer " + personal_access_token,
"Content-Type": "application/json",
},
timeout=120,
)
if request.status_code > 299:
raise SaltCloudSystemExit(

@ -403,7 +403,7 @@ def query(
log.trace("EC2 Request Parameters: %s", params_with_headers)
try:
result = requests.get(
requesturl, headers=headers, params=params_with_headers
requesturl, headers=headers, params=params_with_headers, timeout=120
)
log.debug(
"EC2 Response Status Code: %s",

@ -1198,9 +1198,9 @@ def get_imageid(vm_):
"Filter.0.Value.0": image,
}
# Query AWS, sort by 'creationDate' and get the last imageId
_t = lambda x: datetime.datetime.strptime(
x["creationDate"], "%Y-%m-%dT%H:%M:%S.%fZ"
)
def _t(x):
return datetime.datetime.strptime(x["creationDate"], "%Y-%m-%dT%H:%M:%S.%fZ")

image_id = sorted(
aws.query(
params,
@ -1168,8 +1168,8 @@ def query(action=None, command=None, args=None, method="GET", location=None, dat
digest = md.final()
signed = rsa_key.sign(digest, algo="sha256")
else:
rsa_ = PKCS1_v1_5.new(rsa_key)
hash_ = SHA256.new()
rsa_ = PKCS1_v1_5.new(rsa_key) # pylint: disable=used-before-assignment
hash_ = SHA256.new() # pylint: disable=used-before-assignment
hash_.update(timestamp.encode(__salt_system_encoding__))
signed = rsa_.sign(hash_)
signed = base64.b64encode(signed)

@ -530,7 +530,9 @@ class LinodeAPIv4(LinodeAPI):
attempt = 0
while True:
try:
result = requests.request(method, url, json=data, headers=headers)
result = requests.request(
method, url, json=data, headers=headers, timeout=120
)

log.debug("Linode API response status code: %d", result.status_code)
log.trace("Linode API response body: %s", result.text)

@ -1092,7 +1094,9 @@ class LinodeAPIv4(LinodeAPI):
"entity.type": entity,
}
last_event = None
condition = lambda event: self._check_event_status(event, status)

def condition(event):
return self._check_event_status(event, status)

while True:
if last_event is not None:

@ -1965,8 +1969,8 @@ class LinodeAPIv3(LinodeAPI):
for key, val in ips.items():
if key == linode_id:
this_node["private_ips"] = val["private_ips"]
this_node["public_ips"] = val["public_ips"]
this_node["private_ips"] = val[1]
this_node["public_ips"] = val[0]

if full:
this_node["extra"] = node

@ -137,7 +137,9 @@ def _authenticate():
connect_data = {"username": username, "password": passwd}
full_url = "https://{}:{}/api2/json/access/ticket".format(url, port)

response = requests.post(full_url, verify=verify_ssl, data=connect_data)
response = requests.post(
full_url, verify=verify_ssl, data=connect_data, timeout=120
)
response.raise_for_status()
returned_data = response.json()

@ -171,6 +173,7 @@ def query(conn_type, option, post_data=None):
data=post_data,
cookies=ticket,
headers=httpheaders,
timeout=120,
)
elif conn_type == "put":
httpheaders["CSRFPreventionToken"] = csrf

@ -180,6 +183,7 @@ def query(conn_type, option, post_data=None):
data=post_data,
cookies=ticket,
headers=httpheaders,
timeout=120,
)
elif conn_type == "delete":
httpheaders["CSRFPreventionToken"] = csrf

@ -189,9 +193,12 @@ def query(conn_type, option, post_data=None):
data=post_data,
cookies=ticket,
headers=httpheaders,
timeout=120,
)
elif conn_type == "get":
response = requests.get(full_url, verify=verify_ssl, cookies=ticket)
response = requests.get(
full_url, verify=verify_ssl, cookies=ticket, timeout=120
)

try:
response.raise_for_status()

@ -862,7 +869,7 @@ def _import_api():
"""
global api
full_url = "https://{}:{}/pve-docs/api-viewer/apidoc.js".format(url, port)
returned_data = requests.get(full_url, verify=verify_ssl)
returned_data = requests.get(full_url, verify=verify_ssl, timeout=120)

re_filter = re.compile(" (?:pveapi|apiSchema) = (.*)^;", re.DOTALL | re.MULTILINE)
api_json = re_filter.findall(returned_data.text)[0]
@ -179,7 +179,7 @@ def query(params=None):
# print('parameters:')
# pprint.pprint(real_parameters)

request = requests.get(path, params=real_parameters, verify=verify_ssl)
request = requests.get(path, params=real_parameters, verify=verify_ssl, timeout=120)

# print('url:')
# print(request.url)

@ -439,7 +439,7 @@ def _get_size(vm_):
if not vm_size:
raise SaltCloudNotFound("No size specified for this instance.")

if vm_size in sizes.keys():
if vm_size in sizes:
return vm_size

raise SaltCloudNotFound(

@ -1628,7 +1628,7 @@ def _get_snapshots(snapshot_list, current_snapshot=None, parent_snapshot_path=""
snapshots[snapshot_path] = {
"name": snapshot.name,
"description": snapshot.description,
"created": str(snapshot.createTime).split(".")[0],
"created": str(snapshot.createTime).split(".", maxsplit=1)[0],
"state": snapshot.state,
"path": snapshot_path,
}

@ -47,7 +47,7 @@ class MinionDefaultInclude(DefaultIncludeConfig):

class MasterDefaultInclude(DefaultIncludeConfig):
__target__ = "master"
__confd_directory = "master.d"
__confd_directory = "master.d" # pylint: disable=unused-private-member

class IncludeConfig(Schema):
@ -890,8 +890,6 @@ class SlackClient:
if cmd in runner_functions:
runner = salt.runner.RunnerClient(__opts__)
log.debug("Command %s will run via runner_functions", cmd)
# pylint is tripping
# pylint: disable=missing-whitespace-after-comma
job_id_dict = runner.asynchronous(cmd, {"arg": args, "kwarg": kwargs})
job_id = job_id_dict["jid"]

@ -995,8 +995,6 @@ class SlackClient:
if cmd in runner_functions:
runner = salt.runner.RunnerClient(__opts__)
log.debug("Command %s will run via runner_functions", cmd)
# pylint is tripping
# pylint: disable=missing-whitespace-after-comma
job_id_dict = runner.asynchronous(cmd, {"arg": args, "kwarg": kwargs})
job_id = job_id_dict["jid"]

@ -87,7 +87,7 @@ def mk_gen():
return True
return NotImplemented

generator = type((lambda: (yield))())
generator = type((lambda: (yield))()) # pylint: disable=unnecessary-direct-lambda-call
Generator.register(generator)
return Generator

@ -2162,8 +2162,7 @@ class IPv6Interface(IPv6Address):
return x

def __str__(self):
return '%s/%d' % (super().__str__(),
self._prefixlen)
return '%s/%d' % (super(), self._prefixlen)

def __eq__(self, other):
address_equal = IPv6Address.__eq__(self, other)

@ -1,4 +1,4 @@
# auto.py is full of patterns mypy doesn't like, so for type checking
# purposes we replace it with interface.py.

from .interface import *
from .interface import * # pylint: disable=unused-wildcard-import,wildcard-import
@ -726,7 +726,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
if cached_md5 == file_md5:
return
else:
log.info(f"found different hash for file {path}, updating...")
log.info("found different hash for file %s, updating...", path)
else:
cached_file_stat = os.stat(cached_file_path)
cached_file_size = cached_file_stat.st_size

@ -762,6 +762,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
https_enable=https_enable,
)
if ret is not None:
s3_file_mtime = s3_file_size = None
for header_name, header_value in ret["headers"].items():
name = header_name.strip()
value = header_value.strip()

@ -771,9 +772,8 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
)
elif str(name).lower() == "content-length":
s3_file_size = int(value)
if (
cached_file_size == s3_file_size
and cached_file_mtime > s3_file_mtime
if (s3_file_size and cached_file_size == s3_file_size) and (
s3_file_mtime and cached_file_mtime > s3_file_mtime
):
log.info(
"%s - %s : %s skipped download since cached file size "

@ -2910,8 +2910,8 @@ def ip_fqdn():
if not ret["ipv" + ipv_num]:
ret[key] = []
else:
start_time = datetime.datetime.utcnow()
try:
start_time = datetime.datetime.utcnow()
info = socket.getaddrinfo(_fqdn, None, socket_type)
ret[key] = list({item[4][0] for item in info})
except (OSError, UnicodeError):

@ -15,7 +15,7 @@ import time
import traceback
import types
from collections.abc import MutableMapping
from zipimport import zipimporter
from zipimport import zipimporter # pylint: disable=no-name-in-module

import salt.config
import salt.defaults.events
@ -769,7 +769,9 @@ class Master(SMaster):
mod = ".".join(proc.split(".")[:-1])
cls = proc.split(".")[-1]
_tmp = __import__(mod, globals(), locals(), [cls], -1)
cls = _tmp.__getattribute__(cls)
cls = _tmp.__getattribute__( # pylint: disable=unnecessary-dunder-call
cls
)
name = "ExtProcess({})".format(cls.__qualname__)
self.process_manager.add_process(cls, args=(self.opts,), name=name)
except Exception: # pylint: disable=broad-except

@ -239,7 +239,7 @@ if not HAS_APT:
opts = _get_opts(self.line)
self.architectures.extend(opts["arch"]["value"])
self.signedby = opts["signedby"]["value"]
for opt in opts.keys():
for opt in opts:
opt = opts[opt]["full"]
if opt:
try:

@ -1609,9 +1609,11 @@ def _get_upgradable(dist_upgrade=True, **kwargs):
# rexp parses lines that look like the following:
# Conf libxfont1 (1:1.4.5-1 Debian:testing [i386])
rexp = re.compile("(?m)^Conf " "([^ ]+) " r"\(([^ ]+)")  # Package name # Version
rexp = re.compile(r"(?m)^Conf ([^ ]+) \(([^ ]+)")  # Package name # Version
keys = ["name", "version"]
_get = lambda l, k: l[keys.index(k)]

def _get(line, k):
return line[keys.index(k)]

upgrades = rexp.findall(out)

@ -1685,7 +1687,10 @@ def version_cmp(pkg1, pkg2, ignore_epoch=False, **kwargs):
salt '*' pkg.version_cmp '0.2.4-0ubuntu1' '0.2.4.1-0ubuntu1'
"""
normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)

def normalize(x):
return str(x).split(":", 1)[-1] if ignore_epoch else str(x)

# both apt_pkg.version_compare and _cmd_quote need string arguments.
pkg1 = normalize(pkg1)
pkg2 = normalize(pkg2)
@ -380,7 +380,7 @@ def list_(
dirs, files, links = func(name, cached, *args)
except OSError as exc:
raise CommandExecutionError(
"Failed to list contents of {}: {}".format(name, exc.__str__())
"Failed to list contents of {}: {}".format(name, exc)
)
except CommandExecutionError as exc:
raise

@ -395,9 +395,7 @@ def list_(
log.debug("Cleaned cached archive %s", cached)
except OSError as exc:
if exc.errno != errno.ENOENT:
log.warning(
"Failed to clean cached archive %s: %s", cached, exc.__str__()
)
log.warning("Failed to clean cached archive %s: %s", cached, exc)

if strip_components:
for item in (dirs, files, links):

@ -796,8 +794,8 @@ def zip_(zip_file, sources, template=None, cwd=None, runas=None, zip64=False):
os.setegid(uinfo["gid"])
os.seteuid(uinfo["uid"])

exc = None
try:
exc = None
archived_files = []
with contextlib.closing(
zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED, zip64)

@ -1203,7 +1201,7 @@ def is_encrypted(name, clean=False, saltenv="base", source_hash=None, use_etag=F
"{} is not a ZIP file".format(name), info=archive_info
)
except Exception as exc: # pylint: disable=broad-except
raise CommandExecutionError(exc.__str__(), info=archive_info)
raise CommandExecutionError(exc, info=archive_info)
else:
ret = False

@ -1213,9 +1211,7 @@ def is_encrypted(name, clean=False, saltenv="base", source_hash=None, use_etag=F
log.debug("Cleaned cached archive %s", cached)
except OSError as exc:
if exc.errno != errno.ENOENT:
log.warning(
"Failed to clean cached archive %s: %s", cached, exc.__str__()
)
log.warning("Failed to clean cached archive %s: %s", cached, exc)
return ret
@ -946,7 +946,7 @@ def _aws_decode(x):
if "\\" in x:
return x.decode("unicode_escape")

if type(x) == bytes:
if isinstance(x, bytes):
return x.decode("idna")

return x

@ -137,7 +137,7 @@ def topic_exists(name, region=None, key=None, keyid=None, profile=None):
salt myminion boto3_sns.topic_exists mytopic region=us-east-1
"""
topics = list_topics(region=region, key=key, keyid=keyid, profile=profile)
return name in list(topics.values() + topics.keys())
return name in list(topics.values()) + list(topics)

def create_topic(Name, region=None, key=None, keyid=None, profile=None):

@ -223,7 +223,10 @@ def _usage_specific(raw):
"""
Parse usage/specific.
"""
get_key = lambda val: dict([tuple(val.split(":"))])

def get_key(val):
return dict([tuple(val.split(":"))])

raw = raw.split("\n")
section, size, used = raw[0].split(" ")
section = section.replace(",", "_").replace(":", "").lower()

@ -75,7 +75,7 @@ def _query(
data = None
else:
if data is not None:
if type(data) != str:
if not isinstance(data, str):
data = salt.utils.json.dumps(data)
else:
data = salt.utils.json.dumps({})
@ -115,7 +115,7 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
if os.path.isfile(dest):
return "Path exists and is a file"
else:
return _error(exc.__str__())
return _error(str(exc))
return True

chunk = base64.b64decode(chunk)

@ -126,12 +126,12 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
except OSError as exc:
if exc.errno != errno.ENOENT:
# Parent dir does not exist, we need to create it
return _error(exc.__str__())
return _error(str(exc))
try:
os.makedirs(os.path.dirname(dest))
except OSError as makedirs_exc:
# Failed to make directory
return _error(makedirs_exc.__str__())
return _error(str(makedirs_exc))
fh_ = salt.utils.files.fopen(dest, open_mode) # pylint: disable=W8470

try:

@ -139,7 +139,7 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
fh_.write(salt.utils.gzip_util.uncompress(chunk) if compressed else chunk)
except OSError as exc:
# Write failed
return _error(exc.__str__())
return _error(str(exc))
else:
# Write successful
if not append and mode is not None:

@ -149,7 +149,7 @@ def recv_chunked(dest, chunk, append=False, compressed=True, mode=None):
try:
os.chmod(dest, mode)
except OSError:
return _error(exc.__str__())
return _error(str(exc))
return True
finally:
try:

@ -263,7 +263,10 @@ def set_crypttab(
criteria = entry.pick(match_on)

except KeyError:
filterFn = lambda key: key not in _crypttab_entry.crypttab_keys

def filterFn(key):
return key not in _crypttab_entry.crypttab_keys

invalid_keys = filter(filterFn, match_on)

msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)

@ -29,7 +29,7 @@ def _temp_exists(method, ip):
"""
_type = method.replace("temp", "").upper()
cmd = (
"csf -t | awk -v code=1 -v type=_type -v ip=ip '$1==type && $2==ip {{code=0}}"
"csf -t | awk -v code=1 -v type={_type} -v ip={ip} '$1==type && $2==ip {{code=0}}"
" END {{exit code}}'".format(_type=_type, ip=ip)
)
exists = __salt__["cmd.run_all"](cmd)
@ -157,7 +157,9 @@ def cancel_downtime(api_key=None, app_key=None, scope=None, id=None):
elif scope:
params = {"api_key": api_key, "application_key": app_key, "scope": scope}
response = requests.post(
"https://app.datadoghq.com/api/v1/downtime/cancel/by_scope", params=params
"https://app.datadoghq.com/api/v1/downtime/cancel/by_scope",
params=params,
timeout=120,
)
if response.status_code == 200:
ret["result"] = True

@ -314,7 +314,7 @@ def MX(domain, resolve=False, nameserver=None):
stdout = [x.split() for x in cmd["stdout"].split("\n")]

if resolve:
return [(lambda x: [x[0], A(x[1], nameserver)[0]])(x) for x in stdout]
return [[x[0], A(x[1], nameserver)[0]] for x in stdout]

return stdout

@ -49,7 +49,7 @@ def _parse_numbers(text):
"Z": "10E21",
"Y": "10E24",
}
if text[-1] in postPrefixes.keys():
if text[-1] in postPrefixes:
v = decimal.Decimal(text[:-1])
v = v * decimal.Decimal(postPrefixes[text[-1]])
return v
@ -399,7 +399,6 @@ def _get_client(timeout=NOTSET, **kwargs):
docker_machine_tls["ClientKeyPath"],
),
ca_cert=docker_machine_tls["CaCertPath"],
assert_hostname=False,
verify=True,
)
except Exception as exc: # pylint: disable=broad-except

@ -690,9 +689,9 @@ def _client_wrapper(attr, *args, **kwargs):
raise
except docker.errors.DockerException as exc:
# More general docker exception (catches InvalidVersion, etc.)
raise CommandExecutionError(exc.__str__())
raise CommandExecutionError(str(exc))
except Exception as exc: # pylint: disable=broad-except
err = exc.__str__()
err = str(exc)
else:
return ret

@ -1333,7 +1332,10 @@ def compare_networks(first, second, ignore="Name,Id,Created,Containers"):
if bool(subval1) is bool(subval2) is False:
continue
elif subkey == "Config":
kvsort = lambda x: (list(x.keys()), list(x.values()))

def kvsort(x):
return (list(x.keys()), list(x.values()))

config1 = sorted(val1["Config"], key=kvsort)
config2 = sorted(val2.get("Config", []), key=kvsort)
if config1 != config2:

@ -3312,7 +3314,7 @@ def create(
except CommandExecutionError as exc:
raise CommandExecutionError(
"Failed to start container after creation",
info={"response": response, "error": exc.__str__()},
info={"response": response, "error": str(exc)},
)
else:
response["Started"] = True

@ -3502,7 +3504,7 @@ def run_container(
f"Failed to auto_remove container: {rm_exc}"
)
# Raise original exception with additional info
raise CommandExecutionError(exc.__str__(), info=exc_info)
raise CommandExecutionError(str(exc), info=exc_info)

# Start the container
output = []

@ -3554,7 +3556,7 @@ def run_container(
# it to other_errors as a fallback.
exc_info.setdefault("other_errors", []).append(exc.info)
# Re-raise with all of the available additional info
raise CommandExecutionError(exc.__str__(), info=exc_info)
raise CommandExecutionError(str(exc), info=exc_info)

return ret

@ -4286,7 +4288,7 @@ def dangling(prune=False, force=False):
try:
ret.setdefault(image, {})["Removed"] = rmi(image, force=force)
except Exception as exc: # pylint: disable=broad-except
err = exc.__str__()
err = str(exc)
log.error(err)
ret.setdefault(image, {})["Comment"] = err
ret[image]["Removed"] = False

@ -4606,7 +4608,7 @@ def pull(
except Exception as exc: # pylint: disable=broad-except
raise CommandExecutionError(
f"Unable to interpret API event: '{event}'",
info={"Error": exc.__str__()},
info={"Error": str(exc)},
)
try:
event_type = next(iter(event))

@ -4700,7 +4702,7 @@ def push(
except Exception as exc: # pylint: disable=broad-except
raise CommandExecutionError(
f"Unable to interpret API event: '{event}'",
info={"Error": exc.__str__()},
info={"Error": str(exc)},
)
try:
event_type = next(iter(event))

@ -5496,7 +5498,7 @@ def disconnect_all_containers_from_network(network_id):
disconnect_container_from_network(cname, network_id)
ret.append(cname)
except CommandExecutionError as exc:
msg = exc.__str__()
msg = str(exc)
if "404" not in msg:
# If 404 was in the error, then the container no longer exists,
# so to avoid a race condition we won't consider 404 errors to
@ -148,9 +148,9 @@ def _process_emerge_err(stdout, stderr):
if slot_conflicts:
ret["slot conflicts"] = slot_conflicts

blocked = re.compile(
r"(?m)^\[blocks .+\] " r"([^ ]+/[^ ]+-[0-9]+[^ ]+)" r".*$"
).findall(stdout)
blocked = re.compile(r"(?m)^\[blocks .+\] ([^ ]+/[^ ]+-[0-9]+[^ ]+).*$").findall(
stdout
)

unsatisfied = re.compile(r"Error: The above package list contains").findall(stderr)

@ -331,7 +331,9 @@ def _get_upgradable(backtrack=3):
r".*$"
)
keys = ["name", "version"]
_get = lambda l, k: l[keys.index(k)]

def _get(line, k):
return line[keys.index(k)]

upgrades = rexp.findall(out)

@ -4027,7 +4027,7 @@ def readlink(path, canonicalize=False):
except OSError as exc:
if exc.errno == errno.EINVAL:
raise CommandExecutionError("Not a symbolic link: {}".format(path))
raise CommandExecutionError(exc.__str__())
raise CommandExecutionError(str(exc))

def readdir(path):

@ -5927,7 +5927,7 @@ def get_diff(
continue
paths.append(cached_path)
except MinionError as exc:
errors.append(salt.utils.stringutils.to_unicode(exc.__str__()))
errors.append(salt.utils.stringutils.to_unicode(str(exc)))
continue

if errors:
@ -38,7 +38,7 @@ def start(jail=""):

salt '*' jail.start [<jail name>]
"""
cmd = "service jail onestart {}".format(jail)
cmd = f"service jail onestart {jail}"
return not __salt__["cmd.retcode"](cmd)

@ -52,7 +52,7 @@ def stop(jail=""):

salt '*' jail.stop [<jail name>]
"""
cmd = "service jail onestop {}".format(jail)
cmd = f"service jail onestop {jail}"
return not __salt__["cmd.retcode"](cmd)

@ -66,7 +66,7 @@ def restart(jail=""):

salt '*' jail.restart [<jail name>]
"""
cmd = "service jail onerestart {}".format(jail)
cmd = f"service jail onerestart {jail}"
return not __salt__["cmd.retcode"](cmd)

@ -126,9 +126,7 @@ def show_config(jail):
"""
ret = {}
if subprocess.call(["jls", "-nq", "-j", jail]) == 0:
jls = subprocess.check_output(
["jls", "-nq", "-j", jail]
) # pylint: disable=minimum-python-version
jls = subprocess.check_output(["jls", "-nq", "-j", jail])
jailopts = salt.utils.args.shlex_split(salt.utils.stringutils.to_unicode(jls))
for jailopt in jailopts:
if "=" not in jailopt:

@ -145,7 +143,7 @@ def show_config(jail):
line = salt.utils.stringutils.to_unicode(line)
if not line.strip():
continue
if not line.startswith("jail_{}_".format(jail)):
if not line.startswith(f"jail_{jail}_"):
continue
key, value = line.split("=")
ret[key.split("_", 2)[2]] = value.split('"')[1]
@ -1067,7 +1067,7 @@ def clone(
url, https_user, https_pass, https_only=True
)
except ValueError as exc:
raise SaltInvocationError(exc.__str__())
raise SaltInvocationError(str(exc))

command = ["git"] + _format_git_opts(git_opts)
command.append("clone")

@ -3044,7 +3044,7 @@ def ls_remote(
remote, https_user, https_pass, https_only=True
)
except ValueError as exc:
raise SaltInvocationError(exc.__str__())
raise SaltInvocationError(str(exc))
command = ["git"] + _format_git_opts(git_opts)
command.append("ls-remote")
command.extend(_format_opts(opts))

@ -4051,7 +4051,7 @@ def remote_refs(
)
)
except ValueError as exc:
raise SaltInvocationError(exc.__str__())
raise SaltInvocationError(str(exc))
if filter_:
command.append(filter_)
output = _git_run(

@ -4185,7 +4185,7 @@ def remote_set(
url, https_user, https_pass, https_only=True
)
except ValueError as exc:
raise SaltInvocationError(exc.__str__())
raise SaltInvocationError(str(exc))
command = ["git", "remote", "add", remote, url]
_git_run(
command,

@ -126,6 +126,7 @@ def _api_get(path, server=None):
auth=_get_auth(server["user"], server["password"]),
headers=_get_headers(),
verify=True,
timeout=120,
)
return _api_response(response)

@ -141,6 +142,7 @@ def _api_post(path, data, server=None):
headers=_get_headers(),
data=salt.utils.json.dumps(data),
verify=True,
timeout=120,
)
return _api_response(response)

@ -156,6 +158,7 @@ def _api_delete(path, data, server=None):
headers=_get_headers(),
params=data,
verify=True,
timeout=120,
)
return _api_response(response)
@ -40,7 +40,9 @@ __outputter__ = {
}

# http://stackoverflow.com/a/12414913/127816
_infinitedict = lambda: collections.defaultdict(_infinitedict)
def _infinitedict():
return collections.defaultdict(_infinitedict)

_non_existent_key = "NonExistentValueMagicNumberSpK3hnufdHfeBUXCfqVK"

@ -54,21 +56,28 @@ def _serial_sanitizer(instr):
return "{}{}".format(instr[:index], "X" * (length - index))

_FQDN_SANITIZER = lambda x: "MINION.DOMAINNAME"
_HOSTNAME_SANITIZER = lambda x: "MINION"
_DOMAINNAME_SANITIZER = lambda x: "DOMAINNAME"
def _fqdn_sanitizer(x):
return "MINION.DOMAINNAME"

def _hostname_sanitizer(x):
return "MINION"

def _domainname_sanitizer(x):
return "DOMAINNAME"

# A dictionary of grain -> function mappings for sanitizing grain output. This
# is used when the 'sanitize' flag is given.
_SANITIZERS = {
"serialnumber": _serial_sanitizer,
"domain": _DOMAINNAME_SANITIZER,
"fqdn": _FQDN_SANITIZER,
"id": _FQDN_SANITIZER,
"host": _HOSTNAME_SANITIZER,
"localhost": _HOSTNAME_SANITIZER,
"nodename": _HOSTNAME_SANITIZER,
"domain": _domainname_sanitizer,
"fqdn": _fqdn_sanitizer,
"id": _fqdn_sanitizer,
"host": _hostname_sanitizer,
"localhost": _hostname_sanitizer,
"nodename": _hostname_sanitizer,
}

@ -241,9 +241,15 @@ def _poll_for_events(
"""
if action:
stop_status = ("{}_FAILED".format(action), "{}_COMPLETE".format(action))
stop_check = lambda a: a in stop_status

def stop_check(a):
return a in stop_status

else:
stop_check = lambda a: a.endswith("_COMPLETE") or a.endswith("_FAILED")

def stop_check(a):
return a.endswith("_COMPLETE") or a.endswith("_FAILED")

timeout_sec = timeout * 60
no_event_polls = 0
msg_template = "\n Stack %(name)s %(status)s \n"
@ -16,15 +16,12 @@
"""
:codeauthor: Bo Maryniuk <bo@suse.de>
"""

import csv
import datetime
import gzip
import os
import re
import shutil
import sys

from salt.utils.odict import OrderedDict

@ -182,12 +179,15 @@ class CsvDB:
:param obj:
:return:
"""
get_type = lambda item: str(type(item)).split("'")[1]

def get_type(item):
return str(type(item)).split("'")[1]

if not os.path.exists(os.path.join(self.db_path, obj._TABLE)):
with gzip.open(os.path.join(self.db_path, obj._TABLE), "wt") as table_file:
csv.writer(table_file).writerow(
[
"{col}:{type}".format(col=elm[0], type=get_type(elm[1]))
f"{elm[0]}:{get_type(elm[1])}"
for elm in tuple(obj.__dict__.items())
]
)

@ -270,7 +270,7 @@ class CsvDB:
def _validate_object(self, obj):
descr = self._tables.get(obj._TABLE)
if descr is None:
raise Exception("Table {} not found.".format(obj._TABLE))
raise Exception(f"Table {obj._TABLE} not found.")
return obj._serialize(self._tables[obj._TABLE])

def __criteria(self, obj, matches=None, mt=None, lt=None, eq=None):

@ -333,14 +333,10 @@ class CsvDB:
return objects

def _to_type(self, data, type):
if type == "int":
if type in ("int", "long"):
data = int(data)
elif type == "float":
data = float(data)
elif type == "long":
# pylint: disable=undefined-variable,incompatible-py3-code
data = sys.version_info[0] == 2 and long(data) or int(data)
# pylint: enable=undefined-variable,incompatible-py3-code
else:
data = str(data)
return data

@ -480,11 +480,13 @@ class Query(EnvLoader):
raise InspectorQueryException(
'Unknown "{}" value for parameter "time"'.format(timeformat)
)
tfmt = (
lambda param: timeformat == "tz"
and time.strftime("%b %d %Y %H:%M:%S", time.gmtime(param))
or int(param)
)

def tfmt(param):
return (
timeformat == "tz"
and time.strftime("%b %d %Y %H:%M:%S", time.gmtime(param))
or int(param)
)

size_fmt = kwargs.get("size")
if size_fmt is not None and size_fmt.lower() not in ["b", "kb", "mb", "gb"]:

@ -525,9 +527,9 @@ class Query(EnvLoader):
pld_files.append(pld_data.path)
else:
pld_files[pld_data.path] = {
"uid": self._id_resolv(pld_data.uid, named=(owners == "id")),
"uid": self._id_resolv(pld_data.uid, named=owners == "id"),
"gid": self._id_resolv(
pld_data.gid, named=(owners == "id"), uid=False
pld_data.gid, named=owners == "id", uid=False
),
"size": _size_format(pld_data.p_size, fmt=size_fmt),
"mode": oct(pld_data.mode),
@ -25,11 +25,11 @@ master config. The configuration is read using :py:func:`config.get
- "-A FORWARD"
"""

import argparse
import logging
import os
import re
import string
import sys
import uuid

import salt.utils.args

@ -73,7 +73,7 @@ def _has_option(option, family="ipv4"):
_has_option('--wait')
_has_option('--check', family='ipv6')
"""
cmd = "{} --help".format(_iptables_cmd(family))
cmd = f"{_iptables_cmd(family)} --help"
if option in __salt__["cmd.run_stdout"](cmd, output_loglevel="quiet"):
return True
return False

@ -192,7 +192,7 @@ def version(family="ipv4"):
IPv6:
salt '*' iptables.version family=ipv6
"""
cmd = "{} --version".format(_iptables_cmd(family))
cmd = f"{_iptables_cmd(family)} --version"
out = __salt__["cmd.run_stdout"](cmd).split()
return out[1]

@ -204,7 +204,7 @@ def build_rule(
position="",
full=None,
family="ipv4",
**kwargs
**kwargs,
):
"""
Build a well-formatted iptables rule based on kwargs. A `table` and `chain`

@ -316,7 +316,7 @@ def build_rule(
if not isinstance(match_value, list):
match_value = match_value.split(",")
for match in match_value:
rule.append("-m {}".format(match))
rule.append(f"-m {match}")
if "name_" in kwargs and match.strip() in ("pknock", "quota2", "recent"):
rule.append("--name {}".format(kwargs["name_"]))
del kwargs["name_"]

@ -335,7 +335,7 @@ def build_rule(
if match_set.startswith("!") or match_set.startswith("not"):
negative_match_set = "! "
match_set = re.sub(bang_not_pat, "", match_set)
rule.append("-m set {}--match-set {}".format(negative_match_set, match_set))
rule.append(f"-m set {negative_match_set}--match-set {match_set}")
del kwargs["match-set"]

if "connstate" in kwargs:

@ -382,7 +382,7 @@ def build_rule(
else:
dports = mp_value

rule.append("--{} {}".format(multiport_arg, dports))
rule.append(f"--{multiport_arg} {dports}")
del kwargs[multiport_arg]

if "comment" in kwargs:

@ -526,11 +526,11 @@ def build_rule(
if after_jump_argument in kwargs:
value = kwargs[after_jump_argument]
if value in (None, ""):  # options without arguments
after_jump.append("--{}".format(after_jump_argument))
after_jump.append(f"--{after_jump_argument}")
elif any(ws_char in str(value) for ws_char in string.whitespace):
after_jump.append('--{} "{}"'.format(after_jump_argument, value))
after_jump.append(f'--{after_jump_argument} "{value}"')
else:
after_jump.append("--{} {}".format(after_jump_argument, value))
after_jump.append(f"--{after_jump_argument} {value}")
del kwargs[after_jump_argument]

for key in kwargs:

@ -539,8 +539,8 @@ def build_rule(
# the value in the kwargs, thus we need to fetch it after that has run
value = kwargs[key]
flag = "-" if len(key) == 1 else "--"
value = "" if value in (None, "") else " {}".format(value)
rule.append("{}{}{}{}".format(negation, flag, key, value))
value = "" if value in (None, "") else f" {value}"
rule.append(f"{negation}{flag}{key}{value}")

rule += after_jump

@ -704,7 +704,7 @@ def save(filename=None, family="ipv4"):
parent_dir = os.path.dirname(filename)
if not os.path.isdir(parent_dir):
os.makedirs(parent_dir)
cmd = "{}-save".format(_iptables_cmd(family))
cmd = f"{_iptables_cmd(family)}-save"
ipt = __salt__["cmd.run_stdout"](cmd)

# regex out the output if configured with filters

@ -743,26 +743,24 @@ def check(table="filter", chain=None, rule=None, family="ipv4"):
ipt_cmd = _iptables_cmd(family)

if _has_option("--check", family):
cmd = "{} -t {} -C {} {}".format(ipt_cmd, table, chain, rule)
cmd = f"{ipt_cmd} -t {table} -C {chain} {rule}"
__salt__["cmd.run_stderr"](cmd, output_loglevel="quiet")
return not __context__["retcode"]
else:
_chain_name = hex(uuid.getnode())

# Create temporary table
__salt__["cmd.run"]("{} -t {} -N {}".format(ipt_cmd, table, _chain_name))
__salt__["cmd.run"](
"{} -t {} -A {} {}".format(ipt_cmd, table, _chain_name, rule)
)
__salt__["cmd.run"](f"{ipt_cmd} -t {table} -N {_chain_name}")
__salt__["cmd.run"](f"{ipt_cmd} -t {table} -A {_chain_name} {rule}")

out = __salt__["cmd.run_stdout"]("{}-save".format(ipt_cmd))
out = __salt__["cmd.run_stdout"](f"{ipt_cmd}-save")

# Clean up temporary table
__salt__["cmd.run"]("{} -t {} -F {}".format(ipt_cmd, table, _chain_name))
__salt__["cmd.run"]("{} -t {} -X {}".format(ipt_cmd, table, _chain_name))
__salt__["cmd.run"](f"{ipt_cmd} -t {table} -F {_chain_name}")
__salt__["cmd.run"](f"{ipt_cmd} -t {table} -X {_chain_name}")

for i in out.splitlines():
if i.startswith("-A {}".format(_chain_name)):
if i.startswith(f"-A {_chain_name}"):
if i.replace(_chain_name, chain) in out.splitlines():
return True

@ -792,8 +790,8 @@ def check_chain(table="filter", chain=None, family="ipv4"):
if not chain:
return "Error: Chain needs to be specified"

cmd = "{}-save -t {}".format(_iptables_cmd(family), table)
out = __salt__["cmd.run_stdout"](cmd).find(":{} ".format(chain))
cmd = f"{_iptables_cmd(family)}-save -t {table}"
out = __salt__["cmd.run_stdout"](cmd).find(f":{chain} ")

if out != -1:
out = True

@ -823,7 +821,7 @@ def new_chain(table="filter", chain=None, family="ipv4"):
return "Error: Chain needs to be specified"

wait = "--wait" if _has_option("--wait", family) else ""
cmd = "{} {} -t {} -N {}".format(_iptables_cmd(family), wait, table, chain)
cmd = f"{_iptables_cmd(family)} {wait} -t {table} -N {chain}"
out = __salt__["cmd.run_stderr"](cmd)

if not out:

@ -851,7 +849,7 @@ def delete_chain(table="filter", chain=None, family="ipv4"):
return "Error: Chain needs to be specified"

wait = "--wait" if _has_option("--wait", family) else ""
cmd = "{} {} -t {} -X {}".format(_iptables_cmd(family), wait, table, chain)
cmd = f"{_iptables_cmd(family)} {wait} -t {table} -X {chain}"
out = __salt__["cmd.run_stderr"](cmd)

if not out:

@ -889,7 +887,7 @@ def append(table="filter", chain=None, rule=None, family="ipv4"):
returnCheck = check(table, chain, rule, family)
if isinstance(returnCheck, bool) and returnCheck:
return False
cmd = "{} {} -t {} -A {} {}".format(_iptables_cmd(family), wait, table, chain, rule)
cmd = f"{_iptables_cmd(family)} {wait} -t {table} -A {chain} {rule}"
out = __salt__["cmd.run_stderr"](cmd)
return not out

@ -977,7 +975,7 @@ def delete(table, chain=None, position=None, rule=None, family="ipv4"):
rule = position

wait = "--wait" if _has_option("--wait", family) else ""
cmd = "{} {} -t {} -D {} {}".format(_iptables_cmd(family), wait, table, chain, rule)
cmd = f"{_iptables_cmd(family)} {wait} -t {table} -D {chain} {rule}"
out = __salt__["cmd.run_stderr"](cmd)
return out

@ -998,7 +996,7 @@ def flush(table="filter", chain="", family="ipv4"):
"""

wait = "--wait" if _has_option("--wait", family) else ""
cmd = "{} {} -t {} -F {}".format(_iptables_cmd(family), wait, table, chain)
cmd = f"{_iptables_cmd(family)} {wait} -t {table} -F {chain}"
out = __salt__["cmd.run_stderr"](cmd)
return out

@ -1016,7 +1014,7 @@ def _parse_conf(conf_file=None, in_mem=False, family="ipv4"):
with salt.utils.files.fopen(conf_file, "r") as ifile:
rules = ifile.read()
elif in_mem:
cmd = "{}-save".format(_iptables_cmd(family))
cmd = f"{_iptables_cmd(family)}-save"
rules = __salt__["cmd.run_stdout"](cmd)
else:
raise SaltException("A file was not found to parse")

@ -1057,7 +1055,7 @@ def _parse_conf(conf_file=None, in_mem=False, family="ipv4"):
and args[index + 1] != "!"
and not args[index + 1].startswith("-")
):
args[index] += " {}".format(args.pop(index + 1))
args[index] += f" {args.pop(index + 1)}"
index += 1
if args[-1].startswith("-"):
args.append("")

@ -1082,17 +1080,8 @@ def _parser():
iptables(8) and iptables-extensions(8) man pages. They will not all be
used by all parts of the module; use them intelligently and appropriately.
"""
add_arg = None
if sys.version.startswith("2.6"):
import optparse

parser = optparse.OptionParser()
add_arg = parser.add_option
else:
import argparse  # pylint: disable=minimum-python-version

parser = argparse.ArgumentParser()
add_arg = parser.add_argument
parser = argparse.ArgumentParser()
add_arg = parser.add_argument

# COMMANDS
add_arg("-A", "--append", dest="append", action="append")
@ -112,7 +112,7 @@ def _valid_iface(iface):
Validate the specified interface
"""
ifaces = list_interfaces()
if iface in ifaces.keys():
if iface in ifaces:
return True
return False

@ -581,7 +581,7 @@ def _source_encode(source, saltenv):
try:
source_url = urllib.parse.urlparse(source)
except TypeError:
return "", {}, "Invalid format for source parameter"
return "", {}

protos = ("salt", "http", "https", "ftp", "swift", "s3", "file")

@ -154,7 +154,7 @@ def _setup_conn_old(**kwargs):
or kubernetes.client.configuration.password != password
):
# Recreates API connection if settings are changed
kubernetes.client.configuration.__init__()
kubernetes.client.configuration.__init__() # pylint: disable=unnecessary-dunder-call

kubernetes.client.configuration.host = host
kubernetes.client.configuration.user = username

@ -32,7 +32,9 @@ log = logging.getLogger(__name__)

def __virtual__():
"""Only load this module if the Python ldap module is present"""
return bool(len(available_backends))
if available_backends:
return True
return False

class LDAPError(Exception):

@ -176,7 +176,7 @@ def list_ignored():
# "Safari6.1.2MountainLion-6.1.2",
# or:
# Safari6.1.2MountainLion-6.1.2
rexp = re.compile('(?m)^ ["]?' r'([^,|\s].*[^"|\n|,])[,|"]?')
rexp = re.compile(r'(?m)^ ["]?([^,|\s].*[^"|\n|,])[,|"]?')

return rexp.findall(out)
@ -186,7 +186,7 @@ def update(clear=False, mine_functions=None):
res = salt.utils.functools.call_function(
__salt__[function_name or function_alias],
*function_args,
**function_kwargs
**function_kwargs,
)
except Exception: # pylint: disable=broad-except
trace = traceback.format_exc()

@ -309,17 +309,8 @@ def get(tgt, fun, tgt_type="glob", exclude_minion=False):
# Load from local minion's cache
if __opts__["file_client"] == "local":
ret = {}
is_target = {
"glob": __salt__["match.glob"],
"pcre": __salt__["match.pcre"],
"list": __salt__["match.list"],
"grain": __salt__["match.grain"],
"grain_pcre": __salt__["match.grain_pcre"],
"ipcidr": __salt__["match.ipcidr"],
"compound": __salt__["match.compound"],
"pillar": __salt__["match.pillar"],
"pillar_pcre": __salt__["match.pillar_pcre"],
}[tgt_type](tgt)

is_target = __salt__[f"match.{tgt_type}"](tgt)
if not is_target:
return ret

@ -866,7 +866,10 @@ def set_fstab(
criteria = entry.pick(match_on)

except KeyError:
filterFn = lambda key: key not in _fstab_entry.fstab_keys

def filterFn(key):
return key not in _fstab_entry.fstab_keys

invalid_keys = filter(filterFn, match_on)

msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)

@ -996,7 +999,10 @@ def set_vfstab(
criteria = entry.pick(match_on)

except KeyError:
filterFn = lambda key: key not in _vfstab_entry.vfstab_keys

def filterFn(key):
return key not in _vfstab_entry.vfstab_keys

invalid_keys = filter(filterFn, match_on)

msg = 'Unrecognized keys in match_on: "{}"'.format(invalid_keys)

@ -1878,7 +1884,10 @@ def set_filesystems(
criteria = entry_ip.pick(match_on)

except KeyError:
filterFn = lambda key: key not in _FileSystemsEntry.compatibility_keys

def filterFn(key):
return key not in _FileSystemsEntry.compatibility_keys

invalid_keys = filter(filterFn, match_on)
raise CommandExecutionError(
'Unrecognized keys in match_on: "{}"'.format(invalid_keys)
@ -2394,7 +2394,7 @@ def __grant_generate(
if dbc != "*":
# _ and % are authorized on GRANT queries and should get escaped
# on the db name, but only if not requesting a table level grant
dbc = quote_identifier(dbc, for_grants=(table == "*"))
dbc = quote_identifier(dbc, for_grants=table == "*")
if table != "*":
table = quote_identifier(table)
# identifiers cannot be used as values, and same thing for grants

@ -2663,7 +2663,7 @@ def grant_revoke(
if dbc != "*":
# _ and % are authorized on GRANT queries and should get escaped
# on the db name, but only if not requesting a table level grant
s_database = quote_identifier(dbc, for_grants=(table == "*"))
s_database = quote_identifier(dbc, for_grants=table == "*")
if dbc == "*":
# add revoke for *.*
# before the modification query send to mysql will looks like

@ -2764,11 +2764,13 @@ def __do_query_into_hash(conn, sql_str):

rtn_results = []

cursor = None
try:
cursor = conn.cursor()
except MySQLdb.MySQLError:
log.error("%s: Can't get cursor for SQL->%s", mod, sql_str)
cursor.close()
if cursor:
cursor.close()
log.debug("%s-->", mod)
return rtn_results

@ -2015,7 +2015,7 @@ def iphexval(ip):
salt '*' network.iphexval 10.0.0.1
"""
a = ip.split(".")
hexval = ["%02X" % int(x) for x in a] # pylint: disable=E1321
hexval = ["%02X" % int(x) for x in a]
return "".join(hexval)
@ -2,24 +2,12 @@
Module for OpenSCAP Management

"""

import argparse
import shlex
import shutil
import tempfile
from subprocess import PIPE, Popen

ArgumentParser = object

try:
import argparse  # pylint: disable=minimum-python-version

ArgumentParser = argparse.ArgumentParser
HAS_ARGPARSE = True
except ImportError:  # python 2.6
HAS_ARGPARSE = False

_XCCDF_MAP = {
"eval": {
"parser_arguments": [(("--profile",), {"required": True})],

@ -32,15 +20,10 @@ _XCCDF_MAP = {
}

def __virtual__():
return HAS_ARGPARSE, "argparse module is required."

class _ArgumentParser(ArgumentParser):
class _ArgumentParser(argparse.ArgumentParser):
def __init__(self, action=None, *args, **kwargs):
super().__init__(*args, prog="oscap", **kwargs)
self.add_argument("action", choices=["eval"])
add_arg = None
for params, kwparams in _XCCDF_MAP["eval"]["parser_arguments"]:
self.add_argument(*params, **kwparams)

@ -1222,7 +1222,10 @@ def version_cmp(

salt '*' pkg.version_cmp '0.2.4-0' '0.2.4.1-0'
"""
normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)

def normalize(x):
return str(x).split(":", 1)[-1] if ignore_epoch else str(x)

pkg1 = normalize(pkg1)
pkg2 = normalize(pkg2)
@ -99,6 +99,7 @@ def post_data(
"Content-Type": "application/json",
"Authorization": "GenieKey " + api_key,
},
timeout=120,
)
else:
response = requests.post(

@ -108,6 +109,7 @@ def post_data(
"Content-Type": "application/json",
"Authorization": "GenieKey " + api_key,
},
timeout=120,
)

return response.status_code, response.text

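The timeout=120 arguments added here, and in several of the hunks that follow, track the requests guidance that a call with no timeout can block forever; recent pylint releases report such calls as missing-timeout (W3101). A short sketch against a placeholder endpoint:

    import requests

    API_URL = "https://api.example.com/v2/alerts"  # placeholder endpoint

    try:
        # Without timeout= this call could hang indefinitely on a stalled server;
        # with it, the failure surfaces as a catchable exception instead.
        response = requests.post(
            API_URL,
            json={"message": "test alert"},
            headers={"Content-Type": "application/json"},
            timeout=120,
        )
        print(response.status_code, response.text)
    except requests.exceptions.RequestException as exc:
        print(f"request failed: {exc}")
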
@ -180,9 +180,12 @@ def version(*dbs):
salt '*' oracle.version my_db
"""
pillar_dbs = __salt__["pillar.get"]("oracle:dbs")
get_version = lambda x: [
r[0] for r in run_query(x, "select banner from v$version order by banner")
]

def get_version(x):
    return [
        r[0] for r in run_query(x, "select banner from v$version order by banner")
    ]

result = {}
if dbs:
log.debug("get db versions for: %s", dbs)

@ -174,6 +174,7 @@ def _query(
params=params,
data=salt.utils.json.dumps(data),
verify=verify_ssl,
timeout=120,
)

if result.text is None or result.text == "":

@ -196,6 +197,7 @@ def _query(
params=params,
data=data, # Already serialized above, don't do it again
verify=verify_ssl,
timeout=120,
).json()
offset = next_page_results["offset"]
limit = next_page_results["limit"]

@ -26,7 +26,9 @@ def _repack_pkgs(pkgs, normalize=True):
if normalize and "pkg.normalize_name" in __salt__:
_normalize_name = __salt__["pkg.normalize_name"]
else:
_normalize_name = lambda pkgname: pkgname

def _normalize_name(pkgname):
    return pkgname

repacked_pkgs = {
_normalize_name(str(x)): str(y) if y is not None else y

@ -71,7 +73,9 @@ def pack_sources(sources, normalize=True):
if normalize and "pkg.normalize_name" in __salt__:
_normalize_name = __salt__["pkg.normalize_name"]
else:
_normalize_name = lambda pkgname: pkgname

def _normalize_name(pkgname):
    return pkgname

if isinstance(sources, str):
try:

@ -88,7 +88,7 @@ def _get_config_file(conf, atom):
# parts.repo will be empty if there is no repo part
relative_path = parts.repo or "gentoo"
elif str(parts.cp).endswith("/*"):
relative_path = str(parts.cp).split("/")[0] + "_"
relative_path = str(parts.cp).split("/", maxsplit=1)[0] + "_"
else:
relative_path = os.path.join(
*[x for x in os.path.split(parts.cp) if x != "*"]

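Since only the text before the first "/" is used, passing maxsplit=1 lets str.split stop after one split instead of splitting the whole string; this is the rewrite pylint's use-maxsplit-arg check suggests when only the first (or last) element of the result is consumed. For example:

    atom_cp = "app-editors/vim"  # illustrative category/package value

    # Same result, but the second form stops after the first "/".
    category = atom_cp.split("/")[0]
    category = atom_cp.split("/", maxsplit=1)[0]

    print(category + "_")  # app-editors_
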
@ -35,10 +35,6 @@ To prevent Postgres commands from running arbitrarily long, a timeout (in second
postgres.bins_dir: '/usr/pgsql-9.5/bin/'
"""

# This pylint error is popping up where there are no colons?
# pylint: disable=E8203


import base64
import datetime
import hashlib

@ -1007,7 +1003,8 @@ def user_list(
return False

# will return empty string if return_password = False
_x = lambda s: s if return_password else ""
def _x(s):
    return s if return_password else ""

query = "".join(
[

@ -105,7 +105,7 @@ def set_(device, **kwargs):
"file-hard-limit": 0,
}

current = None
current = ret = None
cmd = "setquota"
if "user" in kwargs:
cmd += " -u {} ".format(kwargs["user"])

@ -165,7 +165,9 @@ def _output_to_dict(cmdoutput, values_mapper=None):

ret = {}
if values_mapper is None:
values_mapper = lambda string: string.split("\t")

def values_mapper(string):
    return string.split("\t")

# remove first and last line: Listing ... - ...done
data_rows = _strip_listing_to_done(cmdoutput.splitlines())

@ -237,11 +239,11 @@ def list_users(runas=None):
)

# func to get tags from string such as "[admin, monitoring]"
func = (
    lambda string: [x.strip() for x in string[1:-1].split(",")]
    if "," in string
    else [x for x in string[1:-1].split(" ")]
)
def func(string):
    if "," in string:
        return [x.strip() for x in string[1:-1].split(",")]
    return [x for x in string[1:-1].split(" ")]

return _output_to_dict(res, func)

@ -710,7 +710,10 @@ def version_cmp(ver1, ver2, ignore_epoch=False):

salt '*' pkg.version_cmp '0.2-001' '0.2.0.1-002'
"""
normalize = lambda x: str(x).split(":", 1)[-1] if ignore_epoch else str(x)

def normalize(x):
    return str(x).split(":", 1)[-1] if ignore_epoch else str(x)

ver1 = normalize(ver1)
ver2 = normalize(ver2)

@ -476,7 +476,7 @@ def run_state_tests(state, saltenv=None, check_all=False, only_fails=False):

# Check for situations to disable parallization
if parallel:
if type(num_proc) == float:
if isinstance(num_proc, float):
num_proc = int(num_proc)

if multiprocessing.cpu_count() < 2:

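Comparing type(x) == float is what pylint calls unidiomatic-typecheck; isinstance gives the same answer here and also accepts subclasses, which matters if a float-like type is ever passed in. The same swap appears in the sysrc hunk further down. A tiny sketch:

    num_proc = 1.5

    # Old style, flagged by pylint as unidiomatic-typecheck:
    #     if type(num_proc) == float:
    if isinstance(num_proc, float):
        num_proc = int(num_proc)

    print(num_proc)  # 1
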
@ -375,7 +375,7 @@ def _validate_filetype(filetype):
Checks if the given filetype is a valid SELinux filetype
specification. Throws an SaltInvocationError if it isn't.
"""
if filetype not in _SELINUX_FILETYPES.keys():
if filetype not in _SELINUX_FILETYPES:
raise SaltInvocationError("Invalid filetype given: {}".format(filetype))
return True

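Dropping .keys() is behaviour-preserving: membership tests on a dict already look at its keys, without building an intermediate view; pylint reports the old spelling as consider-iterating-dictionary. For instance:

    _SELINUX_FILETYPES = {"a": "all files", "f": "regular file"}  # trimmed example

    filetype = "x"
    if filetype not in _SELINUX_FILETYPES:  # equivalent to "not in _SELINUX_FILETYPES.keys()"
        print("Invalid filetype given: {}".format(filetype))
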
@ -39,6 +39,7 @@ def __virtual__():
"""
if has_sense_hat:
try:
global _sensehat
_sensehat = SenseHat()
except OSError:
return (

@ -89,6 +89,7 @@ def create(name, **params):
"https://api.serverdensity.io/inventory/devices/",
params={"token": get_sd_auth("api_token")},
data=params,
timeout=120,
)
log.debug("Server Density API Response: %s", api_response)
log.debug("Server Density API Response content: %s", api_response.content)

@ -120,6 +121,7 @@ def delete(device_id):
api_response = requests.delete(
"https://api.serverdensity.io/inventory/devices/" + device_id,
params={"token": get_sd_auth("api_token")},
timeout=120,
)
log.debug("Server Density API Response: %s", api_response)
log.debug("Server Density API Response content: %s", api_response.content)

@ -171,6 +173,7 @@ def ls(**params):
"token": get_sd_auth("api_token"),
"filter": salt.utils.json.dumps(params),
},
timeout=120,
)
log.debug("Server Density API Response: %s", api_response)
log.debug("Server Density API Response content: %s", api_response.content)

@ -209,6 +212,7 @@ def update(device_id, **params):
"https://api.serverdensity.io/inventory/devices/" + device_id,
params={"token": get_sd_auth("api_token")},
data=params,
timeout=120,
)
log.debug("Server Density API Response: %s", api_response)
log.debug("Server Density API Response content: %s", api_response.content)

@ -37,13 +37,16 @@ def _exit_status(retcode, stderr=None):
"""
Translate exit status of imgadm
"""
ret = {
    0: "Successful completion.",
    1: "An error occurred." if not stderr else stderr,
    2: "Usage error.",
    3: "Image not installed.",
}[retcode]
return ret
if retcode == 0:
    return "Successful completion."
if retcode == 1:
    if stderr:
        return stderr
    return "An error occurred."
if retcode == 2:
    return "Usage error."
if retcode == 3:
    return "Image not installed."


def _parse_image_meta(image=None, detail=False):

@ -43,10 +43,12 @@ def _exit_status(retcode):
"""
Translate exit status of vmadm
"""
ret = {0: "Successful completion.", 1: "An error occurred.", 2: "Usage error."}[
    retcode
]
return ret
if retcode == 0:
    return "Successful completion."
if retcode == 1:
    return "An error occurred."
if retcode == 2:
    return "Usage error."


def _create_update_from_file(mode="create", uuid=None, path=None):

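The imgadm and vmadm helpers trade an inline dict lookup for a plain if chain. The observable difference is that an unexpected retcode now falls through and returns None instead of raising KeyError, and the stderr override reads explicitly rather than hiding in a conditional expression. Roughly:

    def exit_status_old(retcode):
        return {0: "Successful completion.", 1: "An error occurred.", 2: "Usage error."}[
            retcode
        ]

    def exit_status_new(retcode):
        if retcode == 0:
            return "Successful completion."
        if retcode == 1:
            return "An error occurred."
        if retcode == 2:
            return "Usage error."

    print(exit_status_new(2))   # Usage error.
    print(exit_status_new(42))  # None, where the old version raised KeyError
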
@ -114,7 +114,7 @@ def info(name):
}

try:
data = pwd.getpwnam(name)
data = pwd.getpwnam(name) # pylint: disable=used-before-assignment
ret.update({"name": name})
except KeyError:
return ret

@ -162,7 +162,7 @@ def create(name, profile="splunk", **kwargs):
_req_url = "{}/servicesNS/{}/search/saved/searches/{}/acl".format(
url, config.get("username"), urllib.parse.quote(name)
)
requests.post(_req_url, auth=auth, verify=True, data=data)
requests.post(_req_url, auth=auth, verify=True, data=data, timeout=120)
return _get_splunk_search_props(search)

@ -186,10 +186,10 @@ def custom():
try:
ret[item] = vals[item]
except KeyError:
log.warning(f"val {item} not in return of {func}")
log.warning("val %s not in return of %s", item, func)
ret[item] = "UNKNOWN"
except KeyError:
log.warning(f"custom status {func} isn't loaded")
log.warning("custom status %s isn't loaded", func)

return ret

@ -1361,7 +1361,10 @@ def netdev():
"""
freebsd specific implementation of netdev
"""
_dict_tree = lambda: collections.defaultdict(_dict_tree)

def _dict_tree():
    return collections.defaultdict(_dict_tree)

ret = _dict_tree()
netstat = __salt__["cmd.run"]("netstat -i -n -4 -b -d").splitlines()
netstat += __salt__["cmd.run"]("netstat -i -n -6 -b -d").splitlines()[1:]

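In status.custom() the f-strings inside log.warning become %s placeholders plus arguments, the lazy form pylint's logging-fstring-interpolation check asks for: the message is only interpolated when a handler actually emits the record, and the template stays constant for log aggregation. Sketch:

    import logging

    logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger(__name__)

    item, func = "loadavg", "status.uptime"  # illustrative values

    # Eager: the f-string is built even if the WARNING level were disabled.
    #     log.warning(f"val {item} not in return of {func}")

    # Lazy: logging formats the message only when the record is handled.
    log.warning("val %s not in return of %s", item, func)
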
@ -110,7 +110,7 @@ def _http_request(url, method="GET", headers=None, data=None):
"""
Make the HTTP request and return the body as python object.
"""
req = requests.request(method, url, headers=headers, data=data)
req = requests.request(method, url, headers=headers, data=data, timeout=120)
ret = _default_ret()
ok_status = METHOD_OK_STATUS.get(method, 200)
if req.status_code != ok_status:

@ -374,7 +374,7 @@ def node_ls(server=str):
try:
salt_return = {}
client = docker.APIClient(base_url="unix://var/run/docker.sock")
service = client.nodes(filters=({"name": server}))
service = client.nodes(filters={"name": server})
getdata = salt.utils.json.dumps(service)
dump = salt.utils.json.loads(getdata)
for items in dump:

@ -93,14 +93,14 @@ def set_(name, value, **kwargs):
# YES, NO, Yes, No, True, False, etc. to boolean types. However, in this case,
# we will check to see if that happened and replace it with "YES" or "NO" because
# those items are accepted in sysrc.
if type(value) == bool:
if isinstance(value, bool):
if value:
value = "YES"
else:
value = "NO"

# This is here for the same reason, except for numbers
if type(value) == int:
if isinstance(value, int):
value = str(value)

cmd += " " + name + '="' + value + '"'

@ -55,9 +55,6 @@ VALID_UNIT_TYPES = (
# Define the module's virtual name
__virtualname__ = "service"

# Disable check for string substitution
# pylint: disable=E1321


def __virtual__():
"""

@ -113,7 +113,7 @@ def _post_message(message, chat_id, token):
parameters["text"] = message

try:
response = requests.post(url, data=parameters)
response = requests.post(url, data=parameters, timeout=120)
result = response.json()

log.debug("Raw response of the telegram request is %s", response)

@ -101,7 +101,7 @@ def _retrieve_channel_id(email, profile="telemetry"):
_get_telemetry_base(profile)
+ "/notification-channels?_type=EmailNotificationChannel"
)
response = requests.get(get_url, headers=auth)
response = requests.get(get_url, headers=auth, timeout=120)

if response.status_code == 200:
cache_result = {}

@ -140,7 +140,7 @@ def get_alert_config(
get_url = _get_telemetry_base(profile) + "/alerts?deployment={}".format(
deployment_id
)
response = requests.get(get_url, headers=auth)
response = requests.get(get_url, headers=auth, timeout=120)
except requests.exceptions.RequestException as e:
log.error(str(e))
return False

@ -197,7 +197,7 @@ def get_notification_channel_id(notify_channel, profile="telemetry"):
"email": notify_channel,
}
response = requests.post(
post_url, data=salt.utils.json.dumps(data), headers=auth
post_url, data=salt.utils.json.dumps(data), headers=auth, timeout=120
)
if response.status_code == 200:
log.info(

@ -236,6 +236,7 @@ def get_alarms(deployment_id, profile="telemetry"):
_get_telemetry_base(profile)
+ "/alerts?deployment={}".format(deployment_id),
headers=auth,
timeout=120,
)
except requests.exceptions.RequestException as e:
log.error(str(e))

@ -293,7 +294,10 @@ def create_alarm(deployment_id, metric_name, data, api_key=None, profile="teleme

try:
response = requests.post(
request_uri, data=salt.utils.json.dumps(post_body), headers=auth
request_uri,
data=salt.utils.json.dumps(post_body),
headers=auth,
timeout=120,
)
except requests.exceptions.RequestException as e:
# TODO: May be we should retry?

@ -364,7 +368,10 @@ def update_alarm(deployment_id, metric_name, data, api_key=None, profile="teleme

try:
response = requests.put(
request_uri, data=salt.utils.json.dumps(post_body), headers=auth
request_uri,
data=salt.utils.json.dumps(post_body),
headers=auth,
timeout=120,
)
except requests.exceptions.RequestException as e:
log.error("Update failed: %s", e)

@ -429,7 +436,7 @@ def delete_alarms(
delete_url = _get_telemetry_base(profile) + "/alerts/{}".format(id)

try:
response = requests.delete(delete_url, headers=auth)
response = requests.delete(delete_url, headers=auth, timeout=120)
if metric_name:
log.debug(
"updating cache and delete %s key from %s",

Some files were not shown because too many files have changed in this diff.