Merge branch 'master' into toml

This commit is contained in:
Christian McHugh 2020-04-25 11:02:49 +01:00 committed by GitHub
commit 6a5a14e4e1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
118 changed files with 6141 additions and 2999 deletions

View file

@ -8,7 +8,7 @@ runTestSuite(
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq',
nox_passthrough_opts: '-n integration.modules.test_pkg',
nox_passthrough_opts: '',
python_version: 'py3',
testrun_timeout: 6,
use_spot_instances: true)

View file

@ -0,0 +1,16 @@
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'centos',
distro_version: '7',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodome',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

View file

@ -7,7 +7,7 @@ runTestSuite(
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodomex',
nox_env_name: 'runtests-zeromq-pycryptodome',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
testrun_timeout: 6,

View file

@ -0,0 +1,16 @@
@Library('salt@master-1.7') _
runTestSuite(
concurrent_builds: 1,
distro_name: 'ubuntu',
distro_version: '1604',
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodome',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
testrun_timeout: 6,
use_spot_instances: true)
// vim: ft=groovy

View file

@ -7,7 +7,7 @@ runTestSuite(
env: env,
golden_images_branch: 'master',
jenkins_slave_label: 'kitchen-slave',
nox_env_name: 'runtests-zeromq-pycryptodomex',
nox_env_name: 'runtests-zeromq-pycryptodome',
nox_passthrough_opts: '--ssh-tests',
python_version: 'py3',
testrun_timeout: 6,

View file

@ -505,6 +505,48 @@ repos:
- --py-version=3.9
- --platform=linux
# ----- Invoke ---------------------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-py3.5-invoke-requirements
name: Linux Py3.5 Invoke Requirements
files: ^requirements/static/invoke\.in$
args:
- -v
- --py-version=3.5
- id: pip-tools-compile
alias: compile-py3.6-invoke-requirements
name: Linux Py3.6 Invoke Requirements
files: ^requirements/static/invoke\.in$
args:
- -v
- --py-version=3.6
- id: pip-tools-compile
alias: compile-py3.7-invoke-requirements
name: Linux Py3.7 Invoke Requirements
files: ^requirements/static/invoke\.in$
args:
- -v
- --py-version=3.7
- id: pip-tools-compile
alias: compile-py3.8-invoke-requirements
name: Linux Py3.8 Invoke Requirements
files: ^requirements/static/invoke\.in$
args:
- -v
- --py-version=3.8
- id: pip-tools-compile
alias: compile-py3.9-invoke-requirements
name: Linux Py3.9 Invoke Requirements
files: ^requirements/static/invoke\.in$
args:
- -v
- --py-version=3.9
# <---- Invoke -----------------------------------------------------------------------------------------------------
- repo: https://github.com/timothycrosley/isort
rev: "1e78a9acf3110e1f9721feb591f89a451fc9876a"
hooks:
@ -539,7 +581,7 @@ repos:
- id: nox-py2
alias: lint-salt
name: Lint Salt
files: ^((setup|noxfile)|salt/.*)\.py$
files: ^((setup|noxfile)|(salt|tasks)/.*)\.py$
args:
- -e
- lint-salt-pre-commit
@ -556,3 +598,16 @@ repos:
- -e
- lint-tests-pre-commit
- --
- repo: https://github.com/saltstack/salt-nox-pre-commit
rev: master
hooks:
- id: nox-py2
alias: check-docs
name: Check Docs
files: ^(salt/.*\.py|doc/ref/.*\.rst)$
args:
- -e
- invoke-pre-commit
- --
- docs.check

File diff suppressed because it is too large Load diff

View file

@ -738,3 +738,28 @@ Also, if it is not desirable that *every* minion syncs on startup, the ``*``
can be replaced with a different glob to narrow down the set of minions which
will match that reactor (e.g. ``salt/minion/appsrv*/start``, which would only
match minion IDs beginning with ``appsrv``).
Reactor Tuning for Large-Scale Installations
=================================================
The reactor uses a thread pool implementation that's contained inside
salt.utils.process.ThreadPool and it uses Python's stdlib Queue to enqueue
jobs which are picked up by standard Python threads. If the queue is full,
False is simply returned by the firing method on the thread pool.
As such, there are a few things to say about the selection of proper values
for the reactor.
For situations where it is expected that many long-running jobs might be
executed by the reactor, `reactor_worker_hwm` should be increased or even
set to 0 to bound it only by available memory. If set to zero, a close eye
should be kept on memory consumption.
If many long-running jobs are expected and execution concurrency and
performance are a concern, you may also increase the value for
`reactor_worker_threads`. This will control the number of concurrent threads
which are pulling jobs from the queue and executing them. Obviously, this
bears a relationship to the speed at which the queue itself will fill up.
The price to pay for this value is that each thread will contain a copy of
Salt code needed to perform the requested action.

View file

@ -523,6 +523,16 @@ Module Changes
fixed to remove file system entries other than files, directories
and symbolic links properly.
- The :py:func:`debian_ip <salt.modules.debian_ip>` module used by the
:py:func:`network.managed <salt.states.network.managed>` state has been
heavily refactored. The order that options appear in inet/inet6 blocks may
produce cosmetic changes. Many options without an 'ipvX' prefix will now be
shared between inet and inet6 blocks. The options ``enable_ipv4`` and
``enable_ipv6`` will now fully remove relevant inet/inet6 blocks. Overriding
options by prefixing them with 'ipvX' will now work with most options (i.e.
``dns`` can be overridden by ``ipv4dns`` or ``ipv6dns``). The ``proto`` option
is now required.
Enhancements to Engines
=======================

View file

@ -489,24 +489,22 @@ def _runtests(session, coverage, cmd_args):
@nox.session(python=_PYTHON_VERSIONS, name="runtests-parametrized")
@nox.parametrize("coverage", [False, True])
@nox.parametrize("transport", ["zeromq", "tcp"])
@nox.parametrize("crypto", [None, "m2crypto", "pycryptodomex"])
@nox.parametrize("crypto", [None, "m2crypto", "pycryptodome"])
def runtests_parametrized(session, coverage, transport, crypto):
# Install requirements
_install_requirements(session, transport, "unittest-xml-reporting==2.5.2")
if crypto:
if crypto == "m2crypto":
session.run(
"pip",
"uninstall",
"-y",
"pycrypto",
"pycryptodome",
"pycryptodomex",
silent=True,
)
else:
session.run("pip", "uninstall", "-y", "m2crypto", silent=True)
session.run(
"pip",
"uninstall",
"-y",
"m2crypto",
"pycrypto",
"pycryptodome",
"pycryptodomex",
silent=True,
)
distro_constraints = _get_distro_pip_constraints(session, transport)
install_command = [
"--progress-bar=off",
@ -601,40 +599,40 @@ def runtests_zeromq_m2crypto(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name="runtests-pycryptodomex")
@nox.session(python=_PYTHON_VERSIONS, name="runtests-pycryptodome")
@nox.parametrize("coverage", [False, True])
def runtests_pycryptodomex(session, coverage):
def runtests_pycryptodome(session, coverage):
"""
runtests.py session with zeromq transport and pycryptodomex
runtests.py session with zeromq transport and pycryptodome
"""
session.notify(
"runtests-parametrized-{}(coverage={}, crypto='pycryptodomex', transport='zeromq')".format(
"runtests-parametrized-{}(coverage={}, crypto='pycryptodome', transport='zeromq')".format(
session.python, coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name="runtests-tcp-pycryptodomex")
@nox.session(python=_PYTHON_VERSIONS, name="runtests-tcp-pycryptodome")
@nox.parametrize("coverage", [False, True])
def runtests_tcp_pycryptodomex(session, coverage):
def runtests_tcp_pycryptodome(session, coverage):
"""
runtests.py session with TCP transport and pycryptodomex
runtests.py session with TCP transport and pycryptodome
"""
session.notify(
"runtests-parametrized-{}(coverage={}, crypto='pycryptodomex', transport='tcp')".format(
"runtests-parametrized-{}(coverage={}, crypto='pycryptodome', transport='tcp')".format(
session.python, coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name="runtests-zeromq-pycryptodomex")
@nox.session(python=_PYTHON_VERSIONS, name="runtests-zeromq-pycryptodome")
@nox.parametrize("coverage", [False, True])
def runtests_zeromq_pycryptodomex(session, coverage):
def runtests_zeromq_pycryptodome(session, coverage):
"""
runtests.py session with zeromq transport and pycryptodomex
runtests.py session with zeromq transport and pycryptodome
"""
session.notify(
"runtests-parametrized-{}(coverage={}, crypto='pycryptodomex', transport='zeromq')".format(
"runtests-parametrized-{}(coverage={}, crypto='pycryptodome', transport='zeromq')".format(
session.python, coverage
)
)
@ -675,24 +673,22 @@ def runtests_tornado(session, coverage):
@nox.session(python=_PYTHON_VERSIONS, name="pytest-parametrized")
@nox.parametrize("coverage", [False, True])
@nox.parametrize("transport", ["zeromq", "tcp"])
@nox.parametrize("crypto", [None, "m2crypto", "pycryptodomex"])
@nox.parametrize("crypto", [None, "m2crypto", "pycryptodome"])
def pytest_parametrized(session, coverage, transport, crypto):
# Install requirements
_install_requirements(session, transport)
if crypto:
if crypto == "m2crypto":
session.run(
"pip",
"uninstall",
"-y",
"pycrypto",
"pycryptodome",
"pycryptodomex",
silent=True,
)
else:
session.run("pip", "uninstall", "-y", "m2crypto", silent=True)
session.run(
"pip",
"uninstall",
"-y",
"m2crypto",
"pycrypto",
"pycryptodome",
"pycryptodomex",
silent=True,
)
distro_constraints = _get_distro_pip_constraints(session, transport)
install_command = [
"--progress-bar=off",
@ -793,40 +789,40 @@ def pytest_zeromq_m2crypto(session, coverage):
)
@nox.session(python=_PYTHON_VERSIONS, name="pytest-pycryptodomex")
@nox.session(python=_PYTHON_VERSIONS, name="pytest-pycryptodome")
@nox.parametrize("coverage", [False, True])
def pytest_pycryptodomex(session, coverage):
def pytest_pycryptodome(session, coverage):
"""
pytest session with zeromq transport and pycryptodomex
pytest session with zeromq transport and pycryptodome
"""
session.notify(
"pytest-parametrized-{}(coverage={}, crypto='pycryptodomex', transport='zeromq')".format(
"pytest-parametrized-{}(coverage={}, crypto='pycryptodome', transport='zeromq')".format(
session.python, coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name="pytest-tcp-pycryptodomex")
@nox.session(python=_PYTHON_VERSIONS, name="pytest-tcp-pycryptodome")
@nox.parametrize("coverage", [False, True])
def pytest_tcp_pycryptodomex(session, coverage):
def pytest_tcp_pycryptodome(session, coverage):
"""
pytest session with TCP transport and pycryptodomex
pytest session with TCP transport and pycryptodome
"""
session.notify(
"pytest-parametrized-{}(coverage={}, crypto='pycryptodomex', transport='tcp')".format(
"pytest-parametrized-{}(coverage={}, crypto='pycryptodome', transport='tcp')".format(
session.python, coverage
)
)
@nox.session(python=_PYTHON_VERSIONS, name="pytest-zeromq-pycryptodomex")
@nox.session(python=_PYTHON_VERSIONS, name="pytest-zeromq-pycryptodome")
@nox.parametrize("coverage", [False, True])
def pytest_zeromq_pycryptodomex(session, coverage):
def pytest_zeromq_pycryptodome(session, coverage):
"""
pytest session with zeromq transport and pycryptodomex
pytest session with zeromq transport and pycryptodome
"""
session.notify(
"pytest-parametrized-{}(coverage={}, crypto='pycryptodomex', transport='zeromq')".format(
"pytest-parametrized-{}(coverage={}, crypto='pycryptodome', transport='zeromq')".format(
session.python, coverage
)
)
@ -1031,7 +1027,7 @@ def lint_salt(session):
if session.posargs:
paths = session.posargs
else:
paths = ["setup.py", "noxfile.py", "salt/"]
paths = ["setup.py", "noxfile.py", "salt/", "tasks/"]
_lint(session, ".pylintrc", flags, paths)
@ -1133,3 +1129,72 @@ def docs_man(session, compress, update):
if compress:
session.run("tar", "-cJvf", "man-archive.tar.xz", "_build/man", external=True)
os.chdir("..")
def _invoke(session):
"""
Run invoke tasks
"""
requirements_file = "requirements/static/invoke.in"
distro_constraints = [
"requirements/static/{}/invoke.txt".format(_get_pydir(session))
]
install_command = ["--progress-bar=off", "-r", requirements_file]
for distro_constraint in distro_constraints:
install_command.extend(["--constraint", distro_constraint])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
cmd = ["inv"]
files = []
    # Unfortunately, invoke doesn't support the nargs functionality like argparse does.
# Let's make it behave properly
for idx, posarg in enumerate(session.posargs):
if idx == 0:
cmd.append(posarg)
continue
if posarg.startswith("--"):
cmd.append(posarg)
continue
files.append(posarg)
if files:
cmd.append("--files={}".format(" ".join(files)))
session.run(*cmd)
@nox.session(name="invoke", python="3")
def invoke(session):
_invoke(session)
@nox.session(name="invoke-pre-commit", python="3")
def invoke_pre_commit(session):
if "VIRTUAL_ENV" not in os.environ:
session.error(
"This should be running from within a virtualenv and "
"'VIRTUAL_ENV' was not found as an environment variable."
)
if "pre-commit" not in os.environ["VIRTUAL_ENV"]:
session.error(
"This should be running from within a pre-commit virtualenv and "
"'VIRTUAL_ENV'({}) does not appear to be a pre-commit virtualenv.".format(
os.environ["VIRTUAL_ENV"]
)
)
from nox.virtualenv import VirtualEnv
# Let's patch nox to make it run inside the pre-commit virtualenv
try:
session._runner.venv = VirtualEnv( # pylint: disable=unexpected-keyword-arg
os.environ["VIRTUAL_ENV"],
interpreter=session._runner.func.python,
reuse_existing=True,
venv=True,
)
except TypeError:
# This is still nox-py2
session._runner.venv = VirtualEnv(
os.environ["VIRTUAL_ENV"],
interpreter=session._runner.func.python,
reuse_existing=True,
)
_invoke(session)

View file

@ -1,13 +1,11 @@
apache-libcloud==2.4.0
backports.ssl_match_hostname==3.7.0.1
backports-abc==0.5; python_version < '3.0'
certifi
cffi==1.12.2
CherryPy==17.4.1
click==7.0
cryptography==2.6.1
enum34==1.1.6
futures==3.2.0 ; python_version < "3.0"
gitpython==2.1.15
idna==2.8
ipaddress==1.0.22
@ -19,7 +17,7 @@ msgpack-python==0.5.6
psutil==5.6.6
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
pycryptodomex==3.9.7
python-dateutil==2.8.0
python-gnupg==0.4.4
pyyaml==5.1.2

View file

@ -1,6 +1,6 @@
# Need to set a specific version of pyzmq, so can't use the main project's requirements file... have to copy it in and modify...
#-r ../../../requirements/zeromq.txt
-r ../../../requirements/base.txt
pycrypto>=2.6.1
pycryptodomex==3.9.7
pyzmq
-r requirements.txt

View file

@ -74,7 +74,7 @@ BuildRequires: python-ioflo >= 1.1.7
BuildRequires: python-raet >= 0.6.0
%endif
# requirements/zeromq.txt
BuildRequires: python-pycrypto >= 2.6.1
BuildRequires: pycryptodomex >= 3.9.7
BuildRequires: python-pyzmq >= 2.2.0
%if %{with test}
# requirements/dev_python27.txt
@ -121,7 +121,7 @@ Recommends: python-gnupg
# Recommends: salt-raet
# requirements/zeromq.txt
%endif
Requires: python-pycrypto >= 2.6.1
Requires: pycryptodomex >= 3.9.7
Requires: python-pyzmq >= 2.2.0
#
%if 0%{?suse_version}

View file

@ -4,14 +4,12 @@ salt for Windows with their corresponding licenses:
| Module | License |
|-----------|---------|
| backports-abc | --- |
| backports.ssl-match-hostname | PSF |
| certifi | ISC |
| cffi | MIT |
| CherryPy | BSD |
| cryptography | BSD |
| enum34 | BSD |
| futures | BSD |
| gitdb | BSD |
| GitPython | BSD |
| idna | BSD-like |

View file

@ -1,12 +1,10 @@
-r req_win.txt
backports-abc==0.5; python_version < '3.0'
backports.ssl-match-hostname==3.7.0.1; python_version < '3.7'
certifi
cffi==1.12.2
CherryPy==17.4.1
cryptography==2.6.1
enum34==1.1.6; python_version < '3.4'
futures==3.2.0; python_version < "3.0"
idna==2.8
ioloop==0.1a0
ipaddress==1.0.22
@ -19,7 +17,7 @@ msgpack-python==0.5.6
psutil==5.6.6
pyasn1==0.4.5
pycparser==2.19
pycryptodomex==3.8.1
pycryptodomex==3.9.7
pycurl==7.43.0.5
pymssql==2.1.4
PyMySQL==0.9.3

View file

@ -4,7 +4,5 @@ PyYAML
MarkupSafe
requests>=1.0.0
# Requirements for Tornado 4.5.3 (vendored as salt.ext.tornado)
backports-abc==0.5; python_version < '3.0'
singledispatch==3.4.0.3; python_version < '3.4'
# Required by Tornado to handle threads stuff.
futures>=2.0; python_version < '3.0'

View file

@ -1,2 +1 @@
pycrypto>=2.6.1; sys.platform not in 'win32,darwin'
pycryptodomex; sys.platform == 'win32'
pycryptodomex>=3.9.7

View file

@ -1,7 +1,7 @@
mock >= 3.0.0
# PyTest
pytest >=4.6.6,<4.7 # PyTest 4.6.x are the last Py2 and Py3 releases
pytest-salt >= 2019.12.27
pytest-salt >= 2020.1.27
pytest-tempdir >= 2019.10.12
pytest-helpers-namespace >= 2019.1.8
pytest-salt-runtests-bridge >= 2019.7.10

View file

@ -1,2 +1,2 @@
m2crypto
pycryptodomex
pycryptodome

View file

@ -0,0 +1,2 @@
invoke
blessings

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.5/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -81,14 +81,15 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pynacl==1.3.0 # via paramiko
pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -0,0 +1,9 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/invoke.txt -v requirements/static/invoke.in
#
blessings==1.7
invoke==1.4.1
six==1.14.0 # via blessings

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.5/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
pycryptodome==3.9.7

View file

@ -74,8 +74,8 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
pycparser==2.19 # via cffi
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pygit2==0.28.2
pyinotify==0.9.6
pynacl==1.3.0 # via paramiko
@ -84,7 +84,7 @@ pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.5/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -69,7 +69,7 @@ pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.8.1 ; sys_platform == "win32"
pycryptodomex==3.9.7
pycurl==7.43.0.5
pygit2==0.28.2
pymssql==2.1.4
@ -78,7 +78,7 @@ pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.6/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -80,14 +80,15 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pynacl==1.3.0 # via paramiko
pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -0,0 +1,9 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/invoke.txt -v requirements/static/invoke.in
#
blessings==1.7
invoke==1.4.1
six==1.14.0 # via blessings

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.6/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
pycryptodome==3.9.7

View file

@ -73,8 +73,8 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
pycparser==2.19 # via cffi
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pygit2==0.28.2
pyinotify==0.9.6
pynacl==1.3.0 # via paramiko
@ -83,7 +83,7 @@ pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.6/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -68,7 +68,7 @@ pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.8.1 ; sys_platform == "win32"
pycryptodomex==3.9.7
pycurl==7.43.0.5
pygit2==0.28.2
pymssql==2.1.4
@ -77,7 +77,7 @@ pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -79,14 +79,15 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pynacl==1.3.0 # via paramiko
pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -0,0 +1,9 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/invoke.txt -v requirements/static/invoke.in
#
blessings==1.7
invoke==1.4.1
six==1.14.0 # via blessings

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.7/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
pycryptodome==3.9.7

View file

@ -73,8 +73,8 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
pycparser==2.19 # via cffi
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pygit2==0.28.2
pyinotify==0.9.6
pynacl==1.3.0 # via paramiko
@ -83,7 +83,7 @@ pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -67,7 +67,7 @@ pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.8.1 ; sys_platform == "win32"
pycryptodomex==3.9.7
pycurl==7.43.0.5
pygit2==0.28.2
pymssql==2.1.4
@ -76,7 +76,7 @@ pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.8/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -78,14 +78,15 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pynacl==1.3.0 # via paramiko
pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -0,0 +1,9 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.8/invoke.txt -v requirements/static/invoke.in
#
blessings==1.7
invoke==1.4.1
six==1.14.0 # via blessings

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.8/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
pycryptodome==3.9.7

View file

@ -72,8 +72,8 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
pycparser==2.19 # via cffi
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pygit2==0.28.2
pyinotify==0.9.6
pynacl==1.3.0 # via paramiko
@ -82,7 +82,7 @@ pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.9/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.0
pycryptodome==3.9.7

View file

@ -78,14 +78,15 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5
pycparser==2.19
pycryptodome==3.8.1
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pynacl==1.3.0 # via paramiko
pyopenssl==19.0.0
pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0

View file

@ -0,0 +1,9 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.9/invoke.txt -v requirements/static/invoke.in
#
blessings==1.7
invoke==1.4.1
six==1.14.0 # via blessings

View file

@ -5,4 +5,4 @@
# pip-compile -o requirements/static/py3.9/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodomex==3.9.3
pycryptodome==3.9.7

View file

@ -72,8 +72,8 @@ pyaml==19.4.1 # via moto
pyasn1-modules==0.2.4 # via google-auth
pyasn1==0.4.5 # via paramiko, pyasn1-modules, rsa
pycparser==2.19 # via cffi
pycrypto==2.6.1 ; sys_platform not in "win32,darwin"
pycryptodome==3.8.1 # via python-jose
pycryptodomex==3.9.7
pygit2==0.28.2
pyinotify==0.9.6
pynacl==1.3.0 # via paramiko
@ -82,7 +82,7 @@ pyparsing==2.4.5 # via packaging
pyserial==3.4 # via junos-eznc
pytest-helpers-namespace==2019.1.8
pytest-salt-runtests-bridge==2019.7.10
pytest-salt==2019.12.27
pytest-salt==2020.1.27
pytest-tempdir==2019.10.12
pytest==4.6.6
python-dateutil==2.8.0 # via botocore, croniter, kubernetes, moto, vcert

View file

@ -6,7 +6,6 @@ A collection of mixins useful for the various *Client interfaces
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals, with_statement
import collections
import copy as pycopy
import fnmatch
import logging
@ -36,6 +35,13 @@ import salt.utils.user
import salt.utils.versions
from salt.ext import six
try:
from collections.abc import Mapping, MutableMapping
except ImportError:
# pylint: disable=no-name-in-module
from collections import Mapping, MutableMapping
log = logging.getLogger(__name__)
CLIENT_INTERNAL_KEYWORDS = frozenset(
@ -58,7 +64,7 @@ CLIENT_INTERNAL_KEYWORDS = frozenset(
)
class ClientFuncsDict(collections.MutableMapping):
class ClientFuncsDict(MutableMapping):
"""
Class to make a read-only dict for accessing runner funcs "directly"
"""
@ -148,7 +154,7 @@ class SyncClientMixin(object):
self.opts, crypt="clear", usage="master_call"
) as channel:
ret = channel.send(load)
if isinstance(ret, collections.Mapping):
if isinstance(ret, Mapping):
if "error" in ret:
salt.utils.error.raise_error(**ret["error"])
return ret

View file

@ -2950,7 +2950,9 @@ def apply_cloud_providers_config(overrides, defaults=None):
# Merge provided extends
keep_looping = False
for alias, entries in six.iteritems(providers.copy()):
for driver, details in six.iteritems(entries):
for driver in list(six.iterkeys(entries)):
# Don't use iteritems, because the values of the dictionary will be changed
details = entries[driver]
if "extends" not in details:
# Extends resolved or non existing, continue!

View file

@ -36,6 +36,13 @@ from salt.ext.tornado.escape import native_str, parse_qs_bytes, utf8
from salt.ext.tornado.log import gen_log
from salt.ext.tornado.util import ObjectDict, PY3
try:
from collections.abc import MutableMapping
except ImportError:
# pylint: disable=no-name-in-module
from collections import MutableMapping
if PY3:
import http.cookies as Cookie
from http.client import responses
@ -104,7 +111,7 @@ class _NormalizedHeaderCache(dict):
_normalized_headers = _NormalizedHeaderCache(1000)
class HTTPHeaders(collections.MutableMapping):
class HTTPHeaders(MutableMapping):
"""A dictionary that maintains ``Http-Header-Case`` for all keys.
Supports multiple values per key via a pair of new methods,

View file

@ -60,29 +60,34 @@ except ImportError:
__proxyenabled__ = ["*"]
__FQDN__ = None
_supported_dists += (
"arch",
"mageia",
"meego",
"vmware",
"bluewhite64",
"slamd64",
"ovs",
"system",
"mint",
"oracle",
"void",
)
# linux_distribution deprecated in py3.7
try:
from platform import linux_distribution as _deprecated_linux_distribution
# Extend the default list of supported distros. This will be used for the
# /etc/DISTRO-release checking that is part of linux_distribution()
from platform import _supported_dists
_supported_dists += (
"arch",
"mageia",
"meego",
"vmware",
"bluewhite64",
"slamd64",
"ovs",
"system",
"mint",
"oracle",
"void",
)
def linux_distribution(**kwargs):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
return _deprecated_linux_distribution(**kwargs)
return _deprecated_linux_distribution(
supported_dists=_supported_dists, **kwargs
)
except ImportError:
@ -2067,8 +2072,7 @@ def os_data():
"platform.linux_distribution()"
)
(osname, osrelease, oscodename) = [
x.strip('"').strip("'")
for x in linux_distribution(supported_dists=_supported_dists)
x.strip('"').strip("'") for x in linux_distribution()
]
# Try to assign these three names based on the lsb info, they tend to
# be more accurate than what python gets from /etc/DISTRO-release.
@ -2434,7 +2438,7 @@ def ip_fqdn():
start_time = datetime.datetime.utcnow()
info = socket.getaddrinfo(_fqdn, None, socket_type)
ret[key] = list(set(item[4][0] for item in info))
except socket.error:
except (socket.error, UnicodeError):
timediff = datetime.datetime.utcnow() - start_time
if timediff.seconds > 5 and __opts__["__role"] == "master":
log.warning(

View file

@ -379,7 +379,10 @@ def _get_snapshot_url(
version=version,
headers=headers,
)
if packaging not in snapshot_version_metadata["snapshot_versions"]:
if (
not has_classifier
and packaging not in snapshot_version_metadata["snapshot_versions"]
):
error_message = """Cannot find requested packaging '{packaging}' in the snapshot version metadata.
artifactory_url: {artifactory_url}
repository: {repository}

View file

@ -49,6 +49,7 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import logging
import sys
import time
# Import salt libs
@ -56,6 +57,7 @@ import salt.utils.compat
import salt.utils.odict as odict
import salt.utils.versions
from salt.exceptions import SaltInvocationError
from salt.ext import six
log = logging.getLogger(__name__)
@ -106,6 +108,10 @@ def _get_split_zone(zone, _conn, private_zone):
return False
def _is_retryable_error(exception):
return exception.code != "SignatureDoesNotMatch"
def describe_hosted_zones(
zone_id=None, domain_name=None, region=None, key=None, keyid=None, profile=None
):
@ -295,7 +301,7 @@ def zone_exists(
return bool(conn.get_zone(zone))
except DNSServerError as e:
if retry_on_errors:
if retry_on_errors and _is_retryable_error(e):
if "Throttling" == e.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == e.code:
@ -307,6 +313,7 @@ def zone_exists(
error_retries -= 1
continue
six.reraise(*sys.exc_info())
return False
def create_zone(
@ -475,7 +482,7 @@ def create_healthcheck(
return {"result": conn.create_health_check(hc_)}
except DNSServerError as exc:
log.debug(exc)
if retry_on_errors:
if retry_on_errors and _is_retryable_error(exc):
if "Throttling" == exc.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == exc.code:
@ -572,6 +579,8 @@ def get_record(
if rate_limit_retries is not None:
error_retries = rate_limit_retries
_record = None
ret = odict.OrderedDict()
while error_retries > 0:
try:
if split_dns:
@ -583,7 +592,6 @@ def get_record(
log.error(msg)
return None
_type = record_type.upper()
ret = odict.OrderedDict()
name = _encode_name(name)
@ -594,7 +602,7 @@ def get_record(
break # the while True
except DNSServerError as e:
if retry_on_errors:
if retry_on_errors and _is_retryable_error(e):
if "Throttling" == e.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == e.code:
@ -694,7 +702,7 @@ def add_record(
break
except DNSServerError as e:
if retry_on_errors:
if retry_on_errors and _is_retryable_error(e):
if "Throttling" == e.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == e.code:
@ -717,7 +725,7 @@ def add_record(
return _wait_for_sync(status.id, conn, wait_for_sync)
except DNSServerError as e:
if retry_on_errors:
if retry_on_errors and _is_retryable_error(e):
if "Throttling" == e.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == e.code:
@ -729,6 +737,7 @@ def add_record(
error_retries -= 1
continue
six.reraise(*sys.exc_info())
return False
def update_record(
@ -804,7 +813,7 @@ def update_record(
return _wait_for_sync(status.id, conn, wait_for_sync)
except DNSServerError as e:
if retry_on_errors:
if retry_on_errors and _is_retryable_error(e):
if "Throttling" == e.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == e.code:
@ -816,6 +825,7 @@ def update_record(
error_retries -= 1
continue
six.reraise(*sys.exc_info())
return False
def delete_record(
@ -891,7 +901,7 @@ def delete_record(
return _wait_for_sync(status.id, conn, wait_for_sync)
except DNSServerError as e:
if retry_on_errors:
if retry_on_errors and _is_retryable_error(e):
if "Throttling" == e.code:
log.debug("Throttled by AWS API.")
elif "PriorRequestNotComplete" == e.code:

View file

@ -16,9 +16,13 @@ import tempfile
# Import salt libs
import salt.utils.data
import salt.utils.platform
# Import 3rd party libs
from requests.structures import CaseInsensitiveDict
from salt.exceptions import (
CommandExecutionError,
CommandNotFoundError,
MinionError,
SaltInvocationError,
)
from salt.utils.data import CaseInsensitiveDict
@ -28,6 +32,8 @@ log = logging.getLogger(__name__)
__func_alias__ = {"list_": "list"}
__virtualname__ = "chocolatey"
def __virtual__():
"""
@ -48,7 +54,7 @@ def __virtual__():
"Windows Vista or later",
)
return "chocolatey"
return __virtualname__
def _clear_context():
@ -149,7 +155,7 @@ def chocolatey_version():
return __context__["chocolatey._version"]
def bootstrap(force=False):
def bootstrap(force=False, source=None):
"""
Download and install the latest version of the Chocolatey package manager
via the official bootstrap.
@ -159,18 +165,47 @@ def bootstrap(force=False):
ensure these prerequisites are met by downloading and executing the
appropriate installers from Microsoft.
Note that if PowerShell is installed, you may have to restart the host
machine for Chocolatey to work.
.. note::
If PowerShell is installed, you may have to restart the host machine for
Chocolatey to work.
force
Run the bootstrap process even if Chocolatey is found in the path.
.. note::
If you're installing offline using the source parameter, the PowerShell
and .NET requirements must already be met on the target. This shouldn't
be a problem on Windows versions 2012/8 and later
Args:
force (bool):
Run the bootstrap process even if Chocolatey is found in the path.
source (str):
The location of the ``.nupkg`` file or ``.ps1`` file to run from an
alternate location. This can be one of the following types of URLs:
- salt://
- http(s)://
- ftp://
- file:// - A local file on the system
.. versionadded:: Sodium
Returns:
str: The stdout of the Chocolatey installation script
CLI Example:
.. code-block:: bash
# To bootstrap Chocolatey
salt '*' chocolatey.bootstrap
salt '*' chocolatey.bootstrap force=True
# To bootstrap Chocolatey offline from a file on the salt master
salt '*' chocolatey.bootstrap source=salt://files/chocolatey.nupkg
# To bootstrap Chocolatey from a file on C:\\Temp
salt '*' chocolatey.bootstrap source=C:\\Temp\\chocolatey.nupkg
"""
# Check if Chocolatey is already present in the path
try:
@ -180,78 +215,140 @@ def bootstrap(force=False):
if choc_path and not force:
return "Chocolatey found at {0}".format(choc_path)
# The following lookup tables are required to determine the correct
# download required to install PowerShell. That's right, there's more
# than one! You're welcome.
ps_downloads = {
(
"Vista",
"x86",
): "http://download.microsoft.com/download/A/7/5/A75BC017-63CE-47D6-8FA4-AFB5C21BAC54/Windows6.0-KB968930-x86.msu",
(
"Vista",
"AMD64",
): "http://download.microsoft.com/download/3/C/8/3C8CF51E-1D9D-4DAA-AAEA-5C48D1CD055C/Windows6.0-KB968930-x64.msu",
(
"2008Server",
"x86",
): "http://download.microsoft.com/download/F/9/E/F9EF6ACB-2BA8-4845-9C10-85FC4A69B207/Windows6.0-KB968930-x86.msu",
(
"2008Server",
"AMD64",
): "http://download.microsoft.com/download/2/8/6/28686477-3242-4E96-9009-30B16BED89AF/Windows6.0-KB968930-x64.msu",
}
# It took until .NET v4.0 for Microsoft got the hang of making installers,
# this should work under any version of Windows
net4_url = "http://download.microsoft.com/download/1/B/E/1BE39E79-7E39-46A3-96FF-047F95396215/dotNetFx40_Full_setup.exe"
temp_dir = tempfile.gettempdir()
# Check if PowerShell is installed. This should be the case for every
# Windows release following Server 2008.
ps_path = "C:\\Windows\\SYSTEM32\\WindowsPowerShell\\v1.0\\powershell.exe"
# Make sure PowerShell is on the System if we're passing source
# Vista and Windows Server 2008 do not have Powershell installed
powershell_info = __salt__["cmd.shell_info"](shell="powershell")
if not powershell_info["installed"]:
# The following lookup tables are required to determine the correct
# download required to install PowerShell. That's right, there's more
# than one! You're welcome.
ps_downloads = {
(
"Vista",
"x86",
): "http://download.microsoft.com/download/A/7/5/A75BC017-63CE-47D6-8FA4-AFB5C21BAC54/Windows6.0-KB968930-x86.msu",
(
"Vista",
"AMD64",
): "http://download.microsoft.com/download/3/C/8/3C8CF51E-1D9D-4DAA-AAEA-5C48D1CD055C/Windows6.0-KB968930-x64.msu",
(
"2008Server",
"x86",
): "http://download.microsoft.com/download/F/9/E/F9EF6ACB-2BA8-4845-9C10-85FC4A69B207/Windows6.0-KB968930-x86.msu",
(
"2008Server",
"AMD64",
): "http://download.microsoft.com/download/2/8/6/28686477-3242-4E96-9009-30B16BED89AF/Windows6.0-KB968930-x64.msu",
}
if not __salt__["cmd.has_exec"](ps_path):
# PowerShell needs to be installed on older systems (Vista, 2008Server)
if (__grains__["osrelease"], __grains__["cpuarch"]) in ps_downloads:
# Install the appropriate release of PowerShell v2.0
url = ps_downloads[(__grains__["osrelease"], __grains__["cpuarch"])]
dest = os.path.join(temp_dir, "powershell.exe")
__salt__["cp.get_url"](url, dest)
dest = os.path.join(temp_dir, os.path.basename(url))
# Download the KB
try:
log.debug("Downloading PowerShell...")
__salt__["cp.get_url"](path=url, dest=dest)
except MinionError:
err = "Failed to download PowerShell KB for {0}" "".format(
__grains__["osrelease"]
)
if source:
raise CommandExecutionError(
"{0}: PowerShell is required to bootstrap Chocolatey "
"with Source".format(err)
)
raise CommandExecutionError(err)
# Install the KB
cmd = [dest, "/quiet", "/norestart"]
log.debug("Installing PowerShell...")
result = __salt__["cmd.run_all"](cmd, python_shell=False)
if result["retcode"] != 0:
err = (
"Installing Windows PowerShell failed. Please run the "
"installer GUI on the host to get a more specific "
"reason."
"Failed to install PowerShell KB. For more information "
"run the installer manually on the host"
)
raise CommandExecutionError(err)
else:
err = "Windows PowerShell not found"
err = "Windows PowerShell Installation not available"
raise CommandNotFoundError(err)
# Run the .NET Framework 4 web installer
dest = os.path.join(temp_dir, "dotnet4.exe")
__salt__["cp.get_url"](net4_url, dest)
cmd = [dest, "/q", "/norestart"]
result = __salt__["cmd.run_all"](cmd, python_shell=False)
if result["retcode"] != 0:
err = (
"Installing .NET v4.0 failed. Please run the installer GUI on "
"the host to get a more specific reason."
)
# Check that .NET v4.0+ is installed
# Windows 7 / Windows Server 2008 R2 and below do not come with at least
# .NET v4.0 installed
if not __utils__["dotnet.version_at_least"](version="4"):
# It took until .NET v4.0 for Microsoft got the hang of making
# installers, this should work under any version of Windows
url = "http://download.microsoft.com/download/1/B/E/1BE39E79-7E39-46A3-96FF-047F95396215/dotNetFx40_Full_setup.exe"
dest = os.path.join(temp_dir, os.path.basename(url))
# Download the .NET Framework 4 web installer
try:
log.debug("Downloading .NET v4.0...")
__salt__["cp.get_url"](path=url, dest=dest)
except MinionError:
err = "Failed to download .NET v4.0 Web Installer"
if source:
err = (
"{0}: .NET v4.0+ is required to bootstrap "
"Chocolatey with Source".format(err)
)
raise CommandExecutionError(err)
# Run the .NET Framework 4 web installer
cmd = [dest, "/q", "/norestart"]
log.debug("Installing .NET v4.0...")
result = __salt__["cmd.run_all"](cmd, python_shell=False)
if result["retcode"] != 0:
err = (
"Failed to install .NET v4.0 failed. For more information "
"run the installer manually on the host"
)
raise CommandExecutionError(err)
# Define target / destination
if source:
url = source
else:
url = "https://chocolatey.org/install.ps1"
dest = os.path.join(temp_dir, os.path.basename(url))
# Download Chocolatey installer
try:
log.debug("Downloading Chocolatey: {0}".format(os.path.basename(url)))
script = __salt__["cp.get_url"](path=url, dest=dest)
log.debug("Script: {0}".format(script))
except MinionError:
err = "Failed to download Chocolatey Installer"
if source:
err = "{0} from source"
raise CommandExecutionError(err)
# Run the Chocolatey bootstrap.
cmd = (
"{0} -NoProfile -ExecutionPolicy unrestricted "
'-Command "iex ((new-object net.webclient).'
"DownloadString('https://chocolatey.org/install.ps1'))\" "
"&& SET PATH=%PATH%;%systemdrive%\\chocolatey\\bin".format(ps_path)
)
result = __salt__["cmd.run_all"](cmd, python_shell=True)
# If this is a nupkg download we need to unzip it first
if os.path.splitext(os.path.basename(dest))[1] == ".nupkg":
log.debug("Unzipping Chocolatey: {0}".format(dest))
__salt__["archive.unzip"](
zip_file=dest,
dest=os.path.join(os.path.dirname(dest), "chocolatey"),
extract_perms=False,
)
script = os.path.join(
os.path.dirname(dest), "chocolatey", "tools", "chocolateyInstall.ps1"
)
if not os.path.exists(script):
raise CommandExecutionError(
"Failed to find Chocolatey installation " "script: {0}".format(script)
)
# Run the Chocolatey bootstrap
log.debug("Installing Chocolatey: {0}".format(script))
result = __salt__["cmd.script"](
script, cwd=os.path.dirname(script), shell="powershell", python_shell=True
)
if result["retcode"] != 0:
err = "Bootstrapping Chocolatey failed: {0}".format(result["stderr"])
raise CommandExecutionError(err)
@ -259,6 +356,69 @@ def bootstrap(force=False):
return result["stdout"]
def unbootstrap():
"""
Uninstall chocolatey from the system by doing the following:
.. versionadded:: Sodium
- Delete the Chocolatey Directory
- Remove Chocolatey from the path
- Remove Chocolatey environment variables
Returns:
list: A list of items that were removed, otherwise an empty list
CLI Example:
.. code-block:: bash
salt * chocolatey.unbootstrap
"""
removed = []
# Delete the Chocolatey directory
choco_dir = os.environ.get("ChocolateyInstall", False)
if choco_dir:
if os.path.exists(choco_dir):
log.debug("Removing Chocolatey directory: {0}".format(choco_dir))
__salt__["file.remove"](path=choco_dir, force=True)
removed.append("Removed Directory: {0}".format(choco_dir))
else:
known_paths = [
os.path.join(os.environ.get("ProgramData"), "Chocolatey"),
os.path.join(os.environ.get("SystemDrive"), "Chocolatey"),
]
for path in known_paths:
if os.path.exists(path):
log.debug("Removing Chocolatey directory: {0}".format(path))
__salt__["file.remove"](path=path, force=True)
removed.append("Removed Directory: {0}".format(path))
# Delete all Chocolatey environment variables
for env_var in __salt__["environ.items"]():
if env_var.lower().startswith("chocolatey"):
log.debug(
"Removing Chocolatey environment variable: {0}" "".format(env_var)
)
__salt__["environ.setval"](
key=env_var, val=False, false_unsets=True, permanent="HKLM"
)
__salt__["environ.setval"](
key=env_var, val=False, false_unsets=True, permanent="HKCU"
)
removed.append("Removed Environment Var: {0}".format(env_var))
# Remove Chocolatey from the path:
for path in __salt__["win_path.get_path"]():
if "chocolatey" in path.lower():
log.debug("Removing Chocolatey path item: {0}" "".format(path))
__salt__["win_path.remove"](path=path, rehash=True)
removed.append("Removed Path Item: {0}".format(path))
return removed
def list_(
narrow=None,
all_versions=False,
@ -963,7 +1123,13 @@ def version(name, check_remote=False, source=None, pre_versions=False):
available = list_(narrow=name, pre_versions=pre_versions, source=source)
for pkg in packages:
packages[pkg] = {"installed": installed[pkg], "available": available[pkg]}
# Grab the current version from the package that was installed
packages[pkg] = {"installed": installed[pkg]}
# If there's a remote package available, then also include that
# in the dictionary that we return.
if pkg in available:
packages[pkg]["available"] = available[pkg]
return packages

View file

@ -463,6 +463,7 @@ IPV4_ATTR_MAP = {
#
"network": __anything, # i don't know what this is
"test": __anything, # TODO
"enable_ipv4": __anything, # TODO
"enable_ipv6": __anything, # TODO
}
@ -503,6 +504,7 @@ IPV6_ATTR_MAP = {
#
"vlan-raw-device": __anything,
"test": __anything, # TODO
"enable_ipv4": __anything, # TODO
"enable_ipv6": __anything, # TODO
}
@ -700,10 +702,13 @@ def _parse_interfaces(interface_files=None):
adapters.pop(iface_name)
continue
for opt in ["ethtool", "bonding", "bridging"]:
if "inet" in adapters[iface_name]["data"]:
if opt in adapters[iface_name]["data"]["inet"]:
opt_keys = sorted(adapters[iface_name]["data"]["inet"][opt].keys())
adapters[iface_name]["data"]["inet"][opt + "_keys"] = opt_keys
for inet in ["inet", "inet6"]:
if inet in adapters[iface_name]["data"]:
if opt in adapters[iface_name]["data"][inet]:
opt_keys = sorted(
adapters[iface_name]["data"][inet][opt].keys()
)
adapters[iface_name]["data"][inet][opt + "_keys"] = opt_keys
return adapters
@ -1254,148 +1259,92 @@ def _parse_settings_eth(opts, iface_type, enabled, iface):
if opts.get("hotplug", False):
adapters[iface]["hotplug"] = True
# Defaults assume IPv4 (inet) interfaces unless enable_ipv6=True
def_addrfam = "inet"
dual_stack = False
if opts.get("enable_ipv6", None) and opts.get("iface_type", "") == "vlan":
iface_data["inet6"]["vlan_raw_device"] = re.sub(r"\.\d*", "", iface)
# If enable_ipv6=True, then expet either IPv6-only or dual stack.
if "enable_ipv6" in opts and opts["enable_ipv6"]:
iface_data["inet6"]["addrfam"] = "inet6"
iface_data["inet6"]["netmask"] = "64" # defaults to 64
def_addrfam = "inet6"
for addrfam in ["inet", "inet6"]:
if iface_type not in ["bridge"]:
tmp_ethtool = _parse_ethtool_opts(opts, iface)
if tmp_ethtool:
ethtool = {}
for item in tmp_ethtool:
ethtool[_ETHTOOL_CONFIG_OPTS[item]] = tmp_ethtool[item]
if "iface_type" in opts and opts["iface_type"] == "vlan":
iface_data["inet6"]["vlan_raw_device"] = re.sub(r"\.\d*", "", iface)
iface_data[addrfam]["ethtool"] = ethtool
# return a list of sorted keys to ensure consistent order
iface_data[addrfam]["ethtool_keys"] = sorted(ethtool)
if "ipaddr" in opts and "ipv6ipaddr" in opts:
# If both 'ipaddr' and 'ipv6ipaddr' are present; expect dual stack
if iface_type == "bridge":
bridging = _parse_bridge_opts(opts, iface)
if bridging:
iface_data[addrfam]["bridging"] = bridging
iface_data[addrfam]["bridging_keys"] = sorted(bridging)
iface_data[addrfam]["addrfam"] = addrfam
elif iface_type == "bond":
bonding = _parse_settings_bond(opts, iface)
if bonding:
iface_data[addrfam]["bonding"] = bonding
iface_data[addrfam]["bonding"]["slaves"] = opts["slaves"]
iface_data[addrfam]["bonding_keys"] = sorted(bonding)
iface_data[addrfam]["addrfam"] = addrfam
elif iface_type == "slave":
adapters[iface]["master"] = opts["master"]
opts["proto"] = "manual"
iface_data[addrfam]["master"] = adapters[iface]["master"]
iface_data[addrfam]["addrfam"] = addrfam
elif iface_type == "vlan":
iface_data[addrfam]["vlan_raw_device"] = re.sub(r"\.\d*", "", iface)
iface_data[addrfam]["addrfam"] = addrfam
elif iface_type == "pppoe":
tmp_ethtool = _parse_ethtool_pppoe_opts(opts, iface)
if tmp_ethtool:
for item in tmp_ethtool:
adapters[iface]["data"][addrfam][
_DEB_CONFIG_PPPOE_OPTS[item]
] = tmp_ethtool[item]
iface_data[addrfam]["addrfam"] = addrfam
opts.pop("mode", None)
for opt, val in opts.items():
inet = None
if opt.startswith("ipv4"):
opt = opt[4:]
inet = "inet"
iface_data["inet"]["addrfam"] = "inet"
def_addrfam = "inet"
dual_stack = True
elif opt.startswith("ipv6"):
iface_data["inet6"]["addrfam"] = "inet6"
opt = opt[4:]
inet = "inet6"
elif opt in [
"ipaddr",
"address",
"ipaddresses",
"addresses",
"gateway",
"proto",
]:
iface_data["inet"]["addrfam"] = "inet"
inet = "inet"
else:
# If enable_ipv6=False|None, IPv6 settings should not be set.
iface_data["inet"]["addrfam"] = "inet"
_opt = SALT_ATTR_TO_DEBIAN_ATTR_MAP.get(opt, opt)
_debopt = _opt.replace("-", "_")
if iface_type not in ["bridge"]:
tmp_ethtool = _parse_ethtool_opts(opts, iface)
if tmp_ethtool:
ethtool = {}
for item in tmp_ethtool:
ethtool[_ETHTOOL_CONFIG_OPTS[item]] = tmp_ethtool[item]
iface_data[def_addrfam]["ethtool"] = ethtool
# return a list of sorted keys to ensure consistent order
iface_data[def_addrfam]["ethtool_keys"] = sorted(ethtool)
if iface_type == "bridge":
bridging = _parse_bridge_opts(opts, iface)
if bridging:
opts.pop("mode", None)
iface_data[def_addrfam]["bridging"] = bridging
iface_data[def_addrfam]["bridging_keys"] = sorted(bridging)
iface_data[def_addrfam]["addrfam"] = def_addrfam
elif iface_type == "bond":
bonding = _parse_settings_bond(opts, iface)
if bonding:
opts.pop("mode", None)
iface_data[def_addrfam]["bonding"] = bonding
iface_data[def_addrfam]["bonding"]["slaves"] = opts["slaves"]
iface_data[def_addrfam]["bonding_keys"] = sorted(bonding)
iface_data[def_addrfam]["addrfam"] = def_addrfam
elif iface_type == "slave":
adapters[iface]["master"] = opts["master"]
opts["proto"] = "manual"
iface_data[def_addrfam]["master"] = adapters[iface]["master"]
iface_data[def_addrfam]["addrfam"] = def_addrfam
elif iface_type == "vlan":
iface_data[def_addrfam]["vlan_raw_device"] = re.sub(r"\.\d*", "", iface)
iface_data[def_addrfam]["addrfam"] = def_addrfam
elif iface_type == "pppoe":
tmp_ethtool = _parse_ethtool_pppoe_opts(opts, iface)
if tmp_ethtool:
for item in tmp_ethtool:
adapters[iface]["data"][def_addrfam][
_DEB_CONFIG_PPPOE_OPTS[item]
] = tmp_ethtool[item]
iface_data[def_addrfam]["addrfam"] = def_addrfam
for opt in opts:
# trim leading "ipv6" from option
if opt.startswith("ipv6"):
optname = opt[4:] # trim off the ipv6
v6only = True
else:
optname = opt
v6only = False
_optname = SALT_ATTR_TO_DEBIAN_ATTR_MAP.get(optname, optname)
if _attrmaps_contain_attr(_optname):
valuestr = opts[opt]
# default to 'static' if proto is 'none'
if optname == "proto" and valuestr == "none":
valuestr = "static"
# If option is v6-only, don't validate against inet and always set value
if v6only:
(valid, value, errmsg) = _validate_interface_option(
_optname, valuestr, addrfam="inet6"
)
if not valid:
_raise_error_iface(
iface, "'{0}' '{1}'".format(opt, valuestr), [errmsg]
)
# replace dashes with underscores for jinja
_optname = _optname.replace("-", "_")
iface_data["inet6"][_optname] = value
# Else, if it's a dual stack, the option may belong in both; apply v4 opt as v6 default
elif dual_stack:
valid_once = False
errmsg = None
for addrfam in ["inet", "inet6"]:
(valid, value, errmsg) = _validate_interface_option(
_optname, valuestr, addrfam=addrfam
)
if valid:
valid_once = True
# replace dashes with underscores for jinja
_optname = _optname.replace("-", "_")
# if a v6-only version of this option was set; don't override
# otherwise, if dual stack, use the v4 version as a default value for v6
# allows overriding with =None
if addrfam == "inet" or _optname not in iface_data["inet6"]:
iface_data[addrfam][_optname] = value
if not valid_once:
_raise_error_iface(
iface, "'{0}' '{1}'".format(opt, valuestr), [errmsg]
)
# Else, it goes in the default(only) addrfam
# Not assuming v4 allows a v6 block to be created without lots of "ipv6" prefixes
else:
(valid, value, errmsg) = _validate_interface_option(
_optname, valuestr, addrfam=def_addrfam
)
if not valid:
_raise_error_iface(
iface, "'{0}' '{1}'".format(opt, valuestr), [errmsg]
)
# replace dashes with underscores for jinja
_optname = _optname.replace("-", "_")
iface_data[def_addrfam][_optname] = value
for addrfam in ["inet", "inet6"]:
(valid, value, errmsg) = _validate_interface_option(
_opt, val, addrfam=addrfam
)
if not valid:
continue
if inet is None and _debopt not in iface_data[addrfam]:
iface_data[addrfam][_debopt] = value
elif inet == addrfam:
iface_data[addrfam][_debopt] = value
for opt in [
"up_cmds",
@ -1406,15 +1355,14 @@ def _parse_settings_eth(opts, iface_type, enabled, iface):
"post_down_cmds",
]:
if opt in opts:
iface_data[def_addrfam][opt] = opts[opt]
iface_data["inet"][opt] = opts[opt]
iface_data["inet6"][opt] = opts[opt]
for addrfam in ["inet", "inet6"]:
if (
"addrfam" in iface_data[addrfam]
and iface_data[addrfam]["addrfam"] == addrfam
):
pass
else:
# Remove incomplete/disabled inet blocks
for (addrfam, opt) in [("inet", "enable_ipv4"), ("inet6", "enable_ipv6")]:
if opts.get(opt, None) is False:
iface_data.pop(addrfam)
elif iface_data[addrfam].get("addrfam", "") != addrfam:
iface_data.pop(addrfam)
return adapters
@ -1733,9 +1681,6 @@ def build_interface(iface, iface_type, enabled, **settings):
if iface_type not in _IFACE_TYPES:
_raise_error_iface(iface, iface_type, _IFACE_TYPES)
if "proto" not in settings:
settings["proto"] = "static"
if iface_type == "slave":
settings["slave"] = "yes"
if "master" not in settings:
@ -1835,8 +1780,7 @@ def down(iface, iface_type):
# Slave devices are controlled by the master.
# Source 'interfaces' aren't brought down.
if iface_type not in ["slave", "source"]:
cmd = ["ip", "link", "set", iface, "down"]
return __salt__["cmd.run"](cmd, python_shell=False)
return __salt__["cmd.run"](["ifdown", iface])
return None
@ -1897,8 +1841,7 @@ def up(iface, iface_type): # pylint: disable=C0103
# Slave devices are controlled by the master.
# Source 'interfaces' aren't brought up.
if iface_type not in ("slave", "source"):
cmd = ["ip", "link", "set", iface, "up"]
return __salt__["cmd.run"](cmd, python_shell=False)
return __salt__["cmd.run"](["ifup", iface])
return None
@ -1929,11 +1872,9 @@ def get_network_settings():
hostname = _parse_hostname()
domainname = _parse_domainname()
searchdomain = _parse_searchdomain()
settings["hostname"] = hostname
settings["domainname"] = domainname
settings["searchdomain"] = searchdomain
else:
settings = _parse_current_network_settings()

View file

@ -79,6 +79,7 @@ def setval(key, val, false_unsets=False, permanent=False):
os.environ.pop(key, None)
if permanent and is_windows:
__utils__["reg.delete_value"](permanent_hive, permanent_key, key)
__utils__["win_functions.broadcast_setting_change"]()
return None
except Exception as exc: # pylint: disable=broad-except
log.error(
@ -95,6 +96,7 @@ def setval(key, val, false_unsets=False, permanent=False):
os.environ[key] = val
if permanent and is_windows:
__utils__["reg.set_value"](permanent_hive, permanent_key, key, val)
__utils__["win_functions.broadcast_setting_change"]()
return os.environ[key]
except Exception as exc: # pylint: disable=broad-except
log.error(

File diff suppressed because it is too large Load diff

View file

@ -45,6 +45,7 @@ from salt.exceptions import CommandExecutionError, SaltInvocationError
# Import 3rd-party libs
from salt.ext import six
from salt.loader import _format_cached_grains
from salt.runners.state import orchestrate as _orchestrate
from salt.utils.odict import OrderedDict
@ -2262,7 +2263,7 @@ def pkg(pkg_path, pkg_sum, hash_type, test=None, **kwargs):
roster_grains_json = os.path.join(root, "roster_grains.json")
if os.path.isfile(roster_grains_json):
with salt.utils.files.fopen(roster_grains_json, "r") as fp_:
roster_grains = salt.utils.json.load(fp_)
roster_grains = _format_cached_grains(salt.utils.json.load(fp_))
if os.path.isfile(roster_grains_json):
popts["grains"] = roster_grains

View file

@ -33,11 +33,6 @@ import stat
import string # do not remove, used in imported file.py functions
import sys # do not remove, used in imported file.py functions
import tempfile # do not remove. Used in salt.modules.file.__clean_tmp
# pylint: disable=no-name-in-module
from collections import Iterable, Mapping # do not remove
# pylint: enable=no-name-in-module
from functools import reduce # do not remove
import salt.utils.atomicfile # do not remove, used in imported file.py functions
@ -64,16 +59,14 @@ from salt.modules.file import (
_get_bkroot,
_get_eol,
_get_flags,
_insert_line_after,
_insert_line_before,
_mkstemp_copy,
_psed,
_regex_to_static,
_sed_esc,
_set_line,
_set_line_eol,
_set_line_indent,
_splitlines_preserving_trailing_newline,
_starts_till,
access,
append,
apply_template_on_contents,
@ -129,7 +122,12 @@ from salt.modules.file import (
)
from salt.utils.functools import namespaced_function as _namespaced_function
# pylint: enable=W0611
# pylint: disable=no-name-in-module
try:
from collections import Iterable, Mapping
except ImportError:
from collections.abc import Iterable, Mapping
# pylint: enable=no-name-in-module
HAS_WINDOWS_MODULES = False
@ -190,8 +188,9 @@ def __virtual__():
global write, pardir, join, _add_flags, apply_template_on_contents
global path_exists_glob, comment, uncomment, _mkstemp_copy
global _regex_to_static, _set_line_indent, dirname, basename
global list_backups_dir, normpath_, _assert_occurrence, _starts_till
global _insert_line_before, _insert_line_after, _set_line_eol, _get_eol
global list_backups_dir, normpath_, _assert_occurrence
global _set_line_eol, _get_eol
global _set_line
replace = _namespaced_function(replace, globals())
search = _namespaced_function(search, globals())
@ -250,11 +249,10 @@ def __virtual__():
uncomment = _namespaced_function(uncomment, globals())
comment_line = _namespaced_function(comment_line, globals())
_regex_to_static = _namespaced_function(_regex_to_static, globals())
_set_line = _namespaced_function(_set_line, globals())
_set_line_indent = _namespaced_function(_set_line_indent, globals())
_set_line_eol = _namespaced_function(_set_line_eol, globals())
_get_eol = _namespaced_function(_get_eol, globals())
_insert_line_after = _namespaced_function(_insert_line_after, globals())
_insert_line_before = _namespaced_function(_insert_line_before, globals())
_mkstemp_copy = _namespaced_function(_mkstemp_copy, globals())
_add_flags = _namespaced_function(_add_flags, globals())
apply_template_on_contents = _namespaced_function(
@ -265,7 +263,6 @@ def __virtual__():
list_backups_dir = _namespaced_function(list_backups_dir, globals())
normpath_ = _namespaced_function(normpath_, globals())
_assert_occurrence = _namespaced_function(_assert_occurrence, globals())
_starts_till = _namespaced_function(_starts_till, globals())
else:
return False, "Module win_file: Missing Win32 modules"

View file

@ -2453,10 +2453,10 @@ def group_list():
return ret
def group_info(name, expand=False):
def group_info(name, expand=False, ignore_groups=None):
"""
.. versionadded:: 2014.1.0
.. versionchanged:: 2016.3.0,2015.8.4,2015.5.10
.. versionchanged:: Sodium,2016.3.0,2015.8.4,2015.5.10
The return data has changed. A new key ``type`` has been added to
distinguish environment groups from package groups. Also, keys for the
group name and group ID have been added. The ``mandatory packages``,
@ -2477,6 +2477,13 @@ def group_info(name, expand=False):
.. versionadded:: 2016.3.0
ignore_groups : None
This parameter can be used to pass a list of groups to ignore when
expanding subgroups. It is used during recursion in order to prevent
expanding the same group multiple times.
.. versionadded:: Sodium
CLI Example:
.. code-block:: bash
@ -2511,6 +2518,7 @@ def group_info(name, expand=False):
ret["description"] = g_info.get("description", "")
completed_groups = ignore_groups or []
pkgtypes_capturegroup = "(" + "|".join(pkgtypes) + ")"
for pkgtype in pkgtypes:
target_found = False
@ -2530,7 +2538,19 @@ def group_info(name, expand=False):
continue
if target_found:
if expand and ret["type"] == "environment group":
expanded = group_info(line, expand=True)
if not line or line in completed_groups:
continue
log.trace(
'Adding group "%s" to completed list: %s',
line,
completed_groups,
)
completed_groups.append(line)
# Using the @ prefix on the group here in order to prevent multiple matches
# being returned, such as with gnome-desktop
expanded = group_info(
"@" + line, expand=True, ignore_groups=completed_groups
)
# Don't shadow the pkgtype variable from the outer loop
for p_type in pkgtypes:
ret[p_type].update(set(expanded[p_type]))

View file

@ -14,6 +14,20 @@ developers can add new secrets quickly and easily.
This renderer requires the gpg_ binary. No python libraries are required as of
the 2015.8.0 release.
.. _gpg-homedir:
GPG Homedir
-----------
When running gpg commands, it is important to run commands as the user that owns
the keys directory. If salt-master runs as user salt, then ``su salt`` before
running any gpg commands.
To avoid compatibility and upgrade problems and to provide a standardized location
for keys, salt uses ``/etc/salt/gpgkeys``. In order to make the gpg command use
this directory, use ``gpg --homedir /etc/salt/gpgkeys`` with gpg commands or set
the homedir for that user using ``echo 'homedir /etc/salt/gpgkeys' >> ~/.gnupg``.
.. _gpg: https://gnupg.org
Setup
@ -38,19 +52,18 @@ your application. Be sure to back up the ``gpgkeys`` directory someplace safe!
be achieved by installing the ``rng-tools`` package.
Import keys to a master
************************
***********************
If the keys already exist and need to be imported to the salt master, run the
following to import them.
.. code-block:: bash
gpg --homedir /etc/salt/gpgkeys --import /path/to/private.key
gpg --import /path/to/pubkey.gpg
gpg --homedir /etc/salt/gpgkeys --import /path/to/private.key
gpg --homedir /etc/salt/gpgkeys --import /path/to/pubkey.gpg
If the salt master runs as normal user, become this user before importing the
keys. The default target dir will be ``~/.gnupg``. This can be overridden by
the ``--homedir`` option. The keys must be at least readable for the runuser.
Note: The default `GPG Homedir <gpg-homedir>` is ``~/.gnupg`` and needs to be
set using ``--homedir``.
Adjust trust level of imported keys
***********************************
@ -62,8 +75,8 @@ keys.
.. code-block:: bash
gpg --homedir /etc/salt/gpgkeys --list-keys
gpg --list-secret-keys
gpg --homedir /etc/salt/gpgkeys --list-keys
gpg --homedir /etc/salt/gpgkeys --list-secret-keys
If the trust-level is not ``ultimate`` it needs to be changed by running
@ -71,20 +84,19 @@ If the trust-level is not ``ultimate`` it needs to be changed by running
gpg --homedir /etc/salt/gpgkeys --edit-key <key_id>
This will open an interactive shell for the management of the GPG encrypted key. Type
``trust`` to be able to set the trust level for the key and then select
``5 (I trust ultimately)``. Then quit the shell by typing ``save``.
This will open an interactive shell for the management of the GPG encryption key.
Type ``trust`` to be able to set the trust level for the key and then select ``5
(I trust ultimately)``. Then quit the shell by typing ``save``.
Enable usage of GPG keys on the master
**************************************
Different GPG Location
**********************
Generating or importing the keys is not enough to activate the ability to decrypt
the pillars, especially if the keys are generated/imported in a non-standard dir.
In some cases, it's preferable to have gpg keys stored on removeable media or
other non-standard locations. This can be done using the ``gpg_keydir`` option
on the salt master. This will also require using a different path to ``--homedir``,
as mentioned in the `GPG Homedir <gpg-homedir>` section.
To enable the keys on the salt-master, the following needs to be added to the
masters configuration.
.. code-block:: yaml
.. code-block:: bash
gpg_keydir: <path/to/homedir>
@ -310,7 +322,7 @@ def _get_key_dir():
gpg_keydir = __opts__.get(
"gpg_keydir",
os.path.join(
__opts__.get("config_dir", os.path.dirname(__opts__["conf_file"]),),
__opts__.get("config_dir", os.path.dirname(__opts__["conf_file"])),
"gpgkeys",
),
)

View file

@ -225,10 +225,10 @@ def render(input, saltenv="base", sls="", argline="", **kws):
tmplctx = STATE_CONF.copy()
if tmplctx:
prefix = sls + "::"
for k in six.iterkeys(tmplctx): # iterate over a copy of keys
if k.startswith(prefix):
tmplctx[k[len(prefix) :]] = tmplctx[k]
del tmplctx[k]
tmplctx = {
k[len(prefix) :] if k.startswith(prefix) else k: v
for k, v in six.iteritems(tmplctx)
}
else:
tmplctx = {}

View file

@ -1182,7 +1182,7 @@ class State(object):
elif data["fun"] == "symlink":
if "bin" in data["name"]:
self.module_refresh()
elif data["state"] in ("pkg", "ports"):
elif data["state"] in ("pkg", "ports", "pip"):
self.module_refresh()
def verify_data(self, data):

View file

@ -292,11 +292,7 @@ import shutil
import sys
import time
import traceback
# pylint: disable=no-name-in-module
from collections import Iterable, Mapping, defaultdict
# pylint: enable=no-name-in-module
from collections import defaultdict
from datetime import date, datetime # python3 problem in the making?
# Import salt libs
@ -322,12 +318,19 @@ from salt.ext.six.moves.urllib.parse import urlparse as _urlparse
from salt.serializers import DeserializationError
from salt.state import get_accumulator_dir as _get_accumulator_dir
# pylint: disable=no-name-in-module
try:
from collections import Iterable, Mapping
except ImportError:
from collections.abc import Iterable, Mapping
# pylint: enable=no-name-in-module
if salt.utils.platform.is_windows():
import salt.utils.win_dacl
import salt.utils.win_functions
import salt.utils.winapi
if salt.utils.platform.is_windows():
import pywintypes
import win32com.client
@ -1670,7 +1673,7 @@ def symlink(
will be deleted to make room for the symlink, unless
backupname is set, when it will be renamed
.. versionchanged:: 3000
.. versionchanged:: Neon
Force will now remove all types of existing file system entries,
not just files, directories and symlinks.
@ -2253,6 +2256,7 @@ def managed(
follow_symlinks=True,
check_cmd=None,
skip_verify=False,
selinux=None,
win_owner=None,
win_perms=None,
win_deny_perms=None,
@ -2754,6 +2758,22 @@ def managed(
.. versionadded:: 2016.3.0
selinux : None
Allows setting the selinux user, role, type, and range of a managed file
.. code-block:: yaml
/tmp/selinux.test
file.managed:
- user: root
- selinux:
seuser: system_u
serole: object_r
setype: system_conf_t
seranage: s0
.. versionadded:: Neon
win_owner : None
The owner of the directory. If this is not passed, user will be used. If
user is not passed, the account under which Salt is running will be
@ -2834,6 +2854,17 @@ def managed(
if attrs is not None and salt.utils.platform.is_windows():
return _error(ret, "The 'attrs' option is not supported on Windows")
if selinux is not None and not salt.utils.platform.is_linux():
return _error(ret, "The 'selinux' option is only supported on Linux")
if selinux:
seuser = selinux.get("seuser", None)
serole = selinux.get("serole", None)
setype = selinux.get("setype", None)
serange = selinux.get("serange", None)
else:
seuser = serole = setype = serange = None
try:
keep_mode = mode.lower() == "keep"
if keep_mode:
@ -3048,7 +3079,17 @@ def managed(
)
else:
ret, ret_perms = __salt__["file.check_perms"](
name, ret, user, group, mode, attrs, follow_symlinks
name,
ret,
user,
group,
mode,
attrs,
follow_symlinks,
seuser=seuser,
serole=serole,
setype=setype,
serange=serange,
)
if __opts__["test"]:
if (
@ -3095,6 +3136,10 @@ def managed(
contents,
skip_verify,
keep_mode,
seuser=seuser,
serole=serole,
setype=setype,
serange=serange,
**kwargs
)
@ -3205,6 +3250,10 @@ def managed(
win_perms_reset=win_perms_reset,
encoding=encoding,
encoding_errors=encoding_errors,
seuser=seuser,
serole=serole,
setype=setype,
serange=serange,
**kwargs
)
except Exception as exc: # pylint: disable=broad-except
@ -3275,6 +3324,10 @@ def managed(
win_perms_reset=win_perms_reset,
encoding=encoding,
encoding_errors=encoding_errors,
seuser=seuser,
serole=serole,
setype=setype,
serange=serange,
**kwargs
)
except Exception as exc: # pylint: disable=broad-except
@ -4587,35 +4640,57 @@ def line(
file_mode=None,
):
"""
Line-based editing of a file.
Line-focused editing of a file.
.. versionadded:: 2015.8.0
:param name:
.. note::
``file.line`` exists for historic reasons, and is not
generally recommended. It has a lot of quirks. You may find
``file.replace`` to be more suitable.
``file.line`` is most useful if you have single lines in a file,
potentially a config file, that you would like to manage. It can
remove, add, and replace lines.
name
Filesystem path to the file to be edited.
:param content:
content
Content of the line. Allowed to be empty if mode=delete.
:param match:
match
Match the target line for an action by
a fragment of a string or regular expression.
If neither ``before`` nor ``after`` are provided, and ``match``
is also ``None``, match becomes the ``content`` value.
is also ``None``, match falls back to the ``content`` value.
:param mode:
mode
Defines how to edit a line. One of the following options is
required:
- ensure
If line does not exist, it will be added.
If line does not exist, it will be added. If ``before``
and ``after`` are specified either zero lines, or lines
that contain the ``content`` line are allowed to be in between
``before`` and ``after``. If there are lines, and none of
them match then it will produce an error.
- replace
If line already exists, it will be replaced.
- delete
Delete the line, once found.
Delete the line, if found.
- insert
Insert a line.
Nearly identical to ``ensure``. If a line does not exist,
it will be added.
The differences are that multiple (and non-matching) lines are
alloweed between ``before`` and ``after``, if they are
sepcified. The line will always be inserted right before
``before``. ``insert`` also allows the use of ``location`` to
specify that the line should be added at the beginning or end of
the file.
.. note::
@ -4624,28 +4699,33 @@ def line(
``after``. If ``location`` is used, it takes precedence
over the other two options.
:param location:
Defines where to place content in the line. Note this option is only
used when ``mode=insert`` is specified. If a location is passed in, it
takes precedence over both the ``before`` and ``after`` kwargs. Valid
locations are:
location
In ``mode=insert`` only, whether to place the ``content`` at the
beginning or end of a the file. If ``location`` is provided,
``before`` and ``after`` are ignored. Valid locations:
- start
Place the content at the beginning of the file.
- end
Place the content at the end of the file.
:param before:
before
Regular expression or an exact case-sensitive fragment of the string.
This option is only used when either the ``ensure`` or ``insert`` mode
is defined.
Will be tried as **both** a regex **and** a part of the line. Must
match **exactly** one line in the file. This value is only used in
``ensure`` and ``insert`` modes. The ``content`` will be inserted just
before this line, matching its ``indent`` unless ``indent=False``.
:param after:
after
Regular expression or an exact case-sensitive fragment of the string.
This option is only used when either the ``ensure`` or ``insert`` mode
is defined.
Will be tried as **both** a regex **and** a part of the line. Must
match **exactly** one line in the file. This value is only used in
``ensure`` and ``insert`` modes. The ``content`` will be inserted
directly after this line, unless ``before`` is also provided. If
``before`` is not matched, indentation will match this line, unless
``indent=False``.
:param show_changes:
show_changes
Output a unified diff of the old file and the new file.
If ``False`` return a boolean if any changes were made.
Default is ``True``
@ -4654,36 +4734,36 @@ def line(
Using this option will store two copies of the file in-memory
(the original version and the edited version) in order to generate the diff.
:param backup:
backup
Create a backup of the original file with the extension:
"Year-Month-Day-Hour-Minutes-Seconds".
:param quiet:
quiet
Do not raise any exceptions. E.g. ignore the fact that the file that is
tried to be edited does not exist and nothing really happened.
:param indent:
indent
Keep indentation with the previous line. This option is not considered when
the ``delete`` mode is specified.
the ``delete`` mode is specified. Default is ``True``.
:param create:
Create an empty file if doesn't exists.
create
Create an empty file if doesn't exist.
.. versionadded:: 2016.11.0
:param user:
user
The user to own the file, this defaults to the user salt is running as
on the minion.
.. versionadded:: 2016.11.0
:param group:
group
The group ownership set for the file, this defaults to the group salt
is running as on the minion On Windows, this is ignored.
.. versionadded:: 2016.11.0
:param file_mode:
file_mode
The permissions to set on this file, aka 644, 0775, 4664. Not supported
on Windows.
@ -4703,6 +4783,145 @@ def line(
- content: my key = my value
- before: somekey.*?
**Examples:**
Here's a simple config file.
.. code-block:: ini
[some_config]
# Some config file
# this line will go away
here=False
away=True
goodybe=away
And an sls file:
.. code-block:: yaml
remove_lines:
file.line:
- name: /some/file.conf
- mode: delete
- match: away
This will produce:
.. code-block:: ini
[some_config]
# Some config file
here=False
away=True
goodbye=away
If that state is executed 2 more times, this will be the result:
.. code-block:: ini
[some_config]
# Some config file
here=False
Given that original file with this state:
.. code-block:: yaml
replace_things:
file.line:
- name: /some/file.conf
- mode: replace
- match: away
- content: here
Three passes will this state will result in this file:
.. code-block:: ini
[some_config]
# Some config file
here
here=False
here
here
Each pass replacing the first line found.
Given this file:
.. code-block:: text
insert after me
something
insert before me
The following state:
.. code-block:: yaml
insert_a_line:
file.line:
- name: /some/file.txt
- mode: insert
- after: insert after me
- before: insert before me
- content: thrice
If this state is executed 3 times, the result will be:
.. code-block:: text
insert after me
something
thrice
thrice
thrice
insert before me
If the mode is ensure instead, it will fail each time. To succeed, we need
to remove the incorrect line between before and after:
.. code-block:: text
insert after me
insert before me
With an ensure mode, this will insert ``thrice`` the first time and
make no changes for subsequent calls. For someting simple this is
fine, but if you have instead blocks like this:
.. code-block:: text
Begin SomeBlock
foo = bar
End
Begin AnotherBlock
another = value
End
And given this state:
.. code-block:: yaml
ensure_someblock:
file.line:
- name: /some/file.conf
- mode: ensure
- after: Begin SomeBlock
- content: this = should be my content
- before: End
This will fail because there are multiple ``End`` lines. Without that
problem, it still would fail because there is a non-matching line,
``foo = bar``. Ensure **only** allows either zero, or the matching
line present to be present in between ``before`` and ``after``.
"""
name = os.path.expanduser(name)
ret = {"name": name, "changes": {}, "result": True, "comment": ""}
@ -8506,14 +8725,15 @@ def not_cached(name, saltenv="base"):
"""
.. versionadded:: 2017.7.3
Ensures that a file is saved to the minion's cache. This state is primarily
invoked by other states to ensure that we do not re-download a source file
if we do not need to.
Ensures that a file is not present in the minion's cache, deleting it
if found. This state is primarily invoked by other states to ensure
that a fresh copy is fetched.
name
The URL of the file to be cached. To cache a file from an environment
other than ``base``, either use the ``saltenv`` argument or include the
saltenv in the URL (e.g. ``salt://path/to/file.conf?saltenv=dev``).
The URL of the file to be removed from cache. To remove a file from
cache in an environment other than ``base``, either use the ``saltenv``
argument or include the saltenv in the URL (e.g.
``salt://path/to/file.conf?saltenv=dev``).
.. note::
A list of URLs is not supported, this must be a single URL. If a

View file

@ -211,7 +211,7 @@ def options_absent(name, sections=None, separator="="):
return ret
except AttributeError:
cur_section = section
if isinstance(sections[section], (dict, OrderedDict)):
if isinstance(sections[section], list):
for key in sections[section]:
cur_value = cur_section.get(key)
if not cur_value:
@ -248,7 +248,7 @@ def options_absent(name, sections=None, separator="="):
if section not in ret["changes"]:
ret["changes"].update({section: {}})
ret["changes"][section].update({key: current_value})
if not isinstance(sections[section], (dict, OrderedDict)):
if not isinstance(sections[section], list):
ret["changes"].update({section: current_value})
# break
ret["comment"] = "Changes take effect"

View file

@ -1030,6 +1030,27 @@ def installed(
Ensure that the package is installed, and that it is the correct version
(if specified).
.. note::
Any argument which is either a) not explicitly defined for this state,
or b) not a global state argument like ``saltenv``, or
``reload_modules``, will be passed through to the call to
``pkg.install`` to install the package(s). For example, you can include
a ``disablerepo`` argument on platforms that use yum/dnf to disable
that repo:
.. code-block:: yaml
mypkg:
pkg.installed:
- disablerepo: base,updates
To see what is supported, check :ref:`this page <virtual-pkg>` to find
the documentation for your platform's ``pkg`` module, then look at the
documentation for the ``install`` function.
Any argument that is passed through to the ``install`` function, which
is not defined for that function, will be silently ignored.
:param str name:
The name of the package to be installed. This parameter is ignored if
either "pkgs" or "sources" is used. Additionally, please note that this
@ -2116,6 +2137,27 @@ def downloaded(
Ensure that the package is downloaded, and that it is the correct version
(if specified).
.. note::
Any argument which is either a) not explicitly defined for this state,
or b) not a global state argument like ``saltenv``, or
``reload_modules``, will be passed through to the call to
``pkg.install`` to download the package(s). For example, you can include
a ``disablerepo`` argument on platforms that use yum/dnf to disable
that repo:
.. code-block:: yaml
mypkg:
pkg.downloaded:
- disablerepo: base,updates
To see what is supported, check :ref:`this page <virtual-pkg>` to find
the documentation for your platform's ``pkg`` module, then look at the
documentation for the ``install`` function.
Any argument that is passed through to the ``install`` function, which
is not defined for that function, will be silently ignored.
Currently supported for the following pkg providers:
:mod:`yumpkg <salt.modules.yumpkg>`, :mod:`zypper <salt.modules.zypper>` and :mod:`zypper <salt.modules.aptpkg>`
@ -2266,6 +2308,19 @@ def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
Ensure that packages related to certain advisory ids are installed.
.. note::
Any argument which is either a) not explicitly defined for this state,
or b) not a global state argument like ``saltenv``, or
``reload_modules``, will be passed through to the call to
``pkg.install`` to install the patch(es).
To see what is supported, check :ref:`this page <virtual-pkg>` to find
the documentation for your platform's ``pkg`` module, then look at the
documentation for the ``install`` function.
Any argument that is passed through to the ``install`` function, which
is not defined for that function, will be silently ignored.
Currently supported for the following pkg providers:
:mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`
@ -2398,6 +2453,27 @@ def latest(
used, as :mod:`latest <salt.states.pkg.latest>` will update the package
whenever a new package is available.
.. note::
Any argument which is either a) not explicitly defined for this state,
or b) not a global state argument like ``saltenv``, or
``reload_modules``, will be passed through to the call to
``pkg.install`` to install the package(s). For example, you can include
a ``disablerepo`` argument on platforms that use yum/dnf to disable
that repo:
.. code-block:: yaml
mypkg:
pkg.latest:
- disablerepo: base,updates
To see what is supported, check :ref:`this page <virtual-pkg>` to find
the documentation for your platform's ``pkg`` module, then look at the
documentation for the ``install`` function.
Any argument that is passed through to the ``install`` function, which
is not defined for that function, will be silently ignored.
name
The name of the package to maintain at the latest available version.
This parameter is ignored if "pkgs" is used.

View file

@ -347,7 +347,7 @@ def present(
Linux, FreeBSD, NetBSD, OpenBSD, and Solaris. If the ``empty_password``
argument is set to ``True`` then ``password`` is ignored.
For Windows this is the plain text password.
For Linux, the hash can be generated with ``openssl passwd -1``.
For Linux, the hash can be generated with ``mkpasswd -m sha-256``.
.. versionchanged:: 0.16.0
BSD support added.

View file

@ -9,8 +9,8 @@ States for managing zpools
.. versionadded:: 2016.3.0
.. versionchanged:: 2018.3.1
Big refactor to remove duplicate code, better type conversions and improved
consistency in output.
Big refactor to remove duplicate code, better type converions and improved
consistancy in output.
.. code-block:: yaml
@ -89,9 +89,10 @@ def __virtual__():
"""
Provides zpool state
"""
if not __grains__.get("zfs_support"):
if __grains__["zfs_support"]:
return __virtualname__
else:
return False, "The zpool state cannot be loaded: zfs not supported"
return __virtualname__
def _layout_to_vdev(layout, device_dir=None):
@ -398,7 +399,7 @@ def absent(name, export=False, force=False):
name : string
name of storage pool
export : boolean
export instead of destroy the zpool if present
export instread of destroy the zpool if present
force : boolean
force destroy or export

View file

@ -1,135 +1,41 @@
{% if data.enabled %}auto {{name}}{# Enabled #}
{%endif%}{% if data.hotplug %}allow-hotplug {{name}}{# Hotplug #}
{%endif%}{# START IPV4 #}{%- if data.data['inet'] and data.data['inet'].addrfam == 'inet' -%}{% set interface = data.data['inet'] -%}
{# START V4IF #}{% if interface.proto and interface.addrfam %}iface {{name}} {{interface.addrfam}} {{interface.proto}}
{% if interface.hwaddress %} hwaddress {{interface.hwaddress}}
{%endif%}{% if interface.vlan_raw_device %} vlan-raw-device {{interface.vlan_raw_device}}
{%endif%}{% if interface.address %} address {{interface.address}}
{%endif%}{% if interface.addresses %}{%for addr in interface.addresses %} address {{addr}}
{%endfor%}{%endif%}{% if interface.netmask %} netmask {{interface.netmask}}
{%endif%}{% if interface.broadcast %} broadcast {{interface.broadcast}}
{%endif%}{% if interface.metric %} metric {{interface.metric}}
{%endif%}{% if interface.gateway %} gateway {{interface.gateway}}
{%endif%}{% if interface.pointopoint %} pointopoint {{interface.pointopoint}}
{%endif%}{% if interface.scope %} scope {{interface.scope}}
{%endif%}{% if interface.hostname %} hostname {{interface.hostname}}
{%endif%}{% if interface.media %} media {{interface.media}}
{%endif%}{% if interface.leasehours %} leasehours {{interface.leasehours}}
{%endif%}{% if interface.leasetime %} leasetime {{interface.leasetime}}
{%endif%}{% if interface.vendor %} vendor {{interface.vendor}}
{%endif%}{% if interface.client %} client {{interface.client}}
{%endif%}{% if interface.bootfile %} bootfile {{interface.bootfile}}
{%endif%}{% if interface.server %} server {{interface.server}}
{%endif%}{% if interface.hwaddr %} hwaddress {{interface.hwaddr}}
{%endif%}{% if interface.mode %} mode {{interface.mode}}
{%endif%}{% if interface.endpoint %} endpoint {{interface.endpoint}}
{%endif%}{% if interface.dstaddr %} dstaddr {{interface.dstaddr}}
{%endif%}{% if interface.local %} local {{interface.local}}
{%endif%}{% if interface.ttl %} ttl {{interface.ttl}}
{%endif%}{% if interface.mtu %} mtu {{interface.mtu}}
{%endif%}{% if interface.provider %} provider {{interface.provider}}
{%endif%}{% if interface.unit %} unit {{interface.unit}}
{%endif%}{% if interface.options %} options {{interface.options}}
{%endif%}{% if interface.master %} bond-master {{interface.master}}
{%endif%}{% if interface.dns_nameservers %} dns-nameservers {%
if interface.dns_nameservers is string %}{{ interface.dns_nameservers }}{%
else %}{{ interface.dns_nameservers|join(" ") }}{% endif %}
{%endif%}{% if interface.dns_search %} dns-search {%
if interface.dns_search is string %}{{interface.dns_search }}{%
else %}{{ interface.dns_search|join(" ") }}{% endif %}
{%endif%}{% if interface.ethtool %}{%for item in interface.ethtool_keys %} {{item}} {{interface.ethtool[item]}}
{%endfor%}{%endif%}{% if interface.bonding %}{%for item in interface.bonding_keys %} bond-{{item}} {{interface.bonding[item]}}
{%endfor%}{%endif%}{% if interface.bridging %}{%for item in interface.bridging_keys %} bridge_{{item}} {{interface.bridging[item]}}
{%endfor%}{%endif%}{% if interface.wireless_essid %} wireless-essid {{interface.wireless_essid}}
{%endif%}{% if interface.wireless_mode %} wireless-mode {{interface.wireless_mode}}
{%endif%}{% if interface.wpa_ap_scan %} wpa-ap-scan {{interface.wpa_ap_scan}}
{%endif%}{% if interface.wpa_conf %} wpa-conf {{interface.wpa_conf}}
{%endif%}{% if interface.wpa_driver %} wpa-driver {{interface.wpa_driver}}
{%endif%}{% if interface.wpa_group %} wpa-group {{interface.wpa_group}}
{%endif%}{% if interface.wpa_key_mgmt %} wpa-key-mgmt {{interface.wpa_key_mgmt}}
{%endif%}{% if interface.wpa_pairwise %} wpa-pairwise {{interface.wpa_pairwise}}
{%endif%}{% if interface.wpa_proto %} wpa-proto {{interface.wpa_proto}}
{%endif%}{% if interface.wpa_psk %} wpa-psk {{interface.wpa_psk}}
{%endif%}{% if interface.wpa_roam %} wpa-roam {{interface.wpa_roam}}
{%endif%}{% if interface.wpa_ssid %} wpa-ssid {{interface.wpa_ssid}}
{%endif%}{% if interface.up_cmds %}{% for cmd in interface.up_cmds %} up {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.down_cmds %}{% for cmd in interface.down_cmds %} down {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.pre_up_cmds %}{% for cmd in interface.pre_up_cmds %} pre-up {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.post_up_cmds %}{% for cmd in interface.post_up_cmds %} post-up {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.pre_down_cmds %}{% for cmd in interface.pre_down_cmds %} pre-down {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.post_down_cmds %}{% for cmd in interface.post_down_cmds %} post-down {{ cmd }}
{%endfor-%}{%endif%}{%endif%}{# END V4IF #}
{%- endif%}{# END IPV4 -#}
{#- START IPV6 #}{%- if data.data['inet6'] and data.data['inet6'].addrfam == 'inet6' -%}{%- set interface = data.data['inet6'] -%}
{#- START V6IF -#}{% if interface.proto and interface.addrfam %}iface {{name}} {{interface.addrfam}} {{interface.proto}}
{% if interface.hwaddress %} hwaddress {{interface.hwaddress}}
{%endif%}{# START V6ONLOPTS #}{% if interface.accept_ra %} accept_ra {{interface.accept_ra}}
{%endif%}{% if interface.autoconf %} autoconf {{interface.autoconf}}
{%endif%}{% if interface.privext %} privext {{interface.privext}}
{%endif%}{% if interface.dhcp %} dhcp {{interface.dhcp}}{# END V6ONLOPTS #}
{%endif%}{% if interface.vlan_raw_device %} vlan-raw-device {{interface.vlan_raw_device}}
{%endif%}{% if interface.address %} address {{interface.address}}
{%endif%}{% if interface.addresses %}{% for addr in interface.addresses %} address {{addr}}
{%endfor%}{%endif%}{% if interface.netmask %} netmask {{interface.netmask}}
{%endif%}{% if interface.broadcast %} broadcast {{interface.broadcast}}
{%endif%}{% if interface.metric %} metric {{interface.metric}}
{%endif%}{% if interface.gateway %} gateway {{interface.gateway}}
{%endif%}{% if interface.pointopoint %} pointopoint {{interface.pointopoint}}
{%endif%}{% if interface.scope %} scope {{interface.scope}}
{%endif%}{% if interface.hostname %} hostname {{interface.hostname}}
{%endif%}{% if interface.media %} media {{interface.media}}
{%endif%}{% if interface.leasehours %} leasehours {{interface.leasehours}}
{%endif%}{% if interface.leasetime %} leasetime {{interface.leasetime}}
{%endif%}{% if interface.vendor %} vendor {{interface.vendor}}
{%endif%}{% if interface.client %} client {{interface.client}}
{%endif%}{% if interface.bootfile %} bootfile {{interface.bootfile}}
{%endif%}{% if interface.server %} server {{interface.server}}
{%endif%}{% if interface.hwaddr %} hwaddress {{interface.hwaddr}}
{%endif%}{% if interface.mode %} mode {{interface.mode}}
{%endif%}{% if interface.endpoint %} endpoint {{interface.endpoint}}
{%endif%}{% if interface.dstaddr %} dstaddr {{interface.dstaddr}}
{%endif%}{% if interface.local %} local {{interface.local}}
{%endif%}{% if interface.ttl %} ttl {{interface.ttl}}
{%endif%}{% if interface.mtu %} mtu {{interface.mtu}}
{%endif%}{% if interface.provider %} provider {{interface.provider}}
{%endif%}{% if interface.unit %} unit {{interface.unit}}
{%endif%}{% if interface.options %} options {{interface.options}}
{%endif%}{% if interface.master %} bond-master {{interface.master}}
{%endif%}{% if interface.dns_nameservers %} dns-nameservers {%
if interface.dns_nameservers is string %}{{ interface.dns_nameservers }}{%
else %}{{ interface.dns_nameservers|join(" ") }}{% endif %}
{%endif%}{% if interface.dns_search %} dns-search {%
if interface.dns_search is string %}{{interface.dns_search }}{%
else %}{{ interface.dns_search|join(" ") }}{% endif %}
{%endif%}{% if interface.ethtool %}{%for item in interface.ethtool_keys %} {{item}} {{interface.ethtool[item]}}
{%endfor%}{%endif%}{% if interface.bonding %}{%for item in interface.bonding_keys %} bond-{{item}} {{interface.bonding[item]}}
{%endfor%}{%endif%}{% if interface.bridging %}{%for item in interface.bridging_keys %} bridge_{{item}} {{interface.bridging[item]}}
{%endfor%}{%endif%}{% if interface.wireless_essid %} wireless-essid {{interface.wireless_essid}}
{%endif%}{% if interface.wireless_mode %} wireless-mode {{interface.wireless_mode}}
{%endif%}{% if interface.wpa_ap_scan %} wpa-ap-scan {{interface.wpa_ap_scan}}
{%endif%}{% if interface.wpa_conf %} wpa-conf {{interface.wpa_conf}}
{%endif%}{% if interface.wpa_driver %} wpa-driver {{interface.wpa_driver}}
{%endif%}{% if interface.wpa_group %} wpa-group {{interface.wpa_group}}
{%endif%}{% if interface.wpa_key_mgmt %} wpa-key-mgmt {{interface.wpa_key_mgmt}}
{%endif%}{% if interface.wpa_pairwise %} wpa-pairwise {{interface.wpa_pairwise}}
{%endif%}{% if interface.wpa_proto %} wpa-proto {{interface.wpa_proto}}
{%endif%}{% if interface.wpa_psk %} wpa-psk {{interface.wpa_psk}}
{%endif%}{% if interface.wpa_roam %} wpa-roam {{interface.wpa_roam}}
{%endif%}{% if interface.wpa_ssid %} wpa-ssid {{interface.wpa_ssid}}
{%endif%}{% if interface.up_cmds %}{% for cmd in interface.up_cmds %} up {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.down_cmds %}{% for cmd in interface.down_cmds %} down {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.pre_up_cmds %}{% for cmd in interface.pre_up_cmds %} pre-up {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.post_up_cmds %}{% for cmd in interface.post_up_cmds %} post-up {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.pre_down_cmds %}{% for cmd in interface.pre_down_cmds %} pre-down {{ cmd }}
{%endfor-%}
{%endif%}{% if interface.post_down_cmds %}{% for cmd in interface.post_down_cmds %} post-down {{ cmd }}
{%endfor-%}{%endif%}{%endif%}{# END V6IF #}{%endif%}{# END IPV6#}
{% set optmap = {
'addresses': 'address',
'hwaddr': 'hwaddress',
'up_cmds': 'up',
'down_cmds': 'down',
'pre_up_cmds': 'pre-up',
'post_up_cmds': 'post-up',
'pre_down_cmds': 'pre-down',
'post_down_cmds': 'post-down',
'master': 'bond-master',
} -%}
{% set concat_opts = ['dns_nameservers'] -%}
{% set valid_opts = [
'autoconf', 'privext', 'dhcp', 'hwaddress', 'vlan_raw_device', 'address', 'addresses', 'netmask',
'metric', 'gateway', 'pointopoint', 'scope', 'hostname', 'media', 'leasehours', 'leasetime',
'vendor', 'client', 'bootfile', 'server', 'mode', 'endpoint', 'dstaddr', 'local', 'ttl', 'mtu',
'provider', 'unit', 'options', 'master', 'dns_nameservers', 'wireless_mode', 'wpa_ap_scan',
'wpa_conf', 'wpa_driver', 'wpa_group', 'wpa_key_mgmt', 'wpa_pairwise', 'wpa_proto', 'wpa_psk',
'wpa_roam', 'wpa_ssid',
] -%}
{% if data.enabled %}auto {{ name }}
{% endif %}{% if data.hotplug %}allow-hotplug {{ name }}
{% endif %}{% for inet in ['inet', 'inet6'] -%}
{% if data.data[inet] and data.data[inet].addrfam == inet %}{% set interface = data.data[inet] -%}
{% if interface.proto and interface.addrfam %}iface {{name}} {{interface.addrfam}} {{interface.proto}}
{% for opt in valid_opts %}{% set debopt = optmap.get(opt, opt).replace('_', '-') -%}
{% set val = interface.get(opt, False) %}{% if val -%}
{% if opt in concat_opts and val is iterable %} {{ debopt }} {{ val|join(' ') }}
{% elif val is string or val is number %} {{ debopt }} {{ val }}
{% elif val is iterable %}{% for v in val %} {{ debopt }} {{ v }}
{% endfor %}{% endif %}{% endif -%}
{% endfor %}{% if interface.ethtool %}{% for item in interface.ethtool_keys -%}
{% if interface.ethtool[item] is string %} {{ item }} {{ interface.ethtool[item] }}
{% elif interface.ethtool[item] is iterable %} {{ item }} {{ interface.ethtool[item]|join(' ') }}
{% endif %}{% endfor %}{% endif %}{% if interface.bonding %}{% for item in interface.bonding_keys -%}
{% if interface.bonding[item] is string %} bond-{{ item }} {{ interface.bonding[item] }}
{% elif interface.bonding[item] is iterable %} bond-{{ item }} {{ interface.bonding[item]|join(' ') }}
{% endif %}{% endfor %}{% endif %}{% if interface.bridging %}{% for item in interface.bridging_keys -%}.
{% if interface.bridging[item] is string %} bridge_{{ item }} {{ interface.bridging[item] }}
{% elif interface.bridging[item] is iterable %} bridge_{{ item }} {{ interface.bridging[item]|join(' ') }}
{% endif %}{% endfor %}{% endif %}{% endif %}{% endif %}{% endfor %}

View file

@ -576,7 +576,7 @@ class AsyncTCPPubChannel(
yield self.auth.authenticate()
if self.auth.authenticated:
# if this is changed from the default, we assume it was intentional
if int(self.opts.get("publish_port", 4506)) != 4506:
if int(self.opts.get("publish_port", 4505)) != 4505:
self.publish_port = self.opts.get("publish_port")
# else take the relayed publish_port master reports
else:

View file

@ -7,7 +7,6 @@ Jinja loading utils to enable a more powerful backend for jinja templates
from __future__ import absolute_import, unicode_literals
import atexit
import collections
import logging
import os.path
import pipes
@ -38,6 +37,13 @@ from salt.ext import six
from salt.utils.decorators.jinja import jinja_filter, jinja_global, jinja_test
from salt.utils.odict import OrderedDict
try:
from collections.abc import Hashable
except ImportError:
# pylint: disable=no-name-in-module
from collections import Hashable
log = logging.getLogger(__name__)
__all__ = ["SaltCacheLoader", "SerializerExtension"]
@ -342,7 +348,7 @@ def to_bool(val):
return val.lower() in ("yes", "1", "true")
if isinstance(val, six.integer_types):
return val > 0
if not isinstance(val, collections.Hashable):
if not isinstance(val, Hashable):
return len(val) > 0
return False
@ -507,7 +513,7 @@ def unique(values):
['a', 'b', 'c']
"""
ret = None
if isinstance(values, collections.Hashable):
if isinstance(values, Hashable):
ret = set(values)
else:
ret = []
@ -571,7 +577,7 @@ def lst_avg(lst):
2.5
"""
if not isinstance(lst, collections.Hashable):
if not isinstance(lst, Hashable):
return float(sum(lst) / len(lst))
return float(lst)
@ -592,9 +598,7 @@ def union(lst1, lst2):
[1, 2, 3, 4, 6]
"""
if isinstance(lst1, collections.Hashable) and isinstance(
lst2, collections.Hashable
):
if isinstance(lst1, Hashable) and isinstance(lst2, Hashable):
return set(lst1) | set(lst2)
return unique(lst1 + lst2)
@ -615,9 +619,7 @@ def intersect(lst1, lst2):
[2, 4]
"""
if isinstance(lst1, collections.Hashable) and isinstance(
lst2, collections.Hashable
):
if isinstance(lst1, Hashable) and isinstance(lst2, Hashable):
return set(lst1) & set(lst2)
return unique([ele for ele in lst1 if ele in lst2])
@ -638,9 +640,7 @@ def difference(lst1, lst2):
[1, 3, 6]
"""
if isinstance(lst1, collections.Hashable) and isinstance(
lst2, collections.Hashable
):
if isinstance(lst1, Hashable) and isinstance(lst2, Hashable):
return set(lst1) - set(lst2)
return unique([ele for ele in lst1 if ele not in lst2])
@ -661,9 +661,7 @@ def symmetric_difference(lst1, lst2):
[1, 3]
"""
if isinstance(lst1, collections.Hashable) and isinstance(
lst2, collections.Hashable
):
if isinstance(lst1, Hashable) and isinstance(lst2, Hashable):
return set(lst1) ^ set(lst2)
return unique(
[ele for ele in union(lst1, lst2) if ele not in intersect(lst1, lst2)]

View file

@ -23,7 +23,11 @@ Rob Speer's changes are as follows:
"""
from __future__ import absolute_import, print_function, unicode_literals
import collections
try:
from collections.abc import MutableSet
except ImportError:
# pylint: disable=no-name-in-module
from collections import MutableSet
SLICE_ALL = slice(None)
__version__ = "2.0.1"
@ -49,7 +53,7 @@ def is_iterable(obj):
)
class OrderedSet(collections.MutableSet):
class OrderedSet(MutableSet):
"""
An OrderedSet is a custom MutableSet that remembers its order, so that
every entry has an index that can be looked up.

156
salt/utils/win_dotnet.py Normal file
View file

@ -0,0 +1,156 @@
# -*- coding: utf-8 -*-
"""
Dot NET functions
.. versionadded:: Sodium
"""
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt libs
import salt.utils.platform
import salt.utils.win_reg as win_reg
from salt.utils.versions import LooseVersion
__virtualname__ = "dotnet"
# Although utils are often directly imported, it is also possible to use the
# loader.
def __virtual__():
    """
    Only load if platform is Windows
    """
    if salt.utils.platform.is_windows():
        return __virtualname__
    return False, "This utility only works on Windows"
def versions():
    """
    Figure out what versions of .NET are installed on the system

    Returns:
        dict: A dictionary containing two keys:
            - versions: A list of versions installed on the system
            - details: A dictionary with details about the versions installed on
              the system
    """
    hive = "HKLM"
    base_key = "SOFTWARE\\Microsoft\\NET Framework Setup\\NDP"

    # Release DWORD -> marketing version for .NET 4.5 and later, highest
    # threshold first.
    # https://docs.microsoft.com/en-us/dotnet/framework/migration-guide/how-to-determine-which-versions-are-installed#find-net-framework-versions-45-and-later-with-code
    release_thresholds = (
        (528040, "4.8"),
        (461808, "4.7.2"),
        (461308, "4.7.1"),
        (460798, "4.7"),
        (394802, "4.6.2"),
        (394254, "4.6.1"),
        (393295, "4.6"),
        (379893, "4.5.2"),
        (378675, "4.5.1"),
        (378389, "4.5"),
    )

    def release_to_version(release):
        # First threshold that the release number meets wins; below the
        # lowest threshold this falls through and returns None, matching
        # the original if-cascade.
        for minimum, name in release_thresholds:
            if release >= minimum:
                return name

    return_dict = {"versions": [], "details": {}}
    for ver_key in win_reg.list_keys(hive=hive, key=base_key):
        if not ver_key.startswith("v"):
            continue
        subkey = "\\".join([base_key, ver_key])
        full_subkey = "\\".join([base_key, ver_key, "Full"])
        if win_reg.value_exists(hive=hive, key=subkey, vname="Version"):
            # .NET 1.x - 4.0 style registry layout
            # https://docs.microsoft.com/en-us/dotnet/framework/migration-guide/how-to-determine-which-versions-are-installed#find-net-framework-versions-1-4-with-codep
            install = win_reg.read_value(hive=hive, key=subkey, vname="Install")[
                "vdata"
            ]
            if not install:
                continue
            version = win_reg.read_value(hive=hive, key=subkey, vname="Version")[
                "vdata"
            ]
            sp = win_reg.read_value(hive=hive, key=subkey, vname="SP")["vdata"]
        elif win_reg.value_exists(hive=hive, key=full_subkey, vname="Release"):
            # .NET 4.5+ style registry layout: version derived from the
            # Release DWORD, no service pack concept.
            install = win_reg.read_value(hive=hive, key=full_subkey, vname="Install")[
                "vdata"
            ]
            if not install:
                continue
            version = release_to_version(
                win_reg.read_value(hive=hive, key=full_subkey, vname="Release")["vdata"]
            )
            sp = "N/A"
        else:
            continue
        service_pack = " SP{0}".format(sp) if sp != "N/A" else ""
        return_dict["versions"].append(version)
        return_dict["details"][ver_key] = {
            "version": version,
            "service_pack": sp,
            "full": "{0}{1}".format(version, service_pack),
        }
    return return_dict
def versions_list():
    """
    Get a sorted list of .NET versions installed on the system

    Returns:
        list: A sorted list of versions installed on the system, ordered
        numerically via ``LooseVersion`` (plain string sorting would put
        e.g. "4.10" before "4.8")
    """
    # LooseVersion is already imported at module level and is what
    # version_at_least() uses for comparisons, so ordering stays consistent.
    return sorted(versions()["versions"], key=LooseVersion)
def versions_details():
    """
    Get the details for all versions of .NET installed on a system

    Returns:
        dict: A dictionary of details for each version on the system. Contains
        the following keys:
            - version: The version installed
            - service_pack: The service pack for the version installed
            - full: The full version name including the service pack
    """
    all_versions = versions()
    return all_versions["details"]
def version_at_least(version):
    """
    Check that the system contains a version of .NET that is at least the
    passed version.

    Args:
        version (str): The version to check for

    Returns:
        bool: ``True`` if the system contains a version of .NET that is at least
        the passed version, otherwise ``False``
    """
    target = LooseVersion(str(version))
    return any(LooseVersion(installed) >= target for installed in versions_list())

28
tasks/README.md Normal file
View file

@ -0,0 +1,28 @@
# What is this directory?
This directory contains python scripts which should be called by [invoke](https://pypi.org/project/invoke).
Instead of having several multi-purpose python scripts scattered through multiple paths in the salt code base,
we will now concentrate them under an invoke task.
## Calling Invoke
Invoke can be called in the following ways.
### Installed system-wide
If invoke is installed system-wide, be sure you also have `blessings` installed if you want coloured output, although
it's not a hard requirement.
```
inv docs.check
```
### Using Nox
Since salt already uses nox, and nox manages virtual environments and respective requirements, calling invoke is as
simple as:
```
nox -e invoke -- docs.check
```

9
tasks/__init__.py Normal file
View file

@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-
from invoke import Collection # pylint: disable=3rd-party-module-not-gated
from . import docs, loader
# Root invoke namespace: expose the task modules in this package as the
# ``docs.*`` and ``loader.*`` sub-collections (e.g. ``inv docs.check``).
ns = Collection()
ns.add_collection(Collection.from_module(docs, name="docs"), name="docs")
ns.add_collection(Collection.from_module(loader, name="loader"), name="loader")

459
tasks/docs.py Normal file
View file

@ -0,0 +1,459 @@
# -*- coding: utf-8 -*-
"""
tasks.docstrings
~~~~~~~~~~~~~~~~
Check salt code base for missing or wrong docstrings
"""
import ast
import collections
import os
import pathlib
import re
from invoke import task # pylint: disable=3rd-party-module-not-gated
from tasks import utils
CODE_DIR = pathlib.Path(__file__).resolve().parent.parent
DOCS_DIR = CODE_DIR / "doc"
SALT_CODE_DIR = CODE_DIR / "salt"
os.chdir(str(CODE_DIR))
# Bidirectional caches mapping each salt python module path to its sphinx
# stub path under doc/ref (and back).  Populated by build_path_cache(),
# which runs at import time.
python_module_to_doc_path = {}
doc_path_to_python_module = {}

# Salt loader packages whose modules are expected to have doc stubs.
check_paths = (
    "salt/auth",
    "salt/beacons",
    "salt/cache",
    "salt/cloud",
    "salt/engine",
    "salt/executors",
    "salt/fileserver",
    "salt/grains",
    "salt/modules",
    "salt/netapi",
    "salt/output",
    "salt/pillar",
    "salt/proxy",
    "salt/queues",
    "salt/renderers",
    "salt/returners",
    "salt/roster",
    "salt/runners",
    "salt/sdb",
    "salt/serializers",
    "salt/states",
    "salt/thorium",
    "salt/tokens",
    "salt/tops",
    "salt/wheel",
)
# Modules exempt from the documentation checks.
exclude_paths = (
    "salt/cloud/cli.py",
    "salt/cloud/exceptions.py",
    "salt/cloud/libcloudfuncs.py",
)
def build_path_cache():
    """
    Build a python module to doc module cache

    Walks every ``*.py`` file under ``salt/`` and derives the path of its
    sphinx stub under ``doc/ref``, populating the module-level
    ``python_module_to_doc_path`` and ``doc_path_to_python_module`` maps.
    """
    for path in SALT_CODE_DIR.rglob("*.py"):
        path = path.resolve().relative_to(CODE_DIR)
        strpath = str(path)
        if strpath.endswith("__init__.py"):
            continue
        # Only loader packages listed in check_paths carry doc stubs.
        if not strpath.startswith(check_paths):
            continue
        if strpath.startswith(exclude_paths):
            continue
        parts = list(path.parts)
        stub_path = DOCS_DIR / "ref"
        # Remove salt from parts
        parts.pop(0)
        # Remove the package from parts
        package = parts.pop(0)
        # Remove the module from parts
        module = parts.pop()
        # A few packages are named differently under doc/ref.
        if package == "cloud":
            package = "clouds"
        if package == "fileserver":
            package = "file_server"
        if package == "netapi":
            # These are handled differently
            if not parts:
                # This is rest_wsgi
                stub_path = (
                    stub_path
                    / package
                    / "all"
                    / str(path).replace(".py", ".rst").replace(os.sep, ".")
                )
            else:
                # rest_cherrypy, rest_tornado
                subpackage = parts.pop(0)
                stub_path = (
                    stub_path
                    / package
                    / "all"
                    / "salt.netapi.{}.rst".format(subpackage)
                )
        else:
            stub_path = (
                stub_path
                / package
                / "all"
                / str(path).replace(".py", ".rst").replace(os.sep, ".")
            )
        stub_path = stub_path.relative_to(CODE_DIR)
        python_module_to_doc_path[path] = stub_path
        doc_path_to_python_module[stub_path] = path
build_path_cache()
def build_file_list(files, extension):
    """
    Normalize the passed ``--files`` values into paths relative to the
    repo root.

    Invoke does not support nargs, so each entry may itself hold several
    whitespace-separated paths.  When nothing matching *extension* was
    passed, every ``*<extension>`` file in the repository is used instead.
    """
    selected = [
        spath
        for entry in files
        if entry
        for spath in entry.split()
        if spath.endswith(extension)
    ]
    if selected:
        candidates = [pathlib.Path(fname).resolve() for fname in selected]
    else:
        candidates = CODE_DIR.rglob("*{}".format(extension))
    return [candidate.relative_to(CODE_DIR) for candidate in candidates]
def build_python_module_paths(files):
    """
    Filter *files* down to the salt loader python modules that the
    documentation checks apply to.
    """
    return [
        path
        for path in build_file_list(files, ".py")
        if not str(path).endswith("__init__.py")
        and str(path).startswith(check_paths)
        and not str(path).startswith(exclude_paths)
    ]
def build_docs_paths(files):
    # Narrow the passed file list down to the .rst documentation files.
    return build_file_list(files, ".rst")
@task(iterable=["files"], positional=["files"])
def check_inline_markup(ctx, files):
"""
Check docstring for :doc: usage
We should not be using the ``:doc:`` inline markup option when
cross-referencing locations. Use ``:ref:`` or ``:mod:`` instead.
This task checks for reference to ``:doc:`` usage.
See Issue #12788 for more information.
https://github.com/saltstack/salt/issues/12788
"""
# CD into Salt's repo root directory
ctx.cd(CODE_DIR)
files = build_python_module_paths(files)
exitcode = 0
for path in files:
module = ast.parse(path.read_text(), filename=str(path))
funcdefs = [node for node in module.body if isinstance(node, ast.FunctionDef)]
for funcdef in funcdefs:
docstring = ast.get_docstring(funcdef, clean=True)
if not docstring:
continue
if ":doc:" in docstring:
utils.error(
"The {} function in {} contains ':doc:' usage", funcdef.name, path
)
exitcode += 1
utils.exit_invoke(exitcode)
@task(iterable=["files"])
def check_stubs(ctx, files):
# CD into Salt's repo root directory
ctx.cd(CODE_DIR)
files = build_python_module_paths(files)
exitcode = 0
for path in files:
strpath = str(path)
if strpath.endswith("__init__.py"):
continue
if not strpath.startswith(check_paths):
continue
if strpath.startswith(exclude_paths):
continue
stub_path = python_module_to_doc_path[path]
if not stub_path.exists():
exitcode += 1
utils.error(
"The module at {} does not have a sphinx stub at {}", path, stub_path
)
utils.exit_invoke(exitcode)
@task(iterable=["files"])
def check_virtual(ctx, files):
"""
Check if .rst files for each module contains the text ".. _virtual"
indicating it is a virtual doc page, and, in case a module exists by
the same name, it's going to be shaddowed and not accessible
"""
exitcode = 0
files = build_docs_paths(files)
for path in files:
if path.name == "index.rst":
continue
contents = path.read_text()
if ".. _virtual-" in contents:
try:
python_module = doc_path_to_python_module[path]
utils.error(
"The doc file at {} indicates that it's virtual, yet, there's a python module "
"at {} that will shaddow it.",
path,
python_module,
)
exitcode += 1
except KeyError:
# This is what we're expecting
continue
utils.exit_invoke(exitcode)
@task(iterable=["files"])
def check_module_indexes(ctx, files):
exitcode = 0
files = build_docs_paths(files)
for path in files:
if path.name != "index.rst":
continue
contents = path.read_text()
if ".. autosummary::" not in contents:
continue
module_index_block = re.search(
r"""
\.\.\s+autosummary::\s*\n
(\s+:[a-z]+:.*\n)*
(\s*\n)+
(?P<mods>(\s*[a-z0-9_\.]+\s*\n)+)
""",
contents,
flags=re.VERBOSE,
)
if not module_index_block:
continue
module_index = re.findall(
r"""\s*([a-z0-9_\.]+)\s*\n""", module_index_block.group("mods")
)
if module_index != sorted(module_index):
exitcode += 1
utils.error(
"The autosummary mods in {} are not properly sorted. Please sort them.",
path,
)
module_index_duplicates = [
mod for mod, count in collections.Counter(module_index).items() if count > 1
]
if module_index_duplicates:
exitcode += 1
utils.error(
"Module index {} contains duplicates: {}", path, module_index_duplicates
)
# Let's check if all python modules are included in the index
path_parts = list(path.parts)
# drop doc
path_parts.pop(0)
# drop ref
path_parts.pop(0)
# drop "index.rst"
path_parts.pop()
# drop "all"
path_parts.pop()
package = path_parts.pop(0)
if package == "clouds":
package = "cloud"
if package == "file_server":
package = "fileserver"
if package == "configuration":
package = "log"
path_parts = ["handlers"]
python_package = SALT_CODE_DIR.joinpath(package, *path_parts).relative_to(
CODE_DIR
)
modules = set()
for module in python_package.rglob("*.py"):
if package == "netapi":
if module.stem == "__init__":
continue
if len(module.parts) > 4:
continue
if len(module.parts) > 3:
modules.add(module.parent.stem)
else:
modules.add(module.stem)
elif package == "cloud":
if len(module.parts) < 4:
continue
if module.name == "__init__.py":
continue
modules.add(module.stem)
elif package == "modules":
if len(module.parts) > 3:
# salt.modules.inspeclib
if module.name == "__init__.py":
modules.add(module.parent.stem)
continue
modules.add("{}.{}".format(module.parent.stem, module.stem))
continue
if module.name == "__init__.py":
continue
modules.add(module.stem)
elif module.name == "__init__.py":
continue
elif module.name != "__init__.py":
modules.add(module.stem)
missing_modules_in_index = set(modules) - set(module_index)
if missing_modules_in_index:
exitcode += 1
utils.error(
"The module index at {} is missing the following modules: {}",
path,
", ".join(missing_modules_in_index),
)
extra_modules_in_index = set(module_index) - set(modules)
if extra_modules_in_index:
exitcode += 1
utils.error(
"The module index at {} has extra modules(non existing): {}",
path,
", ".join(extra_modules_in_index),
)
utils.exit_invoke(exitcode)
@task(iterable=["files"])
def check_stray(ctx, files):
exitcode = 0
exclude_paths = (
DOCS_DIR / "_inc",
DOCS_DIR / "ref" / "cli" / "_includes",
DOCS_DIR / "ref" / "cli",
DOCS_DIR / "ref" / "configuration",
DOCS_DIR / "ref" / "file_server" / "backends.rst",
DOCS_DIR / "ref" / "file_server" / "environments.rst",
DOCS_DIR / "ref" / "file_server" / "file_roots.rst",
DOCS_DIR / "ref" / "internals",
DOCS_DIR / "ref" / "modules" / "all" / "salt.modules.inspectlib.rst",
DOCS_DIR / "ref" / "peer.rst",
DOCS_DIR / "ref" / "publisheracl.rst",
DOCS_DIR / "ref" / "python-api.rst",
DOCS_DIR / "ref" / "states" / "aggregate.rst",
DOCS_DIR / "ref" / "states" / "altering_states.rst",
DOCS_DIR / "ref" / "states" / "backup_mode.rst",
DOCS_DIR / "ref" / "states" / "compiler_ordering.rst",
DOCS_DIR / "ref" / "states" / "extend.rst",
DOCS_DIR / "ref" / "states" / "failhard.rst",
DOCS_DIR / "ref" / "states" / "global_state_arguments.rst",
DOCS_DIR / "ref" / "states" / "highstate.rst",
DOCS_DIR / "ref" / "states" / "include.rst",
DOCS_DIR / "ref" / "states" / "layers.rst",
DOCS_DIR / "ref" / "states" / "master_side.rst",
DOCS_DIR / "ref" / "states" / "ordering.rst",
DOCS_DIR / "ref" / "states" / "parallel.rst",
DOCS_DIR / "ref" / "states" / "providers.rst",
DOCS_DIR / "ref" / "states" / "requisites.rst",
DOCS_DIR / "ref" / "states" / "startup.rst",
DOCS_DIR / "ref" / "states" / "testing.rst",
DOCS_DIR / "ref" / "states" / "top.rst",
DOCS_DIR / "ref" / "states" / "vars.rst",
DOCS_DIR / "ref" / "states" / "writing.rst",
DOCS_DIR / "topics",
)
exclude_paths = tuple([str(p.relative_to(CODE_DIR)) for p in exclude_paths])
files = build_docs_paths(files)
for path in files:
if not str(path).startswith(str((DOCS_DIR / "ref").relative_to(CODE_DIR))):
continue
if str(path).startswith(exclude_paths):
continue
if path.name in ("index.rst", "glossary.rst", "faq.rst", "README.rst"):
continue
try:
python_module = doc_path_to_python_module[path]
except KeyError:
contents = path.read_text()
if ".. _virtual-" in contents:
continue
exitcode += 1
utils.error(
"The doc at {} doesn't have a corresponding python module an is considered a stray "
"doc. Please remove it.",
path,
)
utils.exit_invoke(exitcode)
@task(iterable=["files"])
def check(ctx, files):
try:
utils.info("Checking inline :doc: markup")
check_inline_markup(ctx, files)
except SystemExit as exc:
if exc.code != 0:
raise
try:
utils.info("Checking python module stubs")
check_stubs(ctx, files)
except SystemExit as exc:
if exc.code != 0:
raise
try:
utils.info("Checking virtual modules")
check_virtual(ctx, files)
except SystemExit as exc:
if exc.code != 0:
raise
try:
utils.info("Checking doc module indexes")
check_module_indexes(ctx, files)
except SystemExit as exc:
if exc.code != 0:
raise
try:
utils.info("Checking stray docs")
check_stray(ctx, files)
except SystemExit as exc:
if exc.code != 0:
raise

141
tasks/loader.py Normal file
View file

@ -0,0 +1,141 @@
# -*- coding: utf-8 -*-
"""
tasks.loader
~~~~~~~~~~~~
Salt loader checks
"""
import ast
import pathlib
from invoke import task # pylint: disable=3rd-party-module-not-gated
from tasks import utils
CODE_DIR = pathlib.Path(__file__).resolve().parent.parent
SALT_CODE_DIR = CODE_DIR / "salt"
@task(iterable=["files"], positional=["files"])
def check_virtual(ctx, files):
"""
Check Salt loader modules for a defined `__virtualname__` attribute and `__virtual__` function.
This is meant to replace:
https://github.com/saltstack/salt/blob/27ae8260983b11fe6e32a18e777d550be9fe1dc2/tests/unit/test_virtualname.py
"""
# CD into Salt's repo root directory
ctx.cd(CODE_DIR)
# Unfortunately invoke does not support nargs.
# We migth have been passed --files="foo.py bar.py"
# Turn that into a list of paths
_files = []
for path in files:
if not path:
continue
_files.extend(path.split())
if not _files:
_files = SALT_CODE_DIR.rglob("*.py")
else:
_files = [pathlib.Path(fname) for fname in _files]
_files = [path.resolve() for path in _files]
salt_loaders = (
CODE_DIR / "salt" / "modules",
CODE_DIR / "salt" / "metaproxy",
CODE_DIR / "salt" / "matchers",
CODE_DIR / "salt" / "engines",
CODE_DIR / "salt" / "proxy",
CODE_DIR / "salt" / "returners",
CODE_DIR / "salt" / "utils",
CODE_DIR / "salt" / "pillar",
CODE_DIR / "salt" / "tops",
CODE_DIR / "salt" / "wheel",
CODE_DIR / "salt" / "output",
CODE_DIR / "salt" / "serializers",
CODE_DIR / "salt" / "tokens",
CODE_DIR / "salt" / "auth",
CODE_DIR / "salt" / "fileserver",
CODE_DIR / "salt" / "roster",
CODE_DIR / "salt" / "thorium",
CODE_DIR / "salt" / "states",
CODE_DIR / "salt" / "beacons",
CODE_DIR / "salt" / "log" / "handlers",
CODE_DIR / "salt" / "client" / "ssh",
CODE_DIR / "salt" / "renderers",
CODE_DIR / "salt" / "grains",
CODE_DIR / "salt" / "runners",
CODE_DIR / "salt" / "queues",
CODE_DIR / "salt" / "sdb",
CODE_DIR / "salt" / "spm" / "pkgdb",
CODE_DIR / "salt" / "spm" / "pkgfiles",
CODE_DIR / "salt" / "cloud" / "clouds",
CODE_DIR / "salt" / "netapi",
CODE_DIR / "salt" / "executors",
CODE_DIR / "salt" / "cache",
)
# This is just internal task checking
for loader in salt_loaders:
if not pathlib.Path(loader).is_dir():
utils.error("The {} path is not a directory", loader)
errors = 0
exitcode = 0
for path in _files:
strpath = str(path)
if strpath.endswith("__init__.py"):
continue
for loader in salt_loaders:
try:
path.relative_to(loader)
break
except ValueError:
# Path doesn't start with the loader path, carry on
continue
module = ast.parse(path.read_text(), filename=strpath)
found_virtual_func = False
for funcdef in [
node for node in module.body if isinstance(node, ast.FunctionDef)
]:
if funcdef.name == "__virtual__":
found_virtual_func = True
break
if not found_virtual_func:
# If the module does not define a __virtual__() function, we don't require a __virtualname__ attribute
continue
found_virtualname_attr = False
for node in module.body:
if isinstance(node, ast.Assign):
if not found_virtualname_attr:
for target in node.targets:
if not isinstance(target, ast.Name):
continue
if target.id == "__virtualname__":
found_virtualname_attr = True
if node.value.s not in path.name:
errors += 1
exitcode = 1
utils.error(
'The value of the __virtualname__ attribute, "{}" is not part of {}',
node.value.s,
path.name,
)
if found_virtualname_attr:
break
if not found_virtualname_attr:
errors += 1
exitcode = 1
utils.error(
"The salt loader module {} defines a __virtual__() function but does not define a "
"__virtualname__ attribute",
path.relative_to(CODE_DIR),
)
if exitcode:
utils.error("Found {} errors", errors)
utils.exit_invoke(exitcode)

61
tasks/utils.py Normal file
View file

@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
"""
tasks.utils
~~~~~~~~~~~
Invoke utilities
"""
import sys
try:
from blessings import Terminal
terminal = Terminal()
HAS_BLESSINGS = True
except ImportError:
terminal = None
HAS_BLESSINGS = False
def exit_invoke(exitcode, message=None, *args, **kwargs):
    """
    Terminate the invoke run with *exitcode*.

    When *message* is given it is emitted first: as a warning for a
    non-zero (failure) exit code, as an informational line otherwise.
    """
    if message is not None:
        emit = warn if exitcode > 0 else info
        emit(message, *args, **kwargs)
    sys.exit(exitcode)
def info(message, *args, **kwargs):
    """
    Write an informational *message* (format-style args applied) to
    stderr, bold green when a terminal is available.
    """
    text = str(message).format(*args, **kwargs)
    if terminal:
        text = terminal.bold(terminal.green(text))
    write_message(text)
def warn(message, *args, **kwargs):
    """
    Write a warning *message* (format-style args applied) to stderr,
    bold yellow when a terminal is available.
    """
    text = str(message).format(*args, **kwargs)
    if terminal:
        text = terminal.bold(terminal.yellow(text))
    write_message(text)
def error(message, *args, **kwargs):
    """
    Write an error *message* (format-style args applied) to stderr,
    bold red when a terminal is available.
    """
    text = str(message).format(*args, **kwargs)
    if terminal:
        text = terminal.bold(terminal.red(text))
    write_message(text)
def write_message(message):
    """
    Write *message* to stderr, ensuring a trailing newline, and flush so
    the output appears immediately.
    """
    stream = sys.stderr
    stream.write(message)
    if not message.endswith("\n"):
        stream.write("\n")
    stream.flush()

View file

@ -225,12 +225,13 @@ salt/utils/docker/*:
- unit.utils.test_docker
salt/utils/schedule.py:
- integration.scheduler.test_error
- integration.scheduler.test_eval
- integration.scheduler.test_postpone
- integration.scheduler.test_skip
- integration.scheduler.test_maxrunning
- integration.scheduler.test_helpers
- unit.utils.scheduler.test_error
- unit.utils.scheduler.test_eval
- unit.utils.scheduler.test_postpone
- unit.utils.scheduler.test_skip
- unit.utils.scheduler.test_maxrunning
- unit.utils.scheduler.test_helpers
- unit.utils.scheduler.test_schedule
salt/utils/vt.py:
- integration.cli.test_custom_module

View file

@ -159,7 +159,7 @@ class CloudTest(ShellCase):
# Use the first three letters of the subclass, fill with '-' if too short
self._instance_name = random_string(
"cloud-test-{:-<3}-".format(subclass[:3]), uppercase=False
)
).lower()
return self._instance_name
@property

View file

@ -184,7 +184,6 @@ class FileModuleTest(ModuleCase):
ret = self.run_function("file.chgrp", arg=[self.myfile, group])
self.assertIn("not exist", ret)
@skipIf(True, "SLOWTEST skip")
def test_patch(self):
if not self.run_function("cmd.has_exec", ["patch"]):
self.skipTest("patch is not installed")
@ -207,53 +206,44 @@ class FileModuleTest(ModuleCase):
salt.utils.stringutils.to_unicode(fp.read()), "Hello world\n"
)
@skipIf(True, "SLOWTEST skip")
def test_remove_file(self):
ret = self.run_function("file.remove", arg=[self.myfile])
self.assertTrue(ret)
@skipIf(True, "SLOWTEST skip")
def test_remove_dir(self):
ret = self.run_function("file.remove", arg=[self.mydir])
self.assertTrue(ret)
@skipIf(True, "SLOWTEST skip")
def test_remove_symlink(self):
ret = self.run_function("file.remove", arg=[self.mysymlink])
self.assertTrue(ret)
@skipIf(True, "SLOWTEST skip")
def test_remove_broken_symlink(self):
ret = self.run_function("file.remove", arg=[self.mybadsymlink])
self.assertTrue(ret)
@skipIf(True, "SLOWTEST skip")
def test_cannot_remove(self):
ret = self.run_function("file.remove", arg=["tty"])
self.assertEqual(
"ERROR executing 'file.remove': File path must be absolute: tty", ret
)
@skipIf(True, "SLOWTEST skip")
def test_source_list_for_single_file_returns_unchanged(self):
ret = self.run_function(
"file.source_list", ["salt://http/httpd.conf", "filehash", "base"]
)
self.assertEqual(list(ret), ["salt://http/httpd.conf", "filehash"])
@skipIf(True, "SLOWTEST skip")
def test_source_list_for_single_local_file_slash_returns_unchanged(self):
ret = self.run_function("file.source_list", [self.myfile, "filehash", "base"])
self.assertEqual(list(ret), [self.myfile, "filehash"])
@skipIf(True, "SLOWTEST skip")
def test_source_list_for_single_local_file_proto_returns_unchanged(self):
ret = self.run_function(
"file.source_list", ["file://" + self.myfile, "filehash", "base"]
)
self.assertEqual(list(ret), ["file://" + self.myfile, "filehash"])
@skipIf(True, "SLOWTEST skip")
def test_file_line_changes_format(self):
"""
Test file.line changes output formatting.
@ -265,7 +255,33 @@ class FileModuleTest(ModuleCase):
)
self.assertIn("Hello" + os.linesep + "+Goodbye", ret)
@skipIf(True, "SLOWTEST skip")
def test_file_line_changes_entire_line(self):
"""
Test file.line entire line matching
Issue #49855
"""
ret = self.minion_run(
"file.line", self.myfile, "Goodbye", mode="insert", after="Hello"
)
assert "Hello" + os.linesep + "+Goodbye" in ret
ret = self.minion_run(
"file.line", self.myfile, "Goodbye 1", mode="insert", after="Hello"
)
assert (
"Hello" + os.linesep + "+Goodbye 1" + os.linesep + " Goodbye" + os.linesep
in ret
)
with salt.utils.files.fopen(self.myfile, "r") as fh_:
content = fh_.read()
assert (
"Hello" + os.linesep + "Goodbye 1" + os.linesep + "Goodbye" + os.linesep
== content
)
def test_file_line_content(self):
self.minion_run(
"file.line", self.myfile, "Goodbye", mode="insert", after="Hello"
@ -274,7 +290,6 @@ class FileModuleTest(ModuleCase):
content = fp.read()
self.assertEqual(content, "Hello" + os.linesep + "Goodbye" + os.linesep)
@skipIf(True, "SLOWTEST skip")
def test_file_line_duplicate_insert_after(self):
"""
Test file.line duplicates line.
@ -290,7 +305,6 @@ class FileModuleTest(ModuleCase):
content = fp.read()
self.assertEqual(content, "Hello" + os.linesep + "Goodbye" + os.linesep)
@skipIf(True, "SLOWTEST skip")
def test_file_line_duplicate_insert_before(self):
"""
Test file.line duplicates line.
@ -306,7 +320,6 @@ class FileModuleTest(ModuleCase):
content = fp.read()
self.assertEqual(content, "Hello" + os.linesep + "Goodbye" + os.linesep)
@skipIf(True, "SLOWTEST skip")
def test_file_line_duplicate_ensure_after(self):
"""
Test file.line duplicates line.
@ -322,7 +335,6 @@ class FileModuleTest(ModuleCase):
content = fp.read()
self.assertEqual(content, "Hello" + os.linesep + "Goodbye" + os.linesep)
@skipIf(True, "SLOWTEST skip")
def test_file_line_duplicate_ensure_before(self):
"""
Test file.line duplicates line.

View file

@ -1,64 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import copy
import logging
import os
import pytest
import salt.utils.platform
import salt.utils.schedule
from salt.modules.test import ping
from tests.support.case import ModuleCase
from tests.support.mixins import SaltReturnAssertsMixin
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
log = logging.getLogger(__name__)
ROOT_DIR = os.path.join(RUNTIME_VARS.TMP, "schedule-unit-tests")
SOCK_DIR = os.path.join(ROOT_DIR, "test-socks")
DEFAULT_CONFIG = salt.config.minion_config(None)
DEFAULT_CONFIG["conf_dir"] = ROOT_DIR
DEFAULT_CONFIG["root_dir"] = ROOT_DIR
DEFAULT_CONFIG["sock_dir"] = SOCK_DIR
DEFAULT_CONFIG["pki_dir"] = os.path.join(ROOT_DIR, "pki")
DEFAULT_CONFIG["cachedir"] = os.path.join(ROOT_DIR, "cache")
@pytest.mark.windows_whitelisted
class SchedulerHelpersTest(ModuleCase, SaltReturnAssertsMixin):
"""
Test scheduler helper functions
"""
def setUp(self):
with patch("salt.utils.schedule.clean_proc_dir", MagicMock(return_value=None)):
functions = {"test.ping": ping}
self.schedule = salt.utils.schedule.Schedule(
copy.deepcopy(DEFAULT_CONFIG), functions, returners={}
)
self.schedule.opts["loop_interval"] = 1
def tearDown(self):
self.schedule.reset()
def test_get_schedule(self):
"""
verify that the _get_schedule function works
when remove_hidden is True and schedule data
contains enabled key
"""
job_name = "test_get_schedule"
job = {
"schedule": {
"enabled": True,
job_name: {"function": "test.ping", "seconds": 60},
}
}
# Add the job to the scheduler
self.schedule.opts.update(job)
ret = self.schedule._get_schedule(remove_hidden=True)
self.assertEqual(job["schedule"], ret)

View file

@ -1,70 +0,0 @@
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import copy
import logging
import os
import salt.utils.platform
# Import Salt libs
import salt.utils.schedule
from salt.modules.test import ping
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.mixins import SaltReturnAssertsMixin
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
try:
import croniter # pylint: disable=W0611
HAS_CRONITER = True
except ImportError:
HAS_CRONITER = False
log = logging.getLogger(__name__)
ROOT_DIR = os.path.join(RUNTIME_VARS.TMP, "schedule-unit-tests")
SOCK_DIR = os.path.join(ROOT_DIR, "test-socks")
DEFAULT_CONFIG = salt.config.minion_config(None)
DEFAULT_CONFIG["conf_dir"] = ROOT_DIR
DEFAULT_CONFIG["root_dir"] = ROOT_DIR
DEFAULT_CONFIG["sock_dir"] = SOCK_DIR
DEFAULT_CONFIG["pki_dir"] = os.path.join(ROOT_DIR, "pki")
DEFAULT_CONFIG["cachedir"] = os.path.join(ROOT_DIR, "cache")
class SchedulerRunJobTest(ModuleCase, SaltReturnAssertsMixin):
    """
    Validate that the scheduler can run a configured job on demand.
    """

    def setUp(self):
        # Build a scheduler around a throw-away copy of the default minion
        # config; clean_proc_dir is patched out so constructing the
        # Schedule never touches the proc directory on disk.
        with patch(
            "salt.utils.schedule.clean_proc_dir", MagicMock(return_value=None)
        ):
            self.schedule = salt.utils.schedule.Schedule(
                copy.deepcopy(DEFAULT_CONFIG), {"test.ping": ping}, returners={}
            )
        self.schedule.opts["loop_interval"] = 1

    def tearDown(self):
        # Drop all scheduler state between tests.
        self.schedule.reset()

    def test_run_job(self):
        """
        verify that scheduled job runs
        """
        job_name = "test_run_job"
        # Register a single job, fire it, then inspect its recorded status.
        self.schedule.opts.update(
            {"schedule": {job_name: {"function": "test.ping"}}}
        )
        self.schedule.run_job(job_name)
        status = self.schedule.job_status(job_name)
        self.assertEqual(
            status, {"function": "test.ping", "run": True, "name": job_name}
        )

View file

@ -137,7 +137,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
if user in str(self.run_function("user.list_users")):
self.run_function("user.delete", [user])
@skipIf(True, "SLOWTEST skip")
def test_symlink(self):
"""
file.symlink
@ -156,7 +155,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
ret = self.run_state("file.symlink", name=name, target=tgt)
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_test_symlink(self):
"""
file.symlink test interface
@ -166,7 +164,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
ret = self.run_state("file.symlink", test=True, name=name, target=tgt)
self.assertSaltNoneReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_absent_file(self):
"""
file.absent
@ -178,7 +175,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
self.assertFalse(os.path.isfile(name))
@skipIf(True, "SLOWTEST skip")
def test_absent_dir(self):
"""
file.absent
@ -191,7 +187,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
self.assertFalse(os.path.isdir(name))
@skipIf(True, "SLOWTEST skip")
def test_absent_link(self):
"""
file.absent
@ -216,7 +211,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.run_function("file.remove", [name])
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_test_absent(self, name):
"""
file.absent test interface
@ -227,7 +221,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltNoneReturn(ret)
self.assertTrue(os.path.isfile(name))
@skipIf(True, "SLOWTEST skip")
def test_managed(self):
"""
file.managed
@ -242,7 +235,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(master_data, minion_data)
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_managed_file_mode(self):
"""
file.managed, correct file permissions
@ -264,7 +256,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@skipIf(IS_WINDOWS, "Windows does not report any file modes. Skipping.")
@skipIf(True, "SLOWTEST skip")
def test_managed_file_mode_keep(self):
"""
Test using "mode: keep" in a file.managed state
@ -272,7 +263,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
_test_managed_file_mode_keep_helper(self, local=False)
@skipIf(IS_WINDOWS, "Windows does not report any file modes. Skipping.")
@skipIf(True, "SLOWTEST skip")
def test_managed_file_mode_keep_local_source(self):
"""
Test using "mode: keep" in a file.managed state, with a local file path
@ -280,7 +270,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
"""
_test_managed_file_mode_keep_helper(self, local=True)
@skipIf(True, "SLOWTEST skip")
def test_managed_file_mode_file_exists_replace(self):
"""
file.managed, existing file with replace=True, change permissions
@ -316,7 +305,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(oct(desired_mode), oct(resulting_mode))
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_managed_file_mode_file_exists_noreplace(self):
"""
file.managed, existing file with replace=False, change permissions
@ -349,7 +337,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(oct(desired_mode), oct(resulting_mode))
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_managed_file_with_grains_data(self):
"""
Test to ensure we can render grains data into a managed
@ -370,7 +357,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
match = "^minion\n"
self.assertTrue(re.match(match, file_contents[0]))
@skipIf(True, "SLOWTEST skip")
def test_managed_file_with_pillar_sls(self):
"""
Test to ensure pillar data in sls file
@ -389,7 +375,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
check_file = self.run_function("file.file_exists", [file_pillar])
self.assertTrue(check_file)
@skipIf(True, "SLOWTEST skip")
def test_managed_file_with_pillardefault_sls(self):
"""
Test to ensure when pillar data is not available
@ -409,7 +394,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(check_file)
@skip_if_not_root
@skipIf(True, "SLOWTEST skip")
def test_managed_dir_mode(self):
"""
Tests to ensure that file.managed creates directories with the
@ -443,7 +427,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
self.assertEqual(desired_owner, resulting_owner)
@skipIf(True, "SLOWTEST skip")
def test_test_managed(self):
"""
file.managed test interface
@ -455,7 +438,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltNoneReturn(ret)
self.assertFalse(os.path.isfile(name))
@skipIf(True, "SLOWTEST skip")
def test_managed_show_changes_false(self):
"""
file.managed test interface
@ -471,7 +453,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
changes = next(six.itervalues(ret))["changes"]
self.assertEqual("<show_changes=False>", changes["diff"])
@skipIf(True, "SLOWTEST skip")
def test_managed_show_changes_true(self):
"""
file.managed test interface
@ -486,7 +467,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertIn("diff", changes)
@skipIf(IS_WINDOWS, "Don't know how to fix for Windows")
@skipIf(True, "SLOWTEST skip")
def test_managed_escaped_file_path(self):
"""
file.managed test that 'salt://|' protects unusual characters in file path
@ -526,7 +506,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
ret = self.run_function("state.sls", [state_name])
self.assertTrue(ret[state_key]["result"])
@skipIf(True, "SLOWTEST skip")
def test_managed_contents(self):
"""
test file.managed with contents that is a boolean, string, integer,
@ -598,7 +577,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
if os.path.exists(managed_files[typ]):
os.remove(managed_files[typ])
@skipIf(True, "SLOWTEST skip")
def test_managed_contents_with_contents_newline(self):
"""
test file.managed with contents by using the default content_newline
@ -615,7 +593,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
last_line = fp_.read()
self.assertEqual((contents + os.linesep), last_line)
@skipIf(True, "SLOWTEST skip")
def test_managed_contents_with_contents_newline_false(self):
"""
test file.managed with contents by using the non default content_newline
@ -632,7 +609,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
last_line = fp_.read()
self.assertEqual(contents, last_line)
@skipIf(True, "SLOWTEST skip")
def test_managed_multiline_contents_with_contents_newline(self):
"""
test file.managed with contents by using the non default content_newline
@ -649,7 +625,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
last_line = fp_.read()
self.assertEqual((contents + os.linesep), last_line)
@skipIf(True, "SLOWTEST skip")
def test_managed_multiline_contents_with_contents_newline_false(self):
"""
test file.managed with contents by using the non default content_newline
@ -669,7 +644,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(IS_WINDOWS, 'Windows does not support "mode" kwarg. Skipping.')
@skipIf(not salt.utils.path.which("visudo"), "sudo is missing")
@skipIf(True, "SLOWTEST skip")
def test_managed_check_cmd(self):
"""
Test file.managed passing a basic check_cmd kwarg. See Issue #38111.
@ -697,7 +671,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
if os.path.exists("/tmp/sudoers"):
os.remove("/tmp/sudoers")
@skipIf(True, "SLOWTEST skip")
def test_managed_local_source_with_source_hash(self):
"""
Make sure that we enforce the source_hash even with local files
@ -758,7 +731,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
finally:
remove_file()
@skipIf(True, "SLOWTEST skip")
def test_managed_local_source_does_not_exist(self):
"""
Make sure that we exit gracefully when a local source doesn't exist
@ -777,7 +749,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
# Check that we identified a hash mismatch
self.assertIn("does not exist", ret["comment"])
@skipIf(True, "SLOWTEST skip")
def test_managed_unicode_jinja_with_tojson_filter(self):
"""
Using {{ varname }} with a list or dictionary which contains unicode
@ -821,7 +792,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
)
assert managed == expected, "{0!r} != {1!r}".format(managed, expected)
@skipIf(True, "SLOWTEST skip")
def test_managed_source_hash_indifferent_case(self):
"""
Test passing a source_hash as an uppercase hash.
@ -870,7 +840,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
os.remove(name)
@with_tempfile(create=False)
@skipIf(True, "SLOWTEST skip")
def test_managed_latin1_diff(self, name):
"""
Tests that latin-1 file contents are represented properly in the diff
@ -892,7 +861,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
assert "+räksmörgås" in diff_lines, diff_lines
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_managed_keep_source_false_salt(self, name):
"""
This test ensures that we properly clean the cached file if keep_source
@ -914,7 +882,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempfile(create=False)
@with_tempfile(create=False)
@skipIf(True, "SLOWTEST skip")
def test_file_managed_onchanges(self, file1, file2):
"""
Test file.managed state with onchanges
@ -953,7 +920,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempfile(create=False)
@with_tempfile(create=False)
@skipIf(True, "SLOWTEST skip")
def test_file_managed_prereq(self, file1, file2):
"""
Test file.managed state with prereq
@ -987,7 +953,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
# The state watching 'three' should not have been run
assert ret["four"]["comment"] == "No changes detected", ret["four"]["comment"]
@skipIf(True, "SLOWTEST skip")
def test_directory(self):
"""
file.directory
@ -997,7 +962,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
self.assertTrue(os.path.isdir(name))
@skipIf(True, "SLOWTEST skip")
def test_directory_symlink_dry_run(self):
"""
Ensure that symlinks are followed when file.directory is run with
@ -1036,7 +1000,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(IS_WINDOWS, "Mode not available in Windows")
@skipIf(True, "SLOWTEST skip")
def test_directory_max_depth(self):
"""
file.directory
@ -1091,7 +1054,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
finally:
shutil.rmtree(top)
@skipIf(True, "SLOWTEST skip")
def test_test_directory(self):
"""
file.directory
@ -1102,7 +1064,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertFalse(os.path.isdir(name))
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_directory_clean(self, base_dir):
"""
file.directory with clean=True
@ -1127,7 +1088,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertFalse(os.path.exists(straydir))
self.assertTrue(os.path.isdir(name))
@skipIf(True, "SLOWTEST skip")
def test_directory_is_idempotent(self):
"""
Ensure the file.directory state produces no changes when rerun.
@ -1154,7 +1114,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltStateChangesEqual(ret, {})
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_directory_clean_exclude(self, base_dir):
"""
file.directory with clean=True and exclude_pat set
@ -1194,7 +1153,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skipIf(IS_WINDOWS, "Skip on windows")
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_test_directory_clean_exclude(self, base_dir):
"""
file.directory with test=True, clean=True and exclude_pat set
@ -1241,7 +1199,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertNotIn(keepfile, comment)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_directory_clean_require_in(self, name):
"""
file.directory test with clean=True and require_in file
@ -1280,7 +1237,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertFalse(os.path.exists(wrong_file))
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_directory_clean_require_in_with_id(self, name):
"""
file.directory test with clean=True and require_in file with an ID
@ -1325,7 +1281,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
"WAR ROOM TEMPORARY SKIP, Test is flaky on macosx",
)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_directory_clean_require_with_name(self, name):
"""
file.directory test with clean=True and require with a file state
@ -1367,7 +1322,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(os.path.exists(good_file))
self.assertFalse(os.path.exists(wrong_file))
@skipIf(True, "SLOWTEST skip")
def test_directory_broken_symlink(self):
"""
Ensure that file.directory works even if a directory
@ -1405,7 +1359,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.run_function("file.remove", [tmp_dir])
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_recurse(self, name):
"""
file.recurse
@ -1416,7 +1369,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempdir(create=False)
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_recurse_specific_env(self, dir1, dir2):
"""
file.recurse passing __env__
@ -1435,7 +1387,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempdir(create=False)
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_recurse_specific_env_in_url(self, dir1, dir2):
"""
file.recurse passing __env__
@ -1453,7 +1404,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(os.path.isfile(os.path.join(dir2, "32", "scene")))
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_test_recurse(self, name):
"""
file.recurse test interface
@ -1467,7 +1417,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempdir(create=False)
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_test_recurse_specific_env(self, dir1, dir2):
"""
file.recurse test interface
@ -1487,7 +1436,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertFalse(os.path.exists(dir2))
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_recurse_template(self, name):
"""
file.recurse with jinja template enabled
@ -1506,7 +1454,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertIn(_ts, contents)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_recurse_clean(self, name):
"""
file.recurse with clean=True
@ -1528,7 +1475,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(os.path.isfile(os.path.join(name, "scene33")))
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_recurse_clean_specific_env(self, name):
"""
file.recurse with clean=True and __env__=prod
@ -1551,7 +1497,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skipIf(IS_WINDOWS, "Skip on windows")
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_recurse_issue_34945(self, base_dir):
"""
This tests the case where the source dir for the file.recurse state
@ -1581,7 +1526,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(dir_mode, actual_dir_mode)
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_recurse_issue_40578(self, name):
"""
This ensures that the state doesn't raise an exception when it
@ -1602,7 +1546,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_replace(self, name):
"""
file.replace
@ -1620,7 +1563,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18612(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18612:
@ -1665,7 +1607,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(item)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18612_prepend(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18612:
@ -1714,7 +1655,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(item)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18612_append(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18612:
@ -1763,7 +1703,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(item)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18612_append_not_found_content(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18612:
@ -1813,7 +1752,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(item)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18612_change_mid_line_with_comment(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18612:
@ -1863,7 +1801,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(item)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18841_no_changes(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18841:
@ -1922,7 +1859,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
# ensure, all 'file.replace' runs reported success
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_serialize(self):
"""
Test to ensure that file.serialize returns a data structure that's
@ -1962,7 +1898,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(serialized_file, expected_file)
@with_tempfile(create=False)
@skipIf(True, "SLOWTEST skip")
def test_serializer_deserializer_opts(self, name):
"""
Test the serializer_opts and deserializer_opts options
@ -2013,7 +1948,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
assert serialized_data["foo"]["bar"] == merged["foo"]["bar"]
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_replace_issue_18841_omit_backup(self, base_dir):
"""
Test the (mis-)behaviour of file.replace as described in #18841:
@ -2072,7 +2006,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_comment(self, name):
"""
file.comment
@ -2107,7 +2040,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_test_comment(self, name):
"""
file.comment test interface
@ -2120,7 +2052,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltNoneReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_uncomment(self, name):
"""
file.uncomment
@ -2133,7 +2064,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_test_uncomment(self, name):
"""
file.comment test interface
@ -2146,8 +2076,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltNoneReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
@skipIf(True, "SLOWTEST skip")
def test_append(self, name):
"""
file.append
@ -2160,7 +2088,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_test_append(self, name):
"""
file.append test interface
@ -2173,7 +2100,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltNoneReturn(ret)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_append_issue_1864_makedirs(self, base_dir):
"""
file.append but create directories if needed as an option, and create
@ -2198,7 +2124,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(os.path.isfile(name))
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_prepend_issue_27401_makedirs(self, base_dir):
"""
file.prepend but create directories if needed as an option, and create
@ -2223,7 +2148,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(os.path.isfile(name))
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_touch(self, name):
"""
file.touch
@ -2233,7 +2157,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempfile(create=False)
@skipIf(True, "SLOWTEST skip")
def test_test_touch(self, name):
"""
file.touch test interface
@ -2243,7 +2166,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltNoneReturn(ret)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_touch_directory(self, base_dir):
"""
file.touch a directory
@ -2256,7 +2178,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertTrue(os.path.isdir(name))
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_2227_file_append(self, base_dir):
"""
Text to append includes a percent symbol
@ -2298,7 +2219,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
raise
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_2401_file_comment(self, base_dir):
# Get a path to the temporary file
tmp_file = os.path.join(base_dir, "issue-2041-comment.txt")
@ -2328,7 +2248,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
raise
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_2379_file_append(self, base_dir):
# Get a path to the temporary file
tmp_file = os.path.join(base_dir, "issue-2379-file-append.txt")
@ -2358,7 +2277,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skipIf(IS_WINDOWS, "Mode not available in Windows")
@with_tempdir(create=False)
@with_tempdir(create=False)
@skipIf(True, "SLOWTEST skip")
def test_issue_2726_mode_kwarg(self, dir1, dir2):
# Let's test for the wrong usage approach
bad_mode_kwarg_testfile = os.path.join(dir1, "bad_mode_kwarg", "testfile")
@ -2393,7 +2311,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_8343_accumulated_require_in(self, base_dir):
template_path = os.path.join(RUNTIME_VARS.TMP_STATE_TREE, "issue-8343.sls")
testcase_filedest = os.path.join(base_dir, "issue-8343.txt")
@ -2472,7 +2389,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skipIf(
salt.utils.platform.is_darwin() and six.PY2, "This test hangs on OS X on Py2"
)
@skipIf(True, "SLOWTEST skip")
def test_issue_11003_immutable_lazy_proxy_sum(self, base_dir):
# causes the Import-Module ServerManager error on Windows
template_path = os.path.join(RUNTIME_VARS.TMP_STATE_TREE, "issue-11003.sls")
@ -2530,7 +2446,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(block_contents, [])
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_8947_utf8_sls(self, base_dir):
"""
Test some file operation with utf-8 characters on the sls
@ -2657,7 +2572,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
TEST_SYSTEM_USER, TEST_SYSTEM_GROUP, on_existing="delete", delete=True
)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_12209_follow_symlinks(self, tempdir, user, group):
"""
Ensure that symlinks are properly chowned when recursing (following
@ -2699,7 +2613,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
TEST_SYSTEM_USER, TEST_SYSTEM_GROUP, on_existing="delete", delete=True
)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_12209_no_follow_symlinks(self, tempdir, user, group):
"""
Ensure that symlinks are properly chowned when recursing (not following
@ -2734,7 +2647,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempfile(create=False)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_template_local_file(self, source, dest):
"""
Test a file.managed state with a local file as the source. Test both
@ -2754,7 +2666,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(ret)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_template_local_file_noclobber(self, source):
"""
Test the case where a source file is in the minion's local filesystem,
@ -2778,7 +2689,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_tempfile(create=False)
@with_tempfile(create=False)
@skipIf(True, "SLOWTEST skip")
def test_issue_25250_force_copy_deletes(self, source, dest):
"""
ensure force option in copy state does not delete target file
@ -2797,7 +2707,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@skip_if_not_root
@skipIf(IS_WINDOWS, "Windows does not report any file modes. Skipping.")
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_file_copy_make_dirs(self, source):
"""
ensure make_dirs creates correct user perms
@ -2824,7 +2733,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(user_check, user)
self.assertEqual(salt.utils.files.normalize_mode(mode_check), mode)
@skipIf(True, "SLOWTEST skip")
def test_contents_pillar_with_pillar_list(self):
"""
This tests for any regressions for this issue:
@ -2841,7 +2749,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
@with_system_user_and_group(
TEST_SYSTEM_USER, TEST_SYSTEM_GROUP, on_existing="delete", delete=True
)
@skipIf(True, "SLOWTEST skip")
def test_owner_after_setuid(self, user, group):
"""
@ -2880,7 +2787,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(desired["group"], result["group"])
self.assertEqual(desired["mode"], result["mode"].lstrip("0Oo"))
@skipIf(True, "SLOWTEST skip")
def test_binary_contents(self):
"""
This tests to ensure that binary contents do not cause a traceback.
@ -2895,7 +2801,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
except OSError:
pass
@skipIf(True, "SLOWTEST skip")
def test_binary_contents_twice(self):
"""
This test ensures that after a binary file is created, salt can confirm
@ -2923,7 +2828,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
TEST_SYSTEM_USER, TEST_SYSTEM_GROUP, on_existing="delete", delete=True
)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_48336_file_managed_mode_setuid(self, tempdir, user, group):
"""
Ensure that mode is correct with changing of ownership and group
@ -2950,7 +2854,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(grp.getgrgid(temp_file_stats.st_gid).gr_name, group)
@with_tempdir()
@skipIf(True, "SLOWTEST skip")
def test_issue_48557(self, tempdir):
tempfile = os.path.join(tempdir, "temp_file_issue_48557")
with salt.utils.files.fopen(tempfile, "wb") as fp:
@ -2967,7 +2870,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_issue_50221(self, name):
expected = "abc{0}{0}{0}".format(os.linesep)
ret = self.run_function("pillar.get", ["issue-50221"])
@ -2978,7 +2880,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
contents = fp.read()
assert contents == expected
@skipIf(True, "SLOWTEST skip")
def test_managed_file_issue_51208(self):
"""
Test to ensure we can handle a file with escaped double-quotes
@ -3177,7 +3078,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
return salt.utils.stringutils.to_unicode(fp_.read())
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_prepend(self, name):
"""
Test blockreplace when prepend_if_not_found=True and block doesn't
@ -3245,7 +3145,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), expected)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_prepend_append_newline(self, name):
"""
Test blockreplace when prepend_if_not_found=True and block doesn't
@ -3325,7 +3224,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), expected)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_prepend_no_append_newline(self, name):
"""
Test blockreplace when prepend_if_not_found=True and block doesn't
@ -3404,8 +3302,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), expected)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
@skipIf(True, "SLOWTEST skip")
def test_append(self, name):
"""
Test blockreplace when append_if_not_found=True and block doesn't
@ -3473,7 +3369,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), expected)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_append_append_newline(self, name):
"""
Test blockreplace when append_if_not_found=True and block doesn't
@ -3553,7 +3448,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), expected)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_append_no_append_newline(self, name):
"""
Test blockreplace when append_if_not_found=True and block doesn't
@ -3632,7 +3526,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), expected)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_prepend_auto_line_separator(self, name):
"""
This tests the line separator auto-detection when prepending the block
@ -3698,7 +3591,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_append_auto_line_separator(self, name):
"""
This tests the line separator auto-detection when appending the block
@ -3764,7 +3656,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_non_matching_block(self, name):
"""
Test blockreplace when block exists but its contents are not a
@ -3819,7 +3710,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_non_matching_block_append_newline(self, name):
"""
Test blockreplace when block exists but its contents are not a
@ -3878,7 +3768,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_non_matching_block_no_append_newline(self, name):
"""
Test blockreplace when block exists but its contents are not a
@ -3941,7 +3830,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_non_matching_block_and_marker_not_after_newline(self, name):
"""
Test blockreplace when block exists but its contents are not a
@ -3996,7 +3884,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_non_matching_block_and_marker_not_after_newline_append_newline(self, name):
"""
Test blockreplace when block exists but its contents are not a match,
@ -4056,7 +3943,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_non_matching_block_and_marker_not_after_newline_no_append_newline(
self, name
):
@ -4122,7 +4008,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_matching_block(self, name):
"""
Test blockreplace when block exists and its contents are a match. No
@ -4177,7 +4062,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_matching_block_append_newline(self, name):
"""
Test blockreplace when block exists and its contents are a match. Test
@ -4238,7 +4122,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_matching_block_no_append_newline(self, name):
"""
Test blockreplace when block exists and its contents are a match. Test
@ -4305,7 +4188,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_matching_block_and_marker_not_after_newline(self, name):
"""
Test blockreplace when block exists and its contents are a match, but
@ -4360,7 +4242,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_matching_block_and_marker_not_after_newline_append_newline(self, name):
"""
Test blockreplace when block exists and its contents are a match, but
@ -4422,7 +4303,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(self._read(name), self.with_matching_block)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_matching_block_and_marker_not_after_newline_no_append_newline(self, name):
"""
Test blockreplace when block exists and its contents are a match, but
@ -4486,7 +4366,6 @@ class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
)
@with_tempfile()
@skipIf(True, "SLOWTEST skip")
def test_issue_49043(self, name):
ret = self.run_function("state.sls", mods="issue-49043", pillar={"name": name},)
log.error("ret = %s", repr(ret))
@ -4540,7 +4419,6 @@ class RemoteFileTest(ModuleCase, SaltReturnAssertsMixin):
log.debug("ret = %s", ret)
return ret
@skipIf(True, "SLOWTEST skip")
def test_file_managed_http_source_no_hash(self):
"""
Test a remote file with no hash
@ -4551,7 +4429,6 @@ class RemoteFileTest(ModuleCase, SaltReturnAssertsMixin):
# This should fail because no hash was provided
self.assertSaltFalseReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_file_managed_http_source(self):
"""
Test a remote file with no hash
@ -4565,7 +4442,6 @@ class RemoteFileTest(ModuleCase, SaltReturnAssertsMixin):
)
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_file_managed_http_source_skip_verify(self):
"""
Test a remote file using skip_verify
@ -4575,7 +4451,6 @@ class RemoteFileTest(ModuleCase, SaltReturnAssertsMixin):
)
self.assertSaltTrueReturn(ret)
@skipIf(True, "SLOWTEST skip")
def test_file_managed_keep_source_false_http(self):
"""
This test ensures that we properly clean the cached file if keep_source
@ -4708,7 +4583,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.addCleanup(shutil.rmtree, self.base_dir, ignore_errors=True)
@skipIf(True, "SLOWTEST skip")
def test_patch_single_file(self):
"""
Test file.patch using a patch applied to a single file
@ -4730,7 +4604,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_directory(self):
"""
Test file.patch using a patch applied to a directory, with changes
@ -4754,7 +4627,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_strip_parsing(self):
"""
Test that we successfuly parse -p/--strip when included in the options
@ -4809,7 +4681,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
"Source file {0} not found in saltenv 'prod'".format(self.math_patch),
)
@skipIf(True, "SLOWTEST skip")
def test_patch_single_file_failure(self):
"""
Test file.patch using a patch applied to a single file. This tests a
@ -4843,7 +4714,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
ret["comment"], "saving rejects to (file )?{0}".format(reject_file)
)
@skipIf(True, "SLOWTEST skip")
def test_patch_directory_failure(self):
"""
Test file.patch using a patch applied to a directory, with changes
@ -4877,7 +4747,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
ret["comment"], "saving rejects to (file )?{0}".format(reject_file)
)
@skipIf(True, "SLOWTEST skip")
def test_patch_single_file_remote_source(self):
"""
Test file.patch using a patch applied to a single file, with the patch
@ -4916,7 +4785,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_directory_remote_source(self):
"""
Test file.patch using a patch applied to a directory, with changes
@ -4959,7 +4827,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_single_file_template(self):
"""
Test file.patch using a patch applied to a single file, with jinja
@ -4990,7 +4857,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_directory_template(self):
"""
Test file.patch using a patch applied to a directory, with changes
@ -5025,7 +4891,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_single_file_remote_source_template(self):
"""
Test file.patch using a patch applied to a single file, with the patch
@ -5072,7 +4937,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_directory_remote_source_template(self):
"""
Test file.patch using a patch applied to a directory, with changes
@ -5124,7 +4988,6 @@ class PatchTest(ModuleCase, SaltReturnAssertsMixin):
self.assertEqual(ret["comment"], "Patch was already applied")
self.assertEqual(ret["changes"], {})
@skipIf(True, "SLOWTEST skip")
def test_patch_test_mode(self):
"""
Test file.patch using test=True
@ -5198,14 +5061,12 @@ class WinFileTest(ModuleCase):
def tearDown(self):
self.run_state("file.absent", name=WIN_TEST_FILE)
@skipIf(True, "SLOWTEST skip")
def test_file_managed(self):
"""
Test file.managed on Windows
"""
self.assertTrue(self.run_state("file.exists", name=WIN_TEST_FILE))
@skipIf(True, "SLOWTEST skip")
def test_file_copy(self):
"""
Test file.copy on Windows
@ -5215,7 +5076,6 @@ class WinFileTest(ModuleCase):
)
self.assertTrue(ret)
@skipIf(True, "SLOWTEST skip")
def test_file_comment(self):
"""
Test file.comment on Windows
@ -5224,7 +5084,6 @@ class WinFileTest(ModuleCase):
with salt.utils.files.fopen(WIN_TEST_FILE, "r") as fp_:
self.assertTrue(fp_.read().startswith("#Only"))
@skipIf(True, "SLOWTEST skip")
def test_file_replace(self):
"""
Test file.replace on Windows
@ -5235,7 +5094,6 @@ class WinFileTest(ModuleCase):
with salt.utils.files.fopen(WIN_TEST_FILE, "r") as fp_:
self.assertIn("testing", fp_.read())
@skipIf(True, "SLOWTEST skip")
def test_file_absent(self):
"""
Test file.absent on Windows

View file

@ -129,7 +129,6 @@ TEST_SUITES_UNORDERED = {
"proxy": {"display_name": "Proxy", "path": "integration/proxy"},
"external_api": {"display_name": "ExternalAPIs", "path": "integration/externalapi"},
"daemons": {"display_name": "Daemon", "path": "integration/daemons"},
"scheduler": {"display_name": "Scheduler", "path": "integration/scheduler"},
"sdb": {"display_name": "Sdb", "path": "integration/sdb"},
"logging": {"display_name": "Logging", "path": "integration/logging"},
}
@ -142,6 +141,7 @@ TEST_SUITES = collections.OrderedDict(
class SaltTestsuiteParser(SaltCoverageTestingParser):
support_docker_execution = True
support_destructive_tests_selection = True
support_expensive_tests_selection = True
source_code_basedir = SALT_ROOT
def _get_suites(

View file

@ -667,6 +667,15 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
}
self._run_os_grains_tests("debian-9", _os_release_map, expectation)
def test_unicode_error(self):
raise_unicode_mock = MagicMock(
name="raise_unicode_error", side_effect=UnicodeError
)
with patch("salt.grains.core.hostname"):
with patch("socket.getaddrinfo", raise_unicode_mock):
ret = salt.grains.core.ip_fqdn()
assert ret["fqdn_ip4"] == ret["fqdn_ip6"] == []
@skipIf(not salt.utils.platform.is_linux(), "System is not Linux")
def test_ubuntu_xenial_os_grains(self):
"""

View file

@ -158,3 +158,100 @@ class ArtifactoryTestCase(TestCase, LoaderModuleMockMixin):
"http://artifactory.example.com/artifactory/libs-snapshots/com/company/sampleapp/web-module/web/1.0_RC10-SNAPSHOT/web-1.0_RC10-20131127.105838-2.war",
)
self.assertEqual(file_name, "web-1.0_RC10-20131127.105838-2.war")
    def test_get_snapshot_url_with_classifier(self):
        """
        Test that _get_snapshot_url resolves the timestamped snapshot value
        from the version metadata and appends the requested classifier
        ("test") to the file name before the packaging extension.
        """
        # Canned maven-metadata.xml: a single snapshot build (timestamp
        # 20140418.150212, build 1) publishing a "pom" entry and a "war"
        # entry that carries the "test" classifier.
        with patch(
            "salt.modules.artifactory._get_snapshot_version_metadata_xml",
            MagicMock(
                return_value="""<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>com.company.sampleapp.web-module</groupId>
<artifactId>web</artifactId>
<version>1.1_RC8-SNAPSHOT</version>
<versioning>
<snapshot>
<timestamp>20140418.150212</timestamp>
<buildNumber>1</buildNumber>
</snapshot>
<lastUpdated>20140623104055</lastUpdated>
<snapshotVersions>
<snapshotVersion>
<extension>pom</extension>
<value>1.1_RC8-20140418.150212-1</value>
<updated>20140418150212</updated>
</snapshotVersion>
<snapshotVersion>
<classifier>test</classifier>
<extension>war</extension>
<value>1.1_RC8-20140418.150212-1</value>
<updated>20140418150212</updated>
</snapshotVersion>
</snapshotVersions>
</versioning>
</metadata>
"""
            ),
        ):
            artifact_url, file_name = artifactory._get_snapshot_url(
                artifactory_url="http://artifactory.example.com/artifactory",
                repository="libs-snapshots",
                group_id="com.company.sampleapp.web-module",
                artifact_id="web",
                version="1.1_RC8-SNAPSHOT",
                packaging="war",
                classifier="test",
                headers={},
            )
            # The classifier is inserted between the snapshot value and the
            # packaging extension: ...-20140418.150212-1-test.war
            self.assertEqual(
                artifact_url,
                "http://artifactory.example.com/artifactory/libs-snapshots/com/company/sampleapp/web-module/web/1.1_RC8-SNAPSHOT/web-1.1_RC8-20140418.150212-1-test.war",
            )
    def test_get_snapshot_url_without_classifier(self):
        """
        test when classifier not set and packaging
        does not match snapshot_versions in the metadata.

        The only "war" snapshotVersion entry in the canned metadata carries a
        classifier, so an unclassified lookup has no match and the module is
        expected to raise ArtifactoryError.
        """
        with patch(
            "salt.modules.artifactory._get_snapshot_version_metadata_xml",
            MagicMock(
                return_value="""<?xml version="1.0" encoding="UTF-8"?>
<metadata>
<groupId>com.company.sampleapp.web-module</groupId>
<artifactId>web</artifactId>
<version>1.1_RC8-SNAPSHOT</version>
<versioning>
<snapshot>
<timestamp>20140418.150212</timestamp>
<buildNumber>1</buildNumber>
</snapshot>
<lastUpdated>20140623104055</lastUpdated>
<snapshotVersions>
<snapshotVersion>
<extension>pom</extension>
<value>1.1_RC8-20140418.150212-1</value>
<updated>20140418150212</updated>
</snapshotVersion>
<snapshotVersion>
<classifier>test</classifier>
<extension>war</extension>
<value>1.1_RC8-20140418.150212-1</value>
<updated>20140418150212</updated>
</snapshotVersion>
</snapshotVersions>
</versioning>
</metadata>
"""
            ),
        ):
            # No classifier= passed -> lookup must fail.
            with self.assertRaises(artifactory.ArtifactoryError):
                artifact_url, file_name = artifactory._get_snapshot_url(
                    artifactory_url="http://artifactory.example.com/artifactory",
                    repository="libs-snapshots",
                    group_id="com.company.sampleapp.web-module",
                    artifact_id="web",
                    version="1.1_RC8-SNAPSHOT",
                    packaging="war",
                    headers={},
                )

View file

@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import logging
import os.path
import sys
from collections import namedtuple
import pkg_resources
@ -14,10 +15,12 @@ import salt.config
import salt.loader
import salt.modules.boto_route53 as boto_route53
import salt.utils.versions
from boto.route53.exception import DNSServerError
from salt.ext import six
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import MagicMock, patch
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import TestCase, skipIf
@ -154,3 +157,252 @@ class BotoRoute53TestCase(TestCase, LoaderModuleMockMixin):
]
del healthcheck["result"]["CreateHealthCheckResponse"]["HealthCheck"]["Id"]
self.assertEqual(healthcheck, expected)
class DummyConn(object):
    """
    Simple object housing a mock to simulate Error conditions. Each keyword
    argument passed into this will be set as MagicMock with the keyword value
    being set as the side_effect for that function.
    """

    def __init__(self, **kwargs):
        # ``kwargs`` is always a plain dict, so the six.iteritems py2/py3
        # shim is unnecessary; iterate it directly.
        for key, val in kwargs.items():
            setattr(self, key, MagicMock(side_effect=val))
class BotoRoute53RetryTestCase(TestCase, LoaderModuleMockMixin):
    """
    TestCase for salt.modules.boto_route53 module

    Exercises the module's retry behavior: a DNSServerError whose code is
    not recognized as fatal should be retried up to ``error_retries`` times,
    while a fatal one must be re-raised to the caller.
    """

    # Error code "Foo" is not fatal; the module is expected to retry calls
    # that raise this error.
    _retryable_error = DNSServerError(
        555, "Rejected", body={"Error": {"Code": "Foo", "Message": "Bar"}}
    )
    # "SignatureDoesNotMatch" is treated as non-retryable by the module and
    # must be re-raised (see the "Non-retryable error" sections below).
    _fatal_error = DNSServerError(
        666,
        "Flagrant System Error",
        body={
            "Error": {
                "Code": "SignatureDoesNotMatch",
                "Message": "Computer Over. Virus = Very Yes.",
            }
        },
    )

    def setup_loader_modules(self):
        # Only ``boto.get_error`` is needed from __utils__; it returns a
        # canned string so error paths can be asserted deterministically.
        return {
            boto_route53: {
                "__utils__": {
                    "boto.get_error": MagicMock(return_value="There was an error"),
                },
            },
        }

    def setUp(self):
        # This would normally be set by __utils__["boto.assign_funcs"], but
        # we're not running that as part of this test class, so we need to make
        # sure this attribute is present so that it can be mocked.
        boto_route53._get_conn = None  # pylint: disable=unmocked-patch

    def tearDown(self):
        # Remove the attribute injected in setUp so state does not leak
        # between test classes.
        delattr(boto_route53, "_get_conn")

    def test_zone_exists(self):
        """
        Tests retry behavior for zone_exists
        """
        # Retryable error (max retries reached)
        conn = DummyConn(get_zone=[self._retryable_error, self._retryable_error])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.zone_exists("foo", error_retries=2,)
            assert conn.get_zone.call_count == 2
            assert result is False

        # Retryable error (passes on 2nd attempt)
        conn = DummyConn(get_zone=[self._retryable_error, True])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.zone_exists("foo")
            assert conn.get_zone.call_count == 2
            assert result is True

        # Non-retryable error (should re-raise DNSServerError)
        conn = DummyConn(get_zone=[self._fatal_error])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            try:
                result = boto_route53.zone_exists("foo")
            except DNSServerError:
                # This is the expected result
                pass
            else:
                raise Exception("DNSServerError not raised")

    @patch.object(boto, "route53", MagicMock())
    def test_create_healthcheck(self):
        """
        Tests retry behavior for create_healthcheck
        """
        # Retryable error (max retries reached)
        conn = DummyConn(
            create_health_check=[self._retryable_error, self._retryable_error]
        )
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.create_healthcheck("foo", error_retries=2,)
            assert conn.create_health_check.call_count == 2
            assert result is False

        # Retryable error (passes on 2nd attempt)
        conn = DummyConn(create_health_check=[self._retryable_error, True])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.create_healthcheck("foo")
            assert conn.create_health_check.call_count == 2
            assert result == {"result": True}, result

        # Non-retryable error (should re-raise DNSServerError)
        conn = DummyConn(create_health_check=[self._fatal_error])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            # Unlike zone_exists, the fatal error here is reported via the
            # mocked ``boto.get_error`` string rather than re-raised.
            result = boto_route53.create_healthcheck("foo")
            assert conn.create_health_check.call_count == 1
            assert result == {"error": "There was an error"}, result

    def test_get_record(self):
        """
        Tests retry behavior for get_record
        """
        # Retryable error (max retries reached)
        conn = DummyConn(get_zone=[self._retryable_error, self._retryable_error])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.get_record("foo", "bar", "baz", error_retries=2,)
            assert conn.get_zone.call_count == 2
            assert not result

        # Retryable error (passes on 2nd attempt)
        # The 2nd get_zone call yields a minimal zone stub whose
        # find_records always returns False.
        conn = DummyConn(
            get_zone=[
                self._retryable_error,
                namedtuple("Zone", "find_records")(lambda *args, **kwargs: False),
            ]
        )
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.get_record("foo", "bar", "baz")
            assert conn.get_zone.call_count == 2
            assert not result

        # Non-retryable error (should re-raise DNSServerError)
        conn = DummyConn(get_zone=[self._fatal_error])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            try:
                result = boto_route53.get_record("foo", "bar", "baz")
            except DNSServerError:
                # This is the expected result
                pass
            else:
                raise Exception("DNSServerError not raised")

    @patch.object(boto_route53, "_wait_for_sync", MagicMock(return_value=True))
    def test_add_record(self):
        """
        Tests retry behavior for add_record
        """
        # Retryable error (max retries reached); the retried call here is
        # zone.add_record, not conn.get_zone.
        zone = DummyConn(add_record=[self._retryable_error, self._retryable_error])
        zone.id = "foo"
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.add_record("a", "b", "c", "d", error_retries=2,)
            assert zone.add_record.call_count == 2
            assert not result

        # Retryable error (passes on 2nd attempt)
        zone = DummyConn(
            add_record=[self._retryable_error, namedtuple("Status", "id")("foo")]
        )
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.add_record("a", "b", "c", "d")
            assert zone.add_record.call_count == 2
            assert result

        # Non-retryable error (should re-raise DNSServerError)
        zone = DummyConn(add_record=[self._fatal_error])
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            try:
                result = boto_route53.add_record("a", "b", "c", "d")
            except DNSServerError:
                # This is the expected result
                pass
            else:
                raise Exception("DNSServerError not raised")

    @patch.object(boto_route53, "_wait_for_sync", MagicMock(return_value=True))
    def test_update_record(self):
        """
        Tests retry behavior for update_record
        """
        # Retryable error (max retries reached); the retried call here is
        # zone.find_records.
        zone = DummyConn(find_records=[self._retryable_error, self._retryable_error])
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.update_record("a", "b", "c", "d", error_retries=2,)
            assert zone.find_records.call_count == 2
            assert not result

        # Retryable error (passes on 2nd attempt): find_records succeeds
        # both times, update_record fails once then returns a status stub.
        zone = DummyConn(
            find_records=[True, True],
            update_record=[self._retryable_error, namedtuple("Status", "id")("foo")],
        )
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.update_record("a", "b", "c", "d")
            assert zone.update_record.call_count == 2
            assert result

        # Non-retryable error (should re-raise DNSServerError)
        zone = DummyConn(find_records=[self._fatal_error])
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            try:
                result = boto_route53.update_record("a", "b", "c", "d")
            except DNSServerError:
                # This is the expected result
                pass
            else:
                raise Exception("DNSServerError not raised")

    @patch.object(boto_route53, "_wait_for_sync", MagicMock(return_value=True))
    def test_delete_record(self):
        """
        Tests retry behavior for delete_record
        """
        # Retryable error (max retries reached)
        zone = DummyConn(find_records=[self._retryable_error, self._retryable_error])
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.delete_record("a", "b", "c", "d", error_retries=2,)
            assert zone.find_records.call_count == 2
            assert not result

        # Retryable error (passes on 2nd attempt)
        zone = DummyConn(
            find_records=[True, True],
            delete_record=[self._retryable_error, namedtuple("Status", "id")("foo")],
        )
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            result = boto_route53.delete_record("a", "b", "c", "d")
            assert zone.delete_record.call_count == 2
            assert result

        # Non-retryable error (should re-raise DNSServerError)
        zone = DummyConn(find_records=[self._fatal_error])
        conn = DummyConn(get_zone=[zone])
        with patch.object(boto_route53, "_get_conn", MagicMock(return_value=conn)):
            try:
                result = boto_route53.delete_record("a", "b", "c", "d")
            except DNSServerError:
                # This is the expected result
                pass
            else:
                raise Exception("DNSServerError not raised")

View file

@ -149,3 +149,39 @@ class ChocolateyTestCase(TestCase, LoaderModuleMockMixin):
self.assertEqual(result, expected)
# Does it populate __context__
self.assertEqual(chocolatey.__context__["chocolatey._path"], expected)
def test_version_check_remote_false(self):
"""
Test version when remote is False
"""
list_return_value = {"ack": ["3.1.1"]}
with patch.object(chocolatey, "list_", return_value=list_return_value):
expected = {"ack": ["3.1.1"]}
result = chocolatey.version("ack", check_remote=False)
self.assertEqual(result, expected)
def test_version_check_remote_true(self):
"""
Test version when remote is True
"""
list_side_effect = [
{"ack": ["3.1.1"]},
{"ack": ["3.1.1"], "Wolfpack": ["3.0.17"], "blackbird": ["1.0.79.3"]},
]
with patch.object(chocolatey, "list_", side_effect=list_side_effect):
expected = {"ack": {"available": ["3.1.1"], "installed": ["3.1.1"]}}
result = chocolatey.version("ack", check_remote=True)
self.assertEqual(result, expected)
def test_version_check_remote_true_not_available(self):
"""
Test version when remote is True but remote version is unavailable
"""
list_side_effect = [
{"ack": ["3.1.1"]},
{"Wolfpack": ["3.0.17"], "blackbird": ["1.0.79.3"]},
]
with patch.object(chocolatey, "list_", side_effect=list_side_effect):
expected = {"ack": {"installed": ["3.1.1"]}}
result = chocolatey.version("ack", check_remote=True)
self.assertEqual(result, expected)

File diff suppressed because it is too large Load diff

View file

@ -48,19 +48,31 @@ class EnvironTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(os.environ, {}), patch.dict(
environ.__utils__,
{"reg.set_value": MagicMock(), "reg.delete_value": MagicMock()},
), patch("salt.utils.platform.is_windows", MagicMock(return_value=True)):
), patch("salt.utils.platform.is_windows", return_value=True):
environ.setval("key", "Test", permanent=True)
environ.__utils__["reg.set_value"].assert_called_with(
"HKCU", "Environment", "key", "Test"
)
def test_set_val_permanent_false_unsets(self):
with patch.dict(os.environ, {}), patch.dict(
environ.__utils__,
{"reg.set_value": MagicMock(), "reg.delete_value": MagicMock()},
), patch("salt.utils.platform.is_windows", return_value=True):
environ.setval("key", False, false_unsets=True, permanent=True)
environ.__utils__["reg.set_value"].asset_not_called()
environ.__utils__["reg.set_value"].assert_not_called()
environ.__utils__["reg.delete_value"].assert_called_with(
"HKCU", "Environment", "key"
)
def test_set_val_permanent_hklm(self):
with patch.dict(os.environ, {}), patch.dict(
environ.__utils__,
{"reg.set_value": MagicMock(), "reg.delete_value": MagicMock()},
), patch("salt.utils.platform.is_windows", return_value=True):
key = r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment"
environ.setval("key", "Test", permanent="HKLM")
environ.__utils__["reg.set_value"].assert_called_with(

File diff suppressed because it is too large Load diff

View file

@ -1167,8 +1167,9 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
MockTarFile.path = ""
with patch("salt.utils.files.fopen", mock_open()), patch.object(
salt.utils.json, "loads", mock_json_loads_true
):
), patch.object(state, "_format_cached_grains", MagicMock()):
self.assertEqual(state.pkg(tar_file, 0, "md5"), True)
state._format_cached_grains.assert_called_once()
MockTarFile.path = ""
if six.PY2:

View file

@ -1318,7 +1318,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin):
<alias name='net1'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x1'/>
</interface>
<graphics type='spice' port='5900' autoport='yes' listen='127.0.0.1'>
<graphics type='spice' listen='127.0.0.1' autoport='yes'>
<listen type='address' address='127.0.0.1'/>
</graphics>
<video>

View file

@ -1301,64 +1301,243 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
with pytest.raises(CommandExecutionError):
yumpkg._get_yum_config()
def test_group_install(self):
def test_group_info(self):
"""
Test group_install uses the correct keys from group_info and installs
default and mandatory packages.
Test yumpkg.group_info parsing
"""
groupinfo_output = """
Group: Printing Client
Group-Id: print-client
Description: Tools for printing to a local printer or a remote print server.
expected = {
"conditional": [],
"default": ["qgnomeplatform", "xdg-desktop-portal-gtk"],
"description": "GNOME is a highly intuitive and user friendly desktop environment.",
"group": "GNOME",
"id": "gnome-desktop",
"mandatory": [
"NetworkManager-libreswan-gnome",
"PackageKit-command-not-found",
"PackageKit-gtk3-module",
"abrt-desktop",
"at-spi2-atk",
"at-spi2-core",
"avahi",
"baobab",
"caribou",
"caribou-gtk2-module",
"caribou-gtk3-module",
"cheese",
"chrome-gnome-shell",
"compat-cheese314",
"control-center",
"dconf",
"empathy",
"eog",
"evince",
"evince-nautilus",
"file-roller",
"file-roller-nautilus",
"firewall-config",
"firstboot",
"fprintd-pam",
"gdm",
"gedit",
"glib-networking",
"gnome-bluetooth",
"gnome-boxes",
"gnome-calculator",
"gnome-classic-session",
"gnome-clocks",
"gnome-color-manager",
"gnome-contacts",
"gnome-dictionary",
"gnome-disk-utility",
"gnome-font-viewer",
"gnome-getting-started-docs",
"gnome-icon-theme",
"gnome-icon-theme-extras",
"gnome-icon-theme-symbolic",
"gnome-initial-setup",
"gnome-packagekit",
"gnome-packagekit-updater",
"gnome-screenshot",
"gnome-session",
"gnome-session-xsession",
"gnome-settings-daemon",
"gnome-shell",
"gnome-software",
"gnome-system-log",
"gnome-system-monitor",
"gnome-terminal",
"gnome-terminal-nautilus",
"gnome-themes-standard",
"gnome-tweak-tool",
"gnome-user-docs",
"gnome-weather",
"gucharmap",
"gvfs-afc",
"gvfs-afp",
"gvfs-archive",
"gvfs-fuse",
"gvfs-goa",
"gvfs-gphoto2",
"gvfs-mtp",
"gvfs-smb",
"initial-setup-gui",
"libcanberra-gtk2",
"libcanberra-gtk3",
"libproxy-mozjs",
"librsvg2",
"libsane-hpaio",
"metacity",
"mousetweaks",
"nautilus",
"nautilus-sendto",
"nm-connection-editor",
"orca",
"redhat-access-gui",
"sane-backends-drivers-scanners",
"seahorse",
"setroubleshoot",
"sushi",
"totem",
"totem-nautilus",
"vinagre",
"vino",
"xdg-user-dirs-gtk",
"yelp",
],
"optional": [
"",
"alacarte",
"dconf-editor",
"dvgrab",
"fonts-tweak-tool",
"gconf-editor",
"gedit-plugins",
"gnote",
"libappindicator-gtk3",
"seahorse-nautilus",
"seahorse-sharing",
"vim-X11",
"xguest",
],
"type": "package group",
}
cmd_out = """Group: GNOME
Group-Id: gnome-desktop
Description: GNOME is a highly intuitive and user friendly desktop environment.
Mandatory Packages:
+cups
+cups-pk-helper
+enscript
+ghostscript-cups
=NetworkManager-libreswan-gnome
=PackageKit-command-not-found
=PackageKit-gtk3-module
abrt-desktop
=at-spi2-atk
=at-spi2-core
=avahi
=baobab
-caribou
-caribou-gtk2-module
-caribou-gtk3-module
=cheese
=chrome-gnome-shell
=compat-cheese314
=control-center
=dconf
=empathy
=eog
=evince
=evince-nautilus
=file-roller
=file-roller-nautilus
=firewall-config
=firstboot
fprintd-pam
=gdm
=gedit
=glib-networking
=gnome-bluetooth
=gnome-boxes
=gnome-calculator
=gnome-classic-session
=gnome-clocks
=gnome-color-manager
=gnome-contacts
=gnome-dictionary
=gnome-disk-utility
=gnome-font-viewer
=gnome-getting-started-docs
=gnome-icon-theme
=gnome-icon-theme-extras
=gnome-icon-theme-symbolic
=gnome-initial-setup
=gnome-packagekit
=gnome-packagekit-updater
=gnome-screenshot
=gnome-session
=gnome-session-xsession
=gnome-settings-daemon
=gnome-shell
=gnome-software
=gnome-system-log
=gnome-system-monitor
=gnome-terminal
=gnome-terminal-nautilus
=gnome-themes-standard
=gnome-tweak-tool
=gnome-user-docs
=gnome-weather
=gucharmap
=gvfs-afc
=gvfs-afp
=gvfs-archive
=gvfs-fuse
=gvfs-goa
=gvfs-gphoto2
=gvfs-mtp
=gvfs-smb
initial-setup-gui
=libcanberra-gtk2
=libcanberra-gtk3
=libproxy-mozjs
=librsvg2
=libsane-hpaio
=metacity
=mousetweaks
=nautilus
=nautilus-sendto
=nm-connection-editor
=orca
-redhat-access-gui
=sane-backends-drivers-scanners
=seahorse
=setroubleshoot
=sushi
=totem
=totem-nautilus
=vinagre
=vino
=xdg-user-dirs-gtk
=yelp
Default Packages:
+colord
+gutenprint
+gutenprint-cups
+hpijs
+paps
+pnm2ppa
+python-smbc
+system-config-printer
+system-config-printer-udev
=qgnomeplatform
=xdg-desktop-portal-gtk
Optional Packages:
hplip
hplip-gui
samba-krb5-printing
alacarte
dconf-editor
dvgrab
fonts-tweak-tool
gconf-editor
gedit-plugins
gnote
libappindicator-gtk3
seahorse-nautilus
seahorse-sharing
vim-X11
xguest
"""
install = MagicMock()
with patch.dict(
yumpkg.__salt__,
{"cmd.run_stdout": MagicMock(return_value=groupinfo_output)},
yumpkg.__salt__, {"cmd.run_stdout": MagicMock(return_value=cmd_out)}
):
with patch.dict(yumpkg.__salt__, {"cmd.run": MagicMock(return_value="")}):
with patch.dict(
yumpkg.__salt__,
{"pkg_resource.format_pkg_list": MagicMock(return_value={})},
):
with patch.object(yumpkg, "install", install):
yumpkg.group_install("Printing Client")
install.assert_called_once_with(
pkgs=[
"cups",
"cups-pk-helper",
"enscript",
"ghostscript-cups",
"colord",
"gutenprint",
"gutenprint-cups",
"hpijs",
"paps",
"pnm2ppa",
"python-smbc",
"system-config-printer",
"system-config-printer-udev",
]
)
info = yumpkg.group_info("@gnome-desktop")
self.assertDictEqual(info, expected)
@skipIf(pytest is None, "PyTest is missing")

View file

@ -24,7 +24,6 @@ class TestEnvironState(TestCase, LoaderModuleMockMixin):
"__env__": "base",
"__opts__": {"test": False},
"__salt__": {"environ.setenv": envmodule.setenv},
"__utils__": {"reg.read_value": salt.modules.reg.read_value},
}
return {envstate: loader_globals, envmodule: loader_globals}
@ -54,11 +53,15 @@ class TestEnvironState(TestCase, LoaderModuleMockMixin):
@skipIf(not salt.utils.platform.is_windows(), "Windows only")
def test_setenv_permanent(self):
"""
test that we can set perminent environment variables (requires pywin32)
test that we can set permanent environment variables (requires pywin32)
"""
with patch.dict(
envmodule.__utils__,
{"reg.set_value": MagicMock(), "reg.delete_value": MagicMock()},
{
"reg.set_value": MagicMock(),
"reg.delete_value": MagicMock(),
"win_functions.broadcast_setting_change": MagicMock(),
},
):
ret = envstate.setenv("test", "value", permanent=True)
self.assertEqual(ret["changes"], {"test": "value"})

Some files were not shown because too many files have changed in this diff Show more