Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)
Merge branch '2017.7' into '2018.3'

Due to the many merge conflicts created from #47106 against the 2017.7 branch and #46002 against the 2018.3 branch, the changes from #47106 have been largely removed from this merge forward and the HEAD of 2018.3 was taken. A separate fix for Tornado 5.0 support will need to be made directly against the 2018.3 branch.

Conflicts:
- doc/topics/development/conventions/formulas.rst
- salt/master.py
- salt/minion.py
- salt/netapi/rest_tornado/saltnado.py
- salt/states/zfs.py
- salt/transport/ipc.py
- salt/transport/tcp.py
- salt/transport/zeromq.py
- salt/utils/async.py
- tests/support/helpers.py
- tests/support/parser/cover.py
- tests/unit/grains/test_core.py
- tests/unit/modules/test_ssh.py
- tests/unit/test_minion.py
- tests/unit/utils/test_safe_walk.py

Commit 13e8124031: 40 changed files with 740 additions and 321 deletions
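The Tornado 5.0 note above is reflected further down in this diff by capping the dependency; a minimal, hypothetical illustration of such a cap (the version bound is taken from the tornado<5.0 pins that appear later in this commit, everything else is assumed):

    # Hypothetical illustration only: keep Tornado below the 5.x series,
    # mirroring the "tornado<5.0" pip pins added later in this commit.
    pip install 'tornado<5.0'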
.github/CODEOWNERS (vendored): 32 changed lines
@@ -9,43 +9,45 @@
# See https://help.github.com/articles/about-codeowners/
# for more info about the CODEOWNERS file

# This file uses an fnmatch-style matching pattern.

# Team Boto
salt/**/*boto* @saltstack/team-boto

# Team Core
salt/auth/ @saltstack/team-core
salt/cache/ @saltstack/team-core
salt/cli/ @saltstack/team-core
salt/auth/* @saltstack/team-core
salt/cache/* @saltstack/team-core
salt/cli/* @saltstack/team-core
salt/client/* @saltstack/team-core
salt/config/* @saltstack/team-core
salt/daemons/ @saltstack/team-core
salt/pillar/ @saltstack/team-core
salt/daemons/* @saltstack/team-core
salt/pillar/* @saltstack/team-core
salt/loader.py @saltstack/team-core
salt/payload.py @saltstack/team-core
salt/**/master* @saltstack/team-core
salt/**/minion* @saltstack/team-core

# Team Cloud
salt/cloud/ @saltstack/team-cloud
salt/utils/openstack/ @saltstack/team-cloud
salt/cloud/* @saltstack/team-cloud
salt/utils/openstack/* @saltstack/team-cloud
salt/utils/aws.py @saltstack/team-cloud
salt/**/*cloud* @saltstack/team-cloud

# Team NetAPI
salt/cli/api.py @saltstack/team-netapi
salt/client/netapi.py @saltstack/team-netapi
salt/netapi/ @saltstack/team-netapi
salt/netapi/* @saltstack/team-netapi

# Team Network
salt/proxy/ @saltstack/team-proxy
salt/proxy/* @saltstack/team-proxy

# Team SPM
salt/cli/spm.py @saltstack/team-spm
salt/spm/ @saltstack/team-spm
salt/spm/* @saltstack/team-spm

# Team SSH
salt/cli/ssh.py @saltstack/team-ssh
salt/client/ssh/ @saltstack/team-ssh
salt/client/ssh/* @saltstack/team-ssh
salt/runners/ssh.py @saltstack/team-ssh
salt/**/thin.py @saltstack/team-ssh

@@ -61,8 +63,12 @@ salt/**/*xfs* @saltstack/team-suse
salt/**/*zypper* @saltstack/team-suse

# Team Transport
salt/transport/ @saltstack/team-transport
salt/transport/* @saltstack/team-transport
salt/utils/zeromq.py @saltstack/team-transport

# Team Windows
salt/**/*win* @saltstack/team-windows
salt/*/*win* @saltstack/team-windows
salt/modules/reg.py @saltstack/team-windows
salt/states/reg.py @saltstack/team-windows
tests/*/*win* @saltstack/team-windows
tests/*/test_reg.py @saltstack/team-windows
@@ -323,6 +323,7 @@ rst_prolog = """\
.. _`salt-users`: https://groups.google.com/forum/#!forum/salt-users
.. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce
.. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers
.. _`salt-slack`: https://saltstackcommunity.herokuapp.com/
.. |windownload| raw:: html

     <p>Python2 x86: <a
@@ -60,7 +60,7 @@ Fork a Repo Guide_>`_ and is well worth reading.
isolated into separate branches.

If you're working on a bug or documentation fix, create your branch from
the oldest release branch that contains the bug or requires the documentation
the oldest **supported** main release branch that contains the bug or requires the documentation
update. See :ref:`Which Salt Branch? <which-salt-branch>`.

.. code-block:: bash

@@ -212,8 +212,11 @@ There are three different kinds of branches in use: develop, main release
branches, and dot release branches.

- All feature work should go into the ``develop`` branch.
- Bug fixes and documentation changes should go into the oldest supported
  **main** release branch affected by the the bug or documentation change.
- Bug fixes and documentation changes should go into the oldest **supported
  main** release branch affected by the the bug or documentation change (you
  can use the blame button in github to figure out when the bug was introduced).
  Supported releases are the last 2 releases. For example, if the latest release
  is 2018.3, the last two release are 2018.3 and 2017.7.
  Main release branches are named after a year and month, such as
  ``2016.11`` and ``2017.7``.
- Hot fixes, as determined by SaltStack's release team, should be submitted

@@ -247,7 +250,7 @@ Main Release Branches
=====================

The current release branch is the most recent stable release. Pull requests
containing bug fixes or documentation changes should be made against the main
containing bug fixes or documentation changes should be made against the oldest supported main
release branch that is affected.

The branch name will be a date-based name such as ``2016.11``.
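A command-line equivalent of the "blame button" workflow described above might look like the following sketch; the file path, line range, and commit hash are placeholders rather than anything taken from this commit:

    # Hypothetical sketch: find the commit that introduced the bug, then see
    # which release branches already contain it and target the oldest supported one.
    git blame -L 100,120 salt/modules/example.py
    git branch -r --contains <commit-sha> | grep -E '20[0-9]{2}\.' | sort -V | head -n 1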
@@ -221,8 +221,9 @@ The best way to create new Formula repositories for now is to create a
repository in your own account on GitHub and notify a SaltStack employee when
it is ready. We will add you to the Contributors team on the
`saltstack-formulas`_ organization and help you transfer the repository over.
Ping a SaltStack employee on IRC (``#salt`` on Freenode) or send an email to
the `salt-users`_ mailing list.
Ping a SaltStack employee on IRC (``#salt`` on Freenode), join the
``#formulas`` channel on the `salt-slack`_ or send an email to the
`salt-users`_ mailing list.

There are a lot of repositories in that organization! Team members can manage
which repositories they are subscribed to on GitHub's watching page:
@@ -20,6 +20,9 @@ Statistics:

Changes:

This release includes a CVE Fix:

CVE-2017-7893: Compromised salt-minions can impersonate the salt-master. (Discovery credit: Frank Spierings)

- **PR** `#39855`_: (*Foxlik*) Use regular expression instead of split when replacing authorized_keys
  @ *2017-03-22T18:28:32Z*
@@ -1,4 +1,4 @@
-r base.txt
-r base-py2.txt

mock>=2.0.0
apache-libcloud>=0.14.0

@@ -6,7 +6,7 @@ boto>=2.32.1
boto3>=1.2.1
moto>=0.3.6
SaltPyLint>=v2017.3.6
pytest
pytest>=3.5.0
git+https://github.com/eisensheng/pytest-catchlog.git@develop#egg=Pytest-catchlog
git+https://github.com/saltstack/pytest-salt.git@master#egg=pytest-salt
testinfra>=1.7.0
@@ -1,4 +1,4 @@
-r base.txt
-r base-py3.txt

mock>=2.0.0
apache-libcloud>=0.14.0

@@ -11,7 +11,7 @@ moto>=0.3.6
# prevent it from being successfully installed (at least on Python 3.4).
httpretty
SaltPyLint>=v2017.2.29
pytest
pytest>=3.5.0
git+https://github.com/saltstack/pytest-salt.git@master#egg=pytest-salt
git+https://github.com/eisensheng/pytest-catchlog.git@develop#egg=Pytest-catchlog
testinfra>=1.7.0
@@ -1,3 +1,3 @@
pytest
pytest>=3.5.0
pytest-helpers-namespace
pytest-tempdir
@@ -2043,9 +2043,10 @@ def list_input_endpoints(kwargs=None, conn=None, call=None):

    ret = {}
    for item in data:
        if 'Role' not in item:
            continue
        for role in item['Role']:
        if 'Role' in item:
            role = item['Role']
            if not isinstance(role, dict):
                return ret
            input_endpoint = role['ConfigurationSets']['ConfigurationSet'].get('InputEndpoints', {}).get('InputEndpoint')
            if not input_endpoint:
                continue

@@ -2053,6 +2054,7 @@ def list_input_endpoints(kwargs=None, conn=None, call=None):
                input_endpoint = [input_endpoint]
            for endpoint in input_endpoint:
                ret[endpoint['Name']] = endpoint
            return ret
    return ret
|
|
@ -9,7 +9,7 @@
|
|||
#
|
||||
# BUGS: https://github.com/saltstack/salt-bootstrap/issues
|
||||
#
|
||||
# COPYRIGHT: (c) 2012-2017 by the SaltStack Team, see AUTHORS.rst for more
|
||||
# COPYRIGHT: (c) 2012-2018 by the SaltStack Team, see AUTHORS.rst for more
|
||||
# details.
|
||||
#
|
||||
# LICENSE: Apache 2.0
|
||||
|
@ -18,7 +18,7 @@
|
|||
#======================================================================================================================
|
||||
set -o nounset # Treat unset variables as an error
|
||||
|
||||
__ScriptVersion="2017.12.13"
|
||||
__ScriptVersion="2018.04.25"
|
||||
__ScriptName="bootstrap-salt.sh"
|
||||
|
||||
__ScriptFullName="$0"
|
||||
|
@ -249,7 +249,6 @@ _CURL_ARGS=${BS_CURL_ARGS:-}
|
|||
_FETCH_ARGS=${BS_FETCH_ARGS:-}
|
||||
_GPG_ARGS=${BS_GPG_ARGS:-}
|
||||
_WGET_ARGS=${BS_WGET_ARGS:-}
|
||||
_ENABLE_EXTERNAL_ZMQ_REPOS=${BS_ENABLE_EXTERNAL_ZMQ_REPOS:-$BS_FALSE}
|
||||
_SALT_MASTER_ADDRESS=${BS_SALT_MASTER_ADDRESS:-null}
|
||||
_SALT_MINION_ID="null"
|
||||
# _SIMPLIFY_VERSION is mostly used in Solaris based distributions
|
||||
|
@ -299,13 +298,13 @@ __usage() {
|
|||
Examples:
|
||||
- ${__ScriptName}
|
||||
- ${__ScriptName} stable
|
||||
- ${__ScriptName} stable 2016.3
|
||||
- ${__ScriptName} stable 2016.3.1
|
||||
- ${__ScriptName} stable 2017.7
|
||||
- ${__ScriptName} stable 2017.7.2
|
||||
- ${__ScriptName} daily
|
||||
- ${__ScriptName} testing
|
||||
- ${__ScriptName} git
|
||||
- ${__ScriptName} git 2016.3
|
||||
- ${__ScriptName} git v2016.3.1
|
||||
- ${__ScriptName} git 2017.7
|
||||
- ${__ScriptName} git v2017.7.2
|
||||
- ${__ScriptName} git 06f249901a2e2f1ed310d58ea3921a129f214358
|
||||
|
||||
Options:
|
||||
|
@ -355,8 +354,6 @@ __usage() {
|
|||
per -p flag. You're responsible for providing the proper package name.
|
||||
-H Use the specified HTTP proxy for all download URLs (including https://).
|
||||
For example: http://myproxy.example.com:3128
|
||||
-Z Enable additional package repository for newer ZeroMQ
|
||||
(only available for RHEL/CentOS/Fedora/Ubuntu based distributions)
|
||||
-b Assume that dependencies are already installed and software sources are
|
||||
set up. If git is selected, git tree is still checked out as dependency
|
||||
step.
|
||||
|
@ -395,7 +392,7 @@ __usage() {
|
|||
tested with Centos 6 and is considered experimental. This will install the
|
||||
ius repo on the box if disable repo is false. This must be used in conjunction
|
||||
with -x <pythonversion>. For example:
|
||||
sh bootstrap.sh -P -y -x python2.7 git v2016.11.3
|
||||
sh bootstrap.sh -P -y -x python2.7 git v2017.7.2
|
||||
The above will install python27 and install the git version of salt using the
|
||||
python2.7 executable. This only works for git and pip installations.
|
||||
|
||||
|
@ -438,7 +435,6 @@ do
|
|||
p ) _EXTRA_PACKAGES="$_EXTRA_PACKAGES $OPTARG" ;;
|
||||
d ) _DISABLE_SALT_CHECKS=$BS_TRUE ;;
|
||||
H ) _HTTP_PROXY="$OPTARG" ;;
|
||||
Z ) _ENABLE_EXTERNAL_ZMQ_REPOS=$BS_TRUE ;;
|
||||
b ) _NO_DEPS=$BS_TRUE ;;
|
||||
f ) _FORCE_SHALLOW_CLONE=$BS_TRUE ;;
|
||||
l ) _DISABLE_SSL=$BS_TRUE ;;
|
||||
|
@ -593,14 +589,14 @@ elif [ "$ITYPE" = "stable" ]; then
|
|||
if [ "$#" -eq 0 ];then
|
||||
STABLE_REV="latest"
|
||||
else
|
||||
if [ "$(echo "$1" | egrep '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11|2017\.7)$')" != "" ]; then
|
||||
if [ "$(echo "$1" | egrep '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11|2017\.7|2018\.3)$')" != "" ]; then
|
||||
STABLE_REV="$1"
|
||||
shift
|
||||
elif [ "$(echo "$1" | egrep '^([0-9]*\.[0-9]*\.[0-9]*)$')" != "" ]; then
|
||||
STABLE_REV="archive/$1"
|
||||
shift
|
||||
else
|
||||
echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, 2017.7, latest, \$MAJOR.\$MINOR.\$PATCH)"
|
||||
echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, 2017.7, 2018.3, latest, \$MAJOR.\$MINOR.\$PATCH)"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
@ -1331,10 +1327,10 @@ __check_dpkg_architecture() {
|
|||
if [ "${error_msg}" != "" ]; then
|
||||
echoerror "${error_msg}"
|
||||
if [ "$ITYPE" != "git" ]; then
|
||||
echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v2016.11.5."
|
||||
echoerror "You can try git installation mode, i.e.: sh ${__ScriptName} git v2017.7.2."
|
||||
echoerror "It may be necessary to use git installation mode with pip and disable the SaltStack apt repository."
|
||||
echoerror "For example:"
|
||||
echoerror " sh ${__ScriptName} -r -P git v2016.11.5"
|
||||
echoerror " sh ${__ScriptName} -r -P git v2017.7.2"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
@ -1372,16 +1368,10 @@ __ubuntu_codename_translation() {
|
|||
DISTRO_CODENAME="trusty"
|
||||
;;
|
||||
"16")
|
||||
if [ "$_april" ]; then
|
||||
DISTRO_CODENAME="xenial"
|
||||
else
|
||||
DISTRO_CODENAME="yakkety"
|
||||
fi
|
||||
DISTRO_CODENAME="xenial"
|
||||
;;
|
||||
"17")
|
||||
if [ "$_april" ]; then
|
||||
DISTRO_CODENAME="zesty"
|
||||
fi
|
||||
DISTRO_CODENAME="artful"
|
||||
;;
|
||||
*)
|
||||
DISTRO_CODENAME="trusty"
|
||||
|
@ -1500,9 +1490,12 @@ __check_end_of_life_versions() {
|
|||
# < 14.04
|
||||
# = 14.10
|
||||
# = 15.04, 15.10
|
||||
# = 16.10
|
||||
# = 17.04
|
||||
if [ "$DISTRO_MAJOR_VERSION" -lt 14 ] || \
|
||||
[ "$DISTRO_MAJOR_VERSION" -eq 15 ] || \
|
||||
([ "$DISTRO_MAJOR_VERSION" -lt 16 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]); then
|
||||
([ "$DISTRO_MAJOR_VERSION" -eq 17 ] && [ "$DISTRO_MINOR_VERSION" -eq 04 ]) || \
|
||||
([ "$DISTRO_MAJOR_VERSION" -lt 17 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]); then
|
||||
echoerror "End of life distributions are not supported."
|
||||
echoerror "Please consider upgrading to the next stable. See:"
|
||||
echoerror " https://wiki.ubuntu.com/Releases"
|
||||
|
@ -1544,8 +1537,8 @@ __check_end_of_life_versions() {
|
|||
;;
|
||||
|
||||
fedora)
|
||||
# Fedora lower than 25 are no longer supported
|
||||
if [ "$DISTRO_MAJOR_VERSION" -lt 25 ]; then
|
||||
# Fedora lower than 26 are no longer supported
|
||||
if [ "$DISTRO_MAJOR_VERSION" -lt 26 ]; then
|
||||
echoerror "End of life distributions are not supported."
|
||||
echoerror "Please consider upgrading to the next stable. See:"
|
||||
echoerror " https://fedoraproject.org/wiki/Releases"
|
||||
|
@ -1765,12 +1758,41 @@ __function_defined() {
|
|||
}
|
||||
|
||||
|
||||
#--- FUNCTION -------------------------------------------------------------------------------------------------------
|
||||
# NAME: __wait_for_apt
|
||||
# DESCRIPTION: Check if any apt, apt-get, aptitude, or dpkg processes are running before
|
||||
# calling these again. This is useful when these process calls are part of
|
||||
# a boot process, such as on AWS AMIs. This func will wait until the boot
|
||||
# process is finished so the script doesn't exit on a locked proc.
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__wait_for_apt(){
|
||||
echodebug "Checking if apt process is currently running."
|
||||
|
||||
# Timeout set at 15 minutes
|
||||
WAIT_TIMEOUT=900
|
||||
|
||||
while ps -C apt,apt-get,aptitude,dpkg >/dev/null; do
|
||||
sleep 1
|
||||
WAIT_TIMEOUT=$((WAIT_TIMEOUT - 1))
|
||||
|
||||
# If timeout reaches 0, abort.
|
||||
if [ "$WAIT_TIMEOUT" -eq 0 ]; then
|
||||
echoerror "Apt, apt-get, aptitude, or dpkg process is taking too long."
|
||||
echoerror "Bootstrap script cannot proceed. Aborting."
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
|
||||
echodebug "No apt processes are currently running."
|
||||
}
|
||||
|
||||
#--- FUNCTION -------------------------------------------------------------------------------------------------------
|
||||
# NAME: __apt_get_install_noinput
|
||||
# DESCRIPTION: (DRY) apt-get install with noinput options
|
||||
# PARAMETERS: packages
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__apt_get_install_noinput() {
|
||||
__wait_for_apt
|
||||
apt-get install -y -o DPkg::Options::=--force-confold "${@}"; return $?
|
||||
} # ---------- end of function __apt_get_install_noinput ----------
|
||||
|
||||
|
@ -1780,6 +1802,7 @@ __apt_get_install_noinput() {
|
|||
# DESCRIPTION: (DRY) apt-get upgrade with noinput options
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__apt_get_upgrade_noinput() {
|
||||
__wait_for_apt
|
||||
apt-get upgrade -y -o DPkg::Options::=--force-confold; return $?
|
||||
} # ---------- end of function __apt_get_upgrade_noinput ----------
|
||||
|
||||
|
@ -1790,6 +1813,7 @@ __apt_get_upgrade_noinput() {
|
|||
# PARAMETERS: url
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__apt_key_fetch() {
|
||||
__wait_for_apt
|
||||
url=$1
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
|
@ -2544,7 +2568,7 @@ __enable_universe_repository() {
|
|||
|
||||
__install_saltstack_ubuntu_repository() {
|
||||
# Workaround for latest non-LTS ubuntu
|
||||
if [ "$DISTRO_VERSION" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
|
||||
if [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
|
||||
echowarn "Non-LTS Ubuntu detected, but stable packages requested. Trying packages from latest LTS release. You may experience problems."
|
||||
UBUNTU_VERSION=16.04
|
||||
UBUNTU_CODENAME="xenial"
|
||||
|
@ -2556,8 +2580,8 @@ __install_saltstack_ubuntu_repository() {
|
|||
__PACKAGES=''
|
||||
|
||||
# Install downloader backend for GPG keys fetching
|
||||
if [ "$DISTRO_VERSION" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
|
||||
__PACKAGES="${__PACKAGES} gnupg2 dirmngr"
|
||||
if [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
|
||||
__PACKAGES="${__PACKAGES} gnupg dirmngr"
|
||||
else
|
||||
__PACKAGES="${__PACKAGES} gnupg-curl"
|
||||
fi
|
||||
|
@ -2576,6 +2600,7 @@ __install_saltstack_ubuntu_repository() {
|
|||
|
||||
__apt_key_fetch "$SALTSTACK_UBUNTU_URL/SALTSTACK-GPG-KEY.pub" || return 1
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update
|
||||
}
|
||||
|
||||
|
@ -2588,6 +2613,7 @@ install_ubuntu_deps() {
|
|||
|
||||
__enable_universe_repository || return 1
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update
|
||||
fi
|
||||
|
||||
|
@ -2644,6 +2670,7 @@ install_ubuntu_stable_deps() {
|
|||
# No user interaction, libc6 restart services for example
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update
|
||||
|
||||
if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
|
||||
|
@ -2664,6 +2691,7 @@ install_ubuntu_stable_deps() {
|
|||
}
|
||||
|
||||
install_ubuntu_daily_deps() {
|
||||
__wait_for_apt
|
||||
install_ubuntu_stable_deps || return 1
|
||||
|
||||
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
|
||||
|
@ -2681,6 +2709,7 @@ install_ubuntu_daily_deps() {
|
|||
}
|
||||
|
||||
install_ubuntu_git_deps() {
|
||||
__wait_for_apt
|
||||
apt-get update
|
||||
|
||||
if ! __check_command_exists git; then
|
||||
|
@ -2711,8 +2740,8 @@ install_ubuntu_git_deps() {
|
|||
else
|
||||
install_ubuntu_stable_deps || return 1
|
||||
|
||||
__PACKAGES="${__PACKAGES} python-crypto python-jinja2 python-msgpack python-requests"
|
||||
__PACKAGES="${__PACKAGES} python-tornado python-yaml python-zmq"
|
||||
__PACKAGES="${__PACKAGES} python-crypto python-jinja2 python-m2crypto python-msgpack"
|
||||
__PACKAGES="${__PACKAGES} python-requests python-tornado python-yaml python-zmq"
|
||||
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
# Install python-libcloud if asked to
|
||||
|
@ -2791,7 +2820,7 @@ install_ubuntu_stable_post() {
|
|||
/bin/systemctl preset salt-$fname.service > /dev/null 2>&1 &&
|
||||
/bin/systemctl enable salt-$fname.service > /dev/null 2>&1
|
||||
)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
/bin/systemctl daemon-reload
|
||||
elif [ -f /etc/init.d/salt-$fname ]; then
|
||||
update-rc.d salt-$fname defaults
|
||||
|
@ -2817,7 +2846,7 @@ install_ubuntu_git_post() {
|
|||
[ $fname = "api" ] && continue
|
||||
|
||||
systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
systemctl daemon-reload
|
||||
elif [ -f /sbin/initctl ]; then
|
||||
_upstart_conf="/etc/init/salt-$fname.conf"
|
||||
|
@ -2973,6 +3002,7 @@ __install_saltstack_debian_repository() {
|
|||
|
||||
__apt_key_fetch "$SALTSTACK_DEBIAN_URL/SALTSTACK-GPG-KEY.pub" || return 1
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update
|
||||
}
|
||||
|
||||
|
@ -2984,6 +3014,7 @@ install_debian_deps() {
|
|||
# No user interaction, libc6 restart services for example
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update
|
||||
|
||||
if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
|
||||
|
@ -3030,9 +3061,9 @@ install_debian_git_deps() {
|
|||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES="libzmq3 libzmq3-dev lsb-release python-apt python-backports.ssl-match-hostname python-crypto"
|
||||
__PACKAGES="${__PACKAGES} python-jinja2 python-msgpack python-requests"
|
||||
__PACKAGES="${__PACKAGES} python-tornado python-yaml python-zmq"
|
||||
__PACKAGES="libzmq3 libzmq3-dev lsb-release python-apt python-backports.ssl-match-hostname"
|
||||
__PACKAGES="${__PACKAGES} python-crypto python-jinja2 python-msgpack python-m2crypto"
|
||||
__PACKAGES="${__PACKAGES} python-requests python-tornado python-yaml python-zmq"
|
||||
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
# Install python-libcloud if asked to
|
||||
|
@ -3071,8 +3102,9 @@ install_debian_8_git_deps() {
|
|||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES="libzmq3 libzmq3-dev lsb-release python-apt python-crypto python-jinja2 python-msgpack"
|
||||
__PACKAGES="${__PACKAGES} python-requests python-systemd python-yaml python-zmq"
|
||||
__PACKAGES="libzmq3 libzmq3-dev lsb-release python-apt python-crypto python-jinja2"
|
||||
__PACKAGES="${__PACKAGES} python-m2crypto python-msgpack python-requests python-systemd"
|
||||
__PACKAGES="${__PACKAGES} python-yaml python-zmq"
|
||||
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
# Install python-libcloud if asked to
|
||||
|
@ -3081,7 +3113,7 @@ install_debian_8_git_deps() {
|
|||
|
||||
__PIP_PACKAGES=''
|
||||
if (__check_pip_allowed >/dev/null 2>&1); then
|
||||
__PIP_PACKAGES='tornado'
|
||||
__PIP_PACKAGES='tornado<5.0'
|
||||
# Install development environment for building tornado Python module
|
||||
__PACKAGES="${__PACKAGES} build-essential python-dev"
|
||||
|
||||
|
@ -3096,6 +3128,7 @@ install_debian_8_git_deps() {
|
|||
/etc/apt/sources.list.d/backports.list
|
||||
fi
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
|
||||
# python-tornado package should be installed from backports repo
|
||||
|
@ -3135,8 +3168,8 @@ install_debian_9_git_deps() {
|
|||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES="libzmq5 lsb-release python-apt python-backports-abc python-crypto"
|
||||
__PACKAGES="${__PACKAGES} python-jinja2 python-msgpack python-requests python-systemd"
|
||||
__PACKAGES="${__PACKAGES} python-tornado python-yaml python-zmq"
|
||||
__PACKAGES="${__PACKAGES} python-jinja2 python-m2crypto python-msgpack python-requests"
|
||||
__PACKAGES="${__PACKAGES} python-systemd python-tornado python-yaml python-zmq"
|
||||
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
# Install python-libcloud if asked to
|
||||
|
@ -3330,15 +3363,8 @@ install_debian_check_services() {
|
|||
|
||||
install_fedora_deps() {
|
||||
|
||||
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
|
||||
if [ "$_ENABLE_EXTERNAL_ZMQ_REPOS" -eq $BS_TRUE ]; then
|
||||
__install_saltstack_copr_zeromq_repository || return 1
|
||||
fi
|
||||
|
||||
__install_saltstack_copr_salt_repository || return 1
|
||||
fi
|
||||
|
||||
__PACKAGES="PyYAML libyaml python-crypto python-jinja2 python-zmq python2-msgpack python2-requests"
|
||||
__PACKAGES="libyaml m2crypto PyYAML python-crypto python-jinja2"
|
||||
__PACKAGES="${__PACKAGES} python2-msgpack python2-requests python-zmq"
|
||||
|
||||
if [ "$DISTRO_MAJOR_VERSION" -lt 26 ]; then
|
||||
__PACKAGES="${__PACKAGES} yum-utils"
|
||||
|
@ -3395,7 +3421,7 @@ install_fedora_stable_post() {
|
|||
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
|
||||
|
||||
systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
systemctl daemon-reload
|
||||
done
|
||||
}
|
||||
|
@ -3456,7 +3482,7 @@ install_fedora_git_post() {
|
|||
[ $fname = "api" ] && continue
|
||||
|
||||
systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
systemctl daemon-reload
|
||||
done
|
||||
}
|
||||
|
@ -3523,20 +3549,6 @@ __install_epel_repository() {
|
|||
return 0
|
||||
}
|
||||
|
||||
__install_saltstack_copr_zeromq_repository() {
|
||||
echoinfo "Installing Zeromq >=4 and PyZMQ>=14 from SaltStack's COPR repository"
|
||||
if [ ! -s /etc/yum.repos.d/saltstack-zeromq4.repo ]; then
|
||||
if [ "${DISTRO_NAME_L}" = "fedora" ]; then
|
||||
__REPOTYPE="${DISTRO_NAME_L}"
|
||||
else
|
||||
__REPOTYPE="epel"
|
||||
fi
|
||||
__fetch_url /etc/yum.repos.d/saltstack-zeromq4.repo \
|
||||
"${HTTP_VAL}://copr.fedorainfracloud.org/coprs/saltstack/zeromq4/repo/${__REPOTYPE}-${DISTRO_MAJOR_VERSION}/saltstack-zeromq4-${__REPOTYPE}-${DISTRO_MAJOR_VERSION}.repo" || return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
__install_saltstack_rhel_repository() {
|
||||
if [ "$ITYPE" = "stable" ]; then
|
||||
repo_rev="$STABLE_REV"
|
||||
|
@ -3550,7 +3562,7 @@ __install_saltstack_rhel_repository() {
|
|||
gpg_key="SALTSTACK-GPG-KEY.pub"
|
||||
repo_file="/etc/yum.repos.d/saltstack.repo"
|
||||
|
||||
if [ ! -s "$repo_file" ]; then
|
||||
if [ ! -s "$repo_file" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then
|
||||
cat <<_eof > "$repo_file"
|
||||
[saltstack]
|
||||
name=SaltStack ${repo_rev} Release Channel for RHEL/CentOS \$releasever
|
||||
|
@ -3564,26 +3576,10 @@ _eof
|
|||
|
||||
fetch_url="${HTTP_VAL}://${_REPO_URL}/yum/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${repo_rev}/"
|
||||
__rpm_import_gpg "${fetch_url}${gpg_key}" || return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
__install_saltstack_copr_salt_repository() {
|
||||
echoinfo "Adding SaltStack's COPR repository"
|
||||
|
||||
if [ "${DISTRO_NAME_L}" = "fedora" ]; then
|
||||
[ "$DISTRO_MAJOR_VERSION" -ge 22 ] && return 0
|
||||
__REPOTYPE="${DISTRO_NAME_L}"
|
||||
else
|
||||
__REPOTYPE="epel"
|
||||
fi
|
||||
|
||||
__REPO_FILENAME="saltstack-salt-${__REPOTYPE}-${DISTRO_MAJOR_VERSION}.repo"
|
||||
|
||||
if [ ! -s "/etc/yum.repos.d/${__REPO_FILENAME}" ]; then
|
||||
__fetch_url "/etc/yum.repos.d/${__REPO_FILENAME}" \
|
||||
"${HTTP_VAL}://copr.fedorainfracloud.org/coprs/saltstack/salt/repo/${__REPOTYPE}-${DISTRO_MAJOR_VERSION}/${__REPO_FILENAME}" || return 1
|
||||
yum clean metadata || return 1
|
||||
elif [ "$repo_rev" != "latest" ]; then
|
||||
echowarn "saltstack.repo already exists, ignoring salt version argument."
|
||||
echowarn "Use -F (forced overwrite) to install $repo_rev."
|
||||
fi
|
||||
|
||||
return 0
|
||||
|
@ -3688,7 +3684,8 @@ install_centos_git_deps() {
|
|||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES="python-crypto python-futures python-msgpack python-zmq python-jinja2 python-requests python-tornado"
|
||||
__PACKAGES="m2crypto python-crypto python-futures python-jinja2 python-msgpack"
|
||||
__PACKAGES="${__PACKAGES} python-requests python-tornado python-zmq"
|
||||
|
||||
if [ "$DISTRO_MAJOR_VERSION" -ge 7 ]; then
|
||||
__PACKAGES="${__PACKAGES} systemd-python"
|
||||
|
@ -3705,7 +3702,12 @@ install_centos_git_deps() {
|
|||
|
||||
if [ "${_PY_EXE}" != "" ]; then
|
||||
# If "-x" is defined, install dependencies with pip based on the Python version given.
|
||||
_PIP_PACKAGES="jinja2 msgpack-python pycrypto PyYAML tornado zmq"
|
||||
_PIP_PACKAGES="m2crypto jinja2 msgpack-python pycrypto PyYAML tornado<5.0 zmq"
|
||||
|
||||
# install swig and openssl on cent6
|
||||
if [ "$DISTRO_MAJOR_VERSION" -eq 6 ]; then
|
||||
__yum_install_noinput openssl-devel swig || return 1
|
||||
fi
|
||||
|
||||
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
|
||||
for SINGLE_PACKAGE in $_PIP_PACKAGES; do
|
||||
|
@ -4275,7 +4277,7 @@ install_alpine_linux_stable_deps() {
|
|||
install_alpine_linux_git_deps() {
|
||||
install_alpine_linux_stable_deps || return 1
|
||||
|
||||
apk -U add python2 py-virtualenv py2-crypto py2-setuptools \
|
||||
apk -U add python2 py-virtualenv py2-crypto py2-m2crypto py2-setuptools \
|
||||
py2-jinja2 py2-yaml py2-markupsafe py2-msgpack py2-psutil \
|
||||
py2-zmq zeromq py2-requests || return 1
|
||||
|
||||
|
@ -4367,6 +4369,7 @@ install_alpine_linux_restart_daemons() {
|
|||
# Skip if not meant to be installed
|
||||
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
|
||||
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
|
||||
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
|
||||
|
||||
# Disable stdin to fix shell session hang on killing tee pipe
|
||||
/sbin/rc-service salt-$fname stop < /dev/null > /dev/null 2>&1
|
||||
|
@ -4382,6 +4385,7 @@ install_alpine_linux_check_services() {
|
|||
# Skip if not meant to be installed
|
||||
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
|
||||
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
|
||||
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
|
||||
|
||||
__check_services_alpine salt-$fname || return 1
|
||||
done
|
||||
|
@ -4400,6 +4404,7 @@ daemons_running_alpine_linux() {
|
|||
# Skip if not meant to be installed
|
||||
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
|
||||
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
|
||||
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
|
||||
|
||||
# shellcheck disable=SC2009
|
||||
if [ "$(ps wwwaux | grep -v grep | grep salt-$fname)" = "" ]; then
|
||||
|
@ -4427,10 +4432,20 @@ install_amazon_linux_ami_deps() {
|
|||
_USEAWS=$BS_FALSE
|
||||
pkg_append="python"
|
||||
|
||||
repo_rev="$(echo "${STABLE_REV}" | sed 's|.*\/||g')"
|
||||
if [ "$ITYPE" = "stable" ]; then
|
||||
repo_rev="$STABLE_REV"
|
||||
else
|
||||
repo_rev="latest"
|
||||
fi
|
||||
|
||||
if echo $repo_rev | egrep -q '^archive'; then
|
||||
year=$(echo "$repo_rev" | cut -d '/' -f 2 | cut -c1-4)
|
||||
else
|
||||
year=$(echo "$repo_rev" | cut -c1-4)
|
||||
fi
|
||||
|
||||
if echo "$repo_rev" | egrep -q '^(latest|2016\.11)$' || \
|
||||
[ "$(echo "$repo_rev" | cut -c1-4)" -gt 2016 ]; then
|
||||
[ "$year" -gt 2016 ]; then
|
||||
_USEAWS=$BS_TRUE
|
||||
pkg_append="python27"
|
||||
fi
|
||||
|
@ -4477,7 +4492,8 @@ _eof
|
|||
|
||||
# Package python-ordereddict-1.1-2.el6.noarch is obsoleted by python26-2.6.9-2.88.amzn1.x86_64
|
||||
# which is already installed
|
||||
__PACKAGES="${pkg_append}-PyYAML ${pkg_append}-crypto ${pkg_append}-msgpack ${pkg_append}-zmq ${pkg_append}-jinja2 ${pkg_append}-requests"
|
||||
__PACKAGES="m2crypto ${pkg_append}-crypto ${pkg_append}-jinja2 ${pkg_append}-PyYAML"
|
||||
__PACKAGES="${__PACKAGES} ${pkg_append}-msgpack ${pkg_append}-requests ${pkg_append}-zmq"
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
__yum_install_noinput ${__PACKAGES} || return 1
|
||||
|
@ -4630,7 +4646,7 @@ install_arch_linux_git_deps() {
|
|||
fi
|
||||
pacman -R --noconfirm python2-distribute
|
||||
pacman -Su --noconfirm --needed python2-crypto python2-setuptools python2-jinja \
|
||||
python2-markupsafe python2-msgpack python2-psutil \
|
||||
python2-m2crypto python2-markupsafe python2-msgpack python2-psutil \
|
||||
python2-pyzmq zeromq python2-requests python2-systemd || return 1
|
||||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
@ -4704,7 +4720,7 @@ install_arch_linux_post() {
|
|||
/usr/bin/systemctl preset salt-$fname.service > /dev/null 2>&1 &&
|
||||
/usr/bin/systemctl enable salt-$fname.service > /dev/null 2>&1
|
||||
)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
/usr/bin/systemctl daemon-reload
|
||||
continue
|
||||
fi
|
||||
|
@ -4732,7 +4748,7 @@ install_arch_linux_git_post() {
|
|||
/usr/bin/systemctl preset salt-${fname}.service > /dev/null 2>&1 &&
|
||||
/usr/bin/systemctl enable salt-${fname}.service > /dev/null 2>&1
|
||||
)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
/usr/bin/systemctl daemon-reload
|
||||
continue
|
||||
fi
|
||||
|
@ -4885,9 +4901,9 @@ install_freebsd_9_stable_deps() {
|
|||
__configure_freebsd_pkg_details || return 1
|
||||
fi
|
||||
|
||||
# Now install swig
|
||||
# Now install swig30
|
||||
# shellcheck disable=SC2086
|
||||
/usr/local/sbin/pkg install ${FROM_FREEBSD} -y swig || return 1
|
||||
/usr/local/sbin/pkg install ${FROM_FREEBSD} -y swig30 || return 1
|
||||
|
||||
# YAML module is used for generating custom master/minion configs
|
||||
# shellcheck disable=SC2086
|
||||
|
@ -4934,7 +4950,7 @@ install_freebsd_git_deps() {
|
|||
# We're on the develop branch, install whichever tornado is on the requirements file
|
||||
__REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
|
||||
if [ "${__REQUIRED_TORNADO}" != "" ]; then
|
||||
/usr/local/sbin/pkg install -y www/py-tornado || return 1
|
||||
/usr/local/sbin/pkg install -y www/py-tornado4 || return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
|
@ -5098,35 +5114,11 @@ install_freebsd_restart_daemons() {
|
|||
# OpenBSD Install Functions
|
||||
#
|
||||
|
||||
__choose_openbsd_mirror() {
|
||||
OPENBSD_REPO=''
|
||||
MINTIME=''
|
||||
MIRROR_LIST=$(ftp -w 15 -Vao - 'https://ftp.openbsd.org/cgi-bin/ftplist.cgi?dbversion=1' | awk '/^http/ {print $1}')
|
||||
|
||||
for MIRROR in $MIRROR_LIST; do
|
||||
MIRROR_HOST=$(echo "$MIRROR" | sed -e 's|.*//||' -e 's|+*/.*$||')
|
||||
TIME=$(ping -c 1 -w 1 -q "$MIRROR_HOST" | awk -F/ '/round-trip/ { print $5 }')
|
||||
[ -z "$TIME" ] && continue
|
||||
|
||||
echodebug "ping time for $MIRROR_HOST is $TIME"
|
||||
if [ -z "$MINTIME" ]; then
|
||||
FASTER_MIRROR=1
|
||||
else
|
||||
FASTER_MIRROR=$(echo "$TIME < $MINTIME" | bc)
|
||||
fi
|
||||
if [ "$FASTER_MIRROR" -eq 1 ]; then
|
||||
MINTIME=$TIME
|
||||
OPENBSD_REPO="$MIRROR"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
install_openbsd_deps() {
|
||||
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
|
||||
__choose_openbsd_mirror || return 1
|
||||
echoinfo "setting package repository to $OPENBSD_REPO with ping time of $MINTIME"
|
||||
[ -n "$OPENBSD_REPO" ] || return 1
|
||||
echo "${OPENBSD_REPO}" >>/etc/installurl || return 1
|
||||
OPENBSD_REPO='https://cdn.openbsd.org/pub/OpenBSD'
|
||||
echoinfo "setting package repository to $OPENBSD_REPO"
|
||||
echo "${OPENBSD_REPO}" >/etc/installurl || return 1
|
||||
fi
|
||||
|
||||
if [ "${_EXTRA_PACKAGES}" != "" ]; then
|
||||
|
@ -5226,7 +5218,7 @@ install_openbsd_restart_daemons() {
|
|||
# SmartOS Install Functions
|
||||
#
|
||||
install_smartos_deps() {
|
||||
pkgin -y install zeromq py27-crypto py27-msgpack py27-yaml py27-jinja2 py27-zmq py27-requests || return 1
|
||||
pkgin -y install zeromq py27-crypto py27-m2crypto py27-msgpack py27-yaml py27-jinja2 py27-zmq py27-requests || return 1
|
||||
|
||||
# Set _SALT_ETC_DIR to SmartOS default if they didn't specify
|
||||
_SALT_ETC_DIR=${BS_SALT_ETC_DIR:-/opt/local/etc/salt}
|
||||
|
@ -5456,6 +5448,13 @@ __version_lte() {
|
|||
}
|
||||
|
||||
__zypper() {
|
||||
# Check if any zypper process is running before calling zypper again.
|
||||
# This is useful when a zypper call is part of a boot process and will
|
||||
# wait until the zypper process is finished, such as on AWS AMIs.
|
||||
while pgrep -l zypper; do
|
||||
sleep 1
|
||||
done
|
||||
|
||||
zypper --non-interactive "${@}"; return $?
|
||||
}
|
||||
|
||||
|
@ -5515,7 +5514,7 @@ install_opensuse_stable_deps() {
|
|||
}
|
||||
|
||||
install_opensuse_git_deps() {
|
||||
if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
|
||||
if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ] && ! __check_command_exists update-ca-certificates; then
|
||||
__zypper_install ca-certificates || return 1
|
||||
fi
|
||||
|
||||
|
@ -5529,7 +5528,7 @@ install_opensuse_git_deps() {
|
|||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES="libzmq5 python-Jinja2 python-msgpack-python python-pycrypto python-pyzmq python-xml"
|
||||
__PACKAGES="libzmq5 python-Jinja2 python-m2crypto python-msgpack-python python-pycrypto python-pyzmq python-xml"
|
||||
|
||||
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
|
||||
# We're on the develop branch, install whichever tornado is on the requirements file
|
||||
|
@ -5594,7 +5593,7 @@ install_opensuse_stable_post() {
|
|||
|
||||
if [ -f /bin/systemctl ]; then
|
||||
systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
|
||||
sleep 0.1
|
||||
sleep 1
|
||||
systemctl daemon-reload
|
||||
continue
|
||||
fi
|
||||
|
@ -5723,6 +5722,12 @@ install_suse_12_stable_deps() {
|
|||
# shellcheck disable=SC2086,SC2090
|
||||
__zypper_install ${__PACKAGES} || return 1
|
||||
|
||||
# SLES 11 SP3 ships with both python-M2Crypto-0.22.* and python-m2crypto-0.21 and we will be asked which
|
||||
# we want to install, even with --non-interactive.
|
||||
# Let's try to install the higher version first and then the lower one in case of failure
|
||||
__zypper_install 'python-M2Crypto>=0.22' || __zypper_install 'python-M2Crypto>=0.21' || return 1
|
||||
|
||||
|
||||
if [ "${_EXTRA_PACKAGES}" != "" ]; then
|
||||
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
|
||||
# shellcheck disable=SC2086
|
||||
|
@ -5825,6 +5830,11 @@ install_suse_11_stable_deps() {
|
|||
# shellcheck disable=SC2086,SC2090
|
||||
__zypper_install ${__PACKAGES} || return 1
|
||||
|
||||
# SLES 11 SP3 ships with both python-M2Crypto-0.22.* and python-m2crypto-0.21 and we will be asked which
|
||||
# we want to install, even with --non-interactive.
|
||||
# Let's try to install the higher version first and then the lower one in case of failure
|
||||
__zypper_install 'python-M2Crypto>=0.22' || __zypper_install 'python-M2Crypto>=0.21' || return 1
|
||||
|
||||
if [ "${_EXTRA_PACKAGES}" != "" ]; then
|
||||
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
|
||||
# shellcheck disable=SC2086
|
||||
|
|
|
@ -56,7 +56,7 @@ def inet_pton(address_family, ip_string):
|
|||
addr_size = ctypes.c_int(ctypes.sizeof(addr))
|
||||
|
||||
if WSAStringToAddressA(
|
||||
ip_string,
|
||||
ip_string.encode('ascii'),
|
||||
address_family,
|
||||
None,
|
||||
ctypes.byref(addr),
|
||||
|
|
|
@ -763,13 +763,6 @@ def _virtual(osdata):
|
|||
grains['virtual'] = 'kvm'
|
||||
# Break out of the loop so the next log message is not issued
|
||||
break
|
||||
elif command == 'virt-what':
|
||||
# if 'virt-what' returns nothing, it's either an undetected platform
|
||||
# so we default just as virt-what to 'physical', otherwise use the
|
||||
# platform detected/returned by virt-what
|
||||
if output:
|
||||
grains['virtual'] = output.lower()
|
||||
break
|
||||
elif command == 'prtdiag':
|
||||
model = output.lower().split("\n")[0]
|
||||
if 'vmware' in model:
|
||||
|
|
|
@@ -213,6 +213,14 @@ def build_rule(table='filter', chain=None, command=None, position='', full=None,
    To pass in jump options that doesn't take arguments, pass in an empty
    string.

    .. note::

        Whereas iptables will accept ``-p``, ``--proto[c[o[l]]]`` as synonyms
        of ``--protocol``, if ``--proto`` appears in an iptables command after
        the appearance of ``-m policy``, it is interpreted as the ``--proto``
        option of the policy extension (see the iptables-extensions(8) man
        page).

    CLI Examples:

    .. code-block:: bash

@@ -243,7 +251,6 @@ def build_rule(table='filter', chain=None, command=None, position='', full=None,
        salt '*' iptables.build_rule filter INPUT command=I position=3 \\
            full=True match=state connstate=RELATED,ESTABLISHED jump=ACCEPT \\
            family=ipv6

    '''
    if 'target' in kwargs:
        kwargs['jump'] = kwargs.pop('target')
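A hedged illustration of the distinction the new note draws, reusing the argument names from the IPSec state examples added later in this same commit (the specific rule values are placeholders):

    # Sketch only: with match=policy, 'proto' is emitted as the policy
    # extension's --proto option instead of being rewritten to --protocol.
    salt '*' iptables.build_rule filter INPUT command=I position=1 jump=ACCEPT \
        match=policy dir=in pol=ipsec reqid=1 proto=esp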
||||
|
@ -257,7 +264,7 @@ def build_rule(table='filter', chain=None, command=None, position='', full=None,
|
|||
del kwargs[ignore]
|
||||
|
||||
rule = []
|
||||
proto = False
|
||||
protocol = False
|
||||
bang_not_pat = re.compile(r'(!|not)\s?')
|
||||
|
||||
def maybe_add_negation(arg):
|
||||
|
@ -281,12 +288,15 @@ def build_rule(table='filter', chain=None, command=None, position='', full=None,
|
|||
rule.append('{0}-o {1}'.format(maybe_add_negation('of'), kwargs['of']))
|
||||
del kwargs['of']
|
||||
|
||||
for proto_arg in ('protocol', 'proto'):
|
||||
if proto_arg in kwargs:
|
||||
if not proto:
|
||||
rule.append('{0}-p {1}'.format(maybe_add_negation(proto_arg), kwargs[proto_arg]))
|
||||
proto = True
|
||||
del kwargs[proto_arg]
|
||||
if 'proto' in kwargs and kwargs.get('match') != 'policy':
|
||||
kwargs['protocol'] = kwargs['proto']
|
||||
del kwargs['proto']
|
||||
# Handle the case 'proto' in kwargs and kwargs.get('match') == 'policy' below
|
||||
if 'protocol' in kwargs:
|
||||
if not protocol:
|
||||
rule.append('{0}-p {1}'.format(maybe_add_negation('protocol'), kwargs['protocol']))
|
||||
protocol = True
|
||||
del kwargs['protocol']
|
||||
|
||||
if 'match' in kwargs:
|
||||
match_value = kwargs['match']
|
||||
|
@ -297,6 +307,9 @@ def build_rule(table='filter', chain=None, command=None, position='', full=None,
|
|||
if 'name_' in kwargs and match.strip() in ('pknock', 'quota2', 'recent'):
|
||||
rule.append('--name {0}'.format(kwargs['name_']))
|
||||
del kwargs['name_']
|
||||
if 'proto' in kwargs and kwargs.get('match') == 'policy':
|
||||
rule.append('{0}--proto {1}'.format(maybe_add_negation('proto'), kwargs['proto']))
|
||||
del kwargs['proto']
|
||||
del kwargs['match']
|
||||
|
||||
if 'match-set' in kwargs:
|
||||
|
@ -330,8 +343,8 @@ def build_rule(table='filter', chain=None, command=None, position='', full=None,
|
|||
if multiport_arg in kwargs:
|
||||
if '-m multiport' not in rule:
|
||||
rule.append('-m multiport')
|
||||
if not proto:
|
||||
return 'Error: proto must be specified'
|
||||
if not protocol:
|
||||
return 'Error: protocol must be specified'
|
||||
|
||||
mp_value = kwargs[multiport_arg]
|
||||
if isinstance(mp_value, list):
|
||||
|
@ -1042,9 +1055,9 @@ def _parse_conf(conf_file=None, in_mem=False, family='ipv4'):
|
|||
|
||||
def _parser():
|
||||
'''
|
||||
This function contains _all_ the options I could find in man 8 iptables,
|
||||
listed in the first section that I found them in. They will not all be used
|
||||
by all parts of the module; use them intelligently and appropriately.
|
||||
This function attempts to list all the options documented in the
|
||||
iptables(8) and iptables-extensions(8) man pages. They will not all be
|
||||
used by all parts of the module; use them intelligently and appropriately.
|
||||
'''
|
||||
add_arg = None
|
||||
if sys.version.startswith('2.6'):
|
||||
|
|
|
@ -50,6 +50,10 @@ def __virtual__():
|
|||
Only work on select distros which still use Red Hat's /usr/bin/service for
|
||||
management of either sysvinit or a hybrid sysvinit/upstart init system.
|
||||
'''
|
||||
# Disable when booted with systemd
|
||||
if __utils__['systemd.booted'](__context__):
|
||||
return (False, 'The rh_service execution module failed to load: this system was booted with systemd.')
|
||||
|
||||
# Enable on these platforms only.
|
||||
enable = set((
|
||||
'XenServer',
|
||||
|
@ -99,15 +103,6 @@ def __virtual__():
|
|||
'RedHat-based distros >= version 7 use systemd, will not '
|
||||
'load rh_service.py as virtual \'service\''
|
||||
)
|
||||
if __grains__['os'] == 'Amazon':
|
||||
if int(osrelease_major) in (2016, 2017):
|
||||
return __virtualname__
|
||||
else:
|
||||
return (
|
||||
False,
|
||||
'Amazon Linux >= version 2 uses systemd. Will not '
|
||||
'load rh_service.py as virtual \'service\''
|
||||
)
|
||||
return __virtualname__
|
||||
return (False, 'Cannot load rh_service module: OS not in {0}'.format(enable))
|
||||
|
||||
|
|
|
@ -87,7 +87,7 @@ def _get_username(member):
|
|||
str: The username converted to domain\\username format
|
||||
'''
|
||||
return member.ADSPath.replace('WinNT://', '').replace(
|
||||
'/', '\\').encode('ascii', 'backslashreplace')
|
||||
'/', '\\')
|
||||
|
||||
|
||||
def add(name, **kwargs):
|
||||
|
|
|
@ -17,7 +17,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -32,7 +32,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- comment: "Allow HTTP"
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -48,7 +48,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- connstate: NEW
|
||||
- source: '127.0.0.1'
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -65,7 +65,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- connstate: NEW
|
||||
- source: '! 127.0.0.1'
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -81,7 +81,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- connstate: NEW
|
||||
- source: 'not 127.0.0.1'
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -94,7 +94,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -109,7 +109,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- dports:
|
||||
- 80
|
||||
- 443
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -122,7 +122,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -136,7 +136,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -148,7 +148,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -161,7 +161,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@ -174,7 +174,7 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
|
|||
- match: state
|
||||
- connstate: NEW
|
||||
- dport: 80
|
||||
- proto: tcp
|
||||
- protocol: tcp
|
||||
- sport: 1025:65535
|
||||
- save: True
|
||||
|
||||
|
@@ -183,6 +183,55 @@ at some point be deprecated in favor of a more generic ``firewall`` state.
        - chain: INPUT
        - policy: ACCEPT

.. note::

    Whereas iptables will accept ``-p``, ``--proto[c[o[l]]]`` as synonyms of
    ``--protocol``, if ``--proto`` appears in an iptables command after the
    appearance of ``-m policy``, it is interpreted as the ``--proto`` option of
    the policy extension (see the iptables-extensions(8) man page).

Example rules for IPSec policy:

.. code-block:: yaml

    accept_esp_in:
      iptables.append:
        - table: filter
        - chain: INPUT
        - jump: ACCEPT
        - source: 10.20.0.0/24
        - destination: 10.10.0.0/24
        - in-interface: eth0
        - match: policy
        - dir: in
        - pol: ipsec
        - reqid: 1
        - proto: esp
    accept_esp_forward_in:
      iptables.append:
        - use:
          - iptables: accept_esp_in
        - chain: FORWARD

    accept_esp_out:
      iptables.append:
        - table: filter
        - chain: OUTPUT
        - jump: ACCEPT
        - source: 10.10.0.0/24
        - destination: 10.20.0.0/24
        - out-interface: eth0
        - match: policy
        - dir: out
        - pol: ipsec
        - reqid: 1
        - proto: esp
    accept_esp_forward_out:
      iptables.append:
        - use:
          - iptables: accept_esp_out
        - chain: FORWARD

.. note::

    Various functions of the ``iptables`` module use the ``--check`` option. If
|
|
|
@@ -224,6 +224,13 @@ def managed(name,
    Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` containing the raw
    result after the template was rendered.

    .. note::

        This argument cannot be used directly on the command line. Instead,
        it can be passed through the ``pillar`` variable when executing one
        of the :ref:`salt.modules.state.sls` or :ref:`salt.modules.state.apply`
        functions (see an example below).

    replace: False
        Load and replace the configuration. Default: ``False`` (will apply load merge).

@@ -274,7 +281,7 @@ def managed(name,

    $ sudo salt 'juniper.device' state.sls router.config test=True

    $ sudo salt -N all-routers state.sls router.config debug=True
    $ sudo salt -N all-routers state.sls router.config pillar="{'debug': True}"

    ``router.config`` depends on the location of the SLS file (see above). Running this command, will be executed all
    five steps from above. These examples above are not meant to be used in a production environment, their sole purpose

@@ -342,11 +349,11 @@ def managed(name,

    # the user can override the flags the equivalent CLI args
    # which have higher precedence
    test = __opts__.get('test', test)
    debug = __opts__.get('debug', debug)
    commit = __opts__.get('commit', commit)
    replace = __opts__.get('replace', replace) # this might be a bit risky
    skip_verify = __opts__.get('skip_verify', skip_verify)
    test = __salt__['config.merge']('test', test)
    debug = __salt__['config.merge']('debug', debug)
    commit = __salt__['config.merge']('commit', commit)
    replace = __salt__['config.merge']('replace', replace) # this might be a bit risky
    skip_verify = __salt__['config.merge']('skip_verify', skip_verify)

    config_update_ret = _update_config(template_name,
                                       template_source=template_source,
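By analogy with the ``debug`` example above, other flags that are now routed through ``config.merge`` could presumably also be supplied via pillar rather than on the command line; a hedged illustration (target and SLS name reuse the example above):

    # Sketch only: pass 'test' through pillar, mirroring the pillar-based
    # 'debug' example shown in the updated docstring.
    sudo salt -N all-routers state.sls router.config pillar="{'test': True}"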
|
||||
|
|
|
@ -759,6 +759,14 @@ def installed(name,
|
|||
ret['comment'] = out['comment']
|
||||
return ret
|
||||
|
||||
# No packages to install.
|
||||
if not target_pkgs:
|
||||
ret['result'] = True
|
||||
aicomms = '\n'.join(already_installed_comments)
|
||||
last_line = 'All specified packages are already installed' + (' and up-to-date' if upgrade else '')
|
||||
ret['comment'] = aicomms + ('\n' if aicomms else '') + last_line
|
||||
return ret
|
||||
|
||||
# Construct the string that will get passed to the install call
|
||||
pkgs_str = ','.join([state_name for _, state_name in target_pkgs])
|
||||
|
||||
|
@ -809,12 +817,7 @@ def installed(name,
|
|||
no_cache_dir=no_cache_dir
|
||||
)
|
||||
|
||||
# Check the retcode for success, but don't fail if using pip1 and the package is
|
||||
# already present. Pip1 returns a retcode of 1 (instead of 0 for pip2) if you run
|
||||
# "pip install" without any arguments. See issue #21845.
|
||||
if pip_install_call and \
|
||||
(pip_install_call.get('retcode', 1) == 0 or pip_install_call.get('stdout', '').startswith(
|
||||
'You must give at least one requirement to install')):
|
||||
if pip_install_call and pip_install_call.get('retcode', 1) == 0:
|
||||
ret['result'] = True
|
||||
|
||||
if requirements or editable:
|
||||
|
@ -822,6 +825,8 @@ def installed(name,
|
|||
if requirements:
|
||||
PIP_REQUIREMENTS_NOCHANGE = [
|
||||
'Requirement already satisfied',
|
||||
'Requirement already up-to-date',
|
||||
'Requirement not upgraded',
|
||||
'Collecting',
|
||||
'Cloning',
|
||||
'Cleaning up...',
|
||||
|
|
|
@ -523,7 +523,7 @@ def _find_install_targets(name=None,
|
|||
|
||||
if any((pkgs, sources)):
|
||||
if pkgs:
|
||||
desired = _repack_pkgs(pkgs)
|
||||
desired = _repack_pkgs(pkgs, normalize=normalize)
|
||||
elif sources:
|
||||
desired = __salt__['pkg_resource.pack_sources'](
|
||||
sources,
|
||||
|
|
|
@ -567,6 +567,11 @@ class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.tra
|
|||
raise exc
|
||||
self._socket.close()
|
||||
self._socket = None
|
||||
if hasattr(self.req_server, 'stop'):
|
||||
try:
|
||||
self.req_server.stop()
|
||||
except Exception as exc:
|
||||
log.exception('TCPReqServerChannel close generated an exception: %s', str(exc))
|
||||
|
||||
def __del__(self):
|
||||
self.close()
|
||||
|
@ -753,15 +758,23 @@ if USE_LOAD_BALANCER:
|
|||
super(LoadBalancerWorker, self).__init__(
|
||||
message_handler, *args, **kwargs)
|
||||
self.socket_queue = socket_queue
|
||||
self._stop = threading.Event()
|
||||
self.thread = threading.Thread(target=self.socket_queue_thread)
|
||||
self.thread.start()
|
||||
|
||||
t = threading.Thread(target=self.socket_queue_thread)
|
||||
t.start()
|
||||
def stop(self):
|
||||
self._stop.set()
|
||||
self.thread.join()
|
||||
|
||||
def socket_queue_thread(self):
|
||||
try:
|
||||
while True:
|
||||
client_socket, address = self.socket_queue.get(True, None)
|
||||
|
||||
try:
|
||||
client_socket, address = self.socket_queue.get(True, 1)
|
||||
except queue.Empty:
|
||||
if self._stop.is_set():
|
||||
break
|
||||
continue
|
||||
# 'self.io_loop' initialized in super class
|
||||
# 'tornado.tcpserver.TCPServer'.
|
||||
# 'self._handle_connection' defined in same super class.
|
||||
|
|
|
@@ -135,6 +135,12 @@ def vb_get_manager():
    '''
    global _virtualboxManager
    if _virtualboxManager is None and HAS_LIBS:
        try:
            from importlib import reload
        except ImportError:
            # If we get here, we are in py2 and reload is a built-in.
            pass

        # Reloading the API extends sys.paths for subprocesses of multiprocessing, since they seem to share contexts
        reload(vboxapi)
        _virtualboxManager = vboxapi.VirtualBoxManager(None, None)

@@ -149,7 +155,13 @@ def vb_get_box():
    @rtype: IVirtualBox
    '''
    vb_get_manager()

    vbox = _virtualboxManager.vbox

    try:
        # This works in older versions of the SDK, but does not seem to work anymore.
        vbox = _virtualboxManager.vbox
    except AttributeError:
        vbox = _virtualboxManager.getVirtualBox()

    return vbox
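Both changes are generic Python patterns: import reload from importlib on Python 3 (it is a builtin on Python 2), and fall back from an attribute to a getter when an SDK changes its API. A hedged sketch with hypothetical manager classes standing in for the vboxapi objects:

```python
try:
    # Python 3: reload() lives in importlib.
    from importlib import reload
except ImportError:
    # Python 2: reload() is a built-in, nothing to import.
    pass

import string

# Re-import an already-loaded module; harmless here, but the same call is what
# refreshes vboxapi's sys.path side effects in the Salt code above.
reload(string)


class OldSDKManager(object):
    '''Hypothetical manager exposing the legacy ``vbox`` attribute.'''
    vbox = 'box-attribute'


class NewSDKManager(object):
    '''Hypothetical manager exposing only the newer getter.'''
    def getVirtualBox(self):
        return 'box-from-getter'


def get_box(manager):
    '''Prefer the old attribute, fall back to the newer getter method.'''
    try:
        return manager.vbox
    except AttributeError:
        return manager.getVirtualBox()


print(get_box(OldSDKManager()))  # box-attribute
print(get_box(NewSDKManager()))  # box-from-getter
```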
@@ -55,13 +55,17 @@ import salt.log.setup
from salt.utils.odict import OrderedDict

# Define the pytest plugins we rely on
pytest_plugins = ['pytest_catchlog', 'tempdir', 'helpers_namespace'] # pylint: disable=invalid-name
pytest_plugins = ['tempdir', 'helpers_namespace'] # pylint: disable=invalid-name

# Define where not to collect tests from
collect_ignore = ['setup.py']

log = logging.getLogger('salt.testsuite')

# Reset logging root handlers
for handler in logging.root.handlers:
    logging.root.removeHandler(handler)


def pytest_tempdir_basename():
    '''

@@ -197,25 +201,6 @@ def pytest_configure(config):
    called after command line options have been parsed
    and all plugins and initial conftest files been loaded.
    '''
    # Configure the console logger based on the catch_log settings.
    # Most importantly, shutdown Salt's null, store and temporary logging queue handlers
    catch_log = config.pluginmanager.getplugin('_catch_log')
    cli_logging_handler = catch_log.log_cli_handler
    # Add the pytest_catchlog CLI log handler to the logging root
    logging.root.addHandler(cli_logging_handler)
    cli_level = cli_logging_handler.level
    cli_level = config._catchlog_log_cli_level
    cli_format = cli_logging_handler.formatter._fmt
    cli_date_format = cli_logging_handler.formatter.datefmt
    # Setup the console logger which shuts down the null and the temporary queue handlers
    salt.log.setup_console_logger(
        log_level=salt.log.setup.LOG_VALUES_TO_LEVELS.get(cli_level, 'error'),
        log_format=cli_format,
        date_format=cli_date_format
    )
    # Disable the store logging queue handler
    salt.log.setup.setup_extended_logging({'extension_modules': ''})

    config.addinivalue_line('norecursedirs', os.path.join(CODE_DIR, 'templates'))
    config.addinivalue_line(
        'markers',
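For context, config.addinivalue_line() is the stock pytest hook API the retained lines use. A minimal, hypothetical conftest.py showing the same two calls for a norecursedirs entry and a custom marker (the path and marker text are made up, not Salt's):

```python
# conftest.py (illustrative)
import os

CODE_DIR = os.path.dirname(os.path.abspath(__file__))


def pytest_configure(config):
    '''
    Called after command line options have been parsed and all plugins
    and initial conftest files have been loaded.
    '''
    # Keep pytest from recursing into a template directory.
    config.addinivalue_line('norecursedirs', os.path.join(CODE_DIR, 'templates'))
    # Register a custom marker so it is documented via `pytest --markers`.
    config.addinivalue_line(
        'markers',
        'destructive_test: marks tests that modify the system they run on'
    )
```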
25  tests/integration/modules/test_autoruns.py  Normal file

@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-

# Import Python libs
from __future__ import absolute_import

# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf

# Import Salt libs
import salt.utils


@skipIf(not salt.utils.is_windows(), 'windows tests only')
class AutoRunsModuleTest(ModuleCase):
    '''
    Test the autoruns module
    '''
    def test_win_autoruns_list(self):
        '''
        test win_autoruns.list module
        '''
        ret = self.run_function('autoruns.list')
        self.assertIn('HKLM', str(ret))
        self.assertTrue(isinstance(ret, dict))
110  tests/integration/modules/test_firewall.py  Normal file

@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-

# Import Python libs
from __future__ import absolute_import

# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest

# Import Salt Libs
import salt.utils


@skipIf(not salt.utils.is_windows(), 'Tests for only Windows')
class FirewallTest(ModuleCase):
    '''
    Validate windows firewall module
    '''
    def _pre_firewall_status(self, pre_run):
        post_run = self.run_function('firewall.get_config')
        network = ['Domain', 'Public', 'Private']
        # compare the status of the firewall before and after test
        # and re-enable or disable depending on status before test run
        for net in network:
            if post_run[net] != pre_run[net]:
                if pre_run[net]:
                    self.assertTrue(self.run_function('firewall.enable', profile=net))
                else:
                    self.assertTrue(self.run_function('firewall.disable', profile=net))

    @destructiveTest
    def test_firewall_get_config(self):
        '''
        test firewall.get_config
        '''
        pre_run = self.run_function('firewall.get_config')
        # ensure all networks are enabled then test status
        self.assertTrue(self.run_function('firewall.enable', profile='allprofiles'))
        ret = self.run_function('firewall.get_config')
        network = ['Domain', 'Public', 'Private']
        for net in network:
            self.assertTrue(ret[net])
        self._pre_firewall_status(pre_run)

    @destructiveTest
    def test_firewall_disable(self):
        '''
        test firewall.disable
        '''
        pre_run = self.run_function('firewall.get_config')
        network = 'Private'

        ret = self.run_function('firewall.get_config')[network]
        if not ret:
            self.assertTrue(self.run_function('firewall.enable', profile=network))

        self.assertTrue(self.run_function('firewall.disable', profile=network))
        ret = self.run_function('firewall.get_config')[network]
        self.assertFalse(ret)
        self._pre_firewall_status(pre_run)

    @destructiveTest
    def test_firewall_enable(self):
        '''
        test firewall.enable
        '''
        pre_run = self.run_function('firewall.get_config')
        network = 'Private'

        ret = self.run_function('firewall.get_config')[network]
        if ret:
            self.assertTrue(self.run_function('firewall.disable', profile=network))

        self.assertTrue(self.run_function('firewall.enable', profile=network))
        ret = self.run_function('firewall.get_config')[network]
        self.assertTrue(ret)
        self._pre_firewall_status(pre_run)

    def test_firewall_get_rule(self):
        '''
        test firewall.get_rule
        '''
        rule = 'Remote Event Log Management (NP-In)'

        ret = self.run_function('firewall.get_rule', [rule])
        checks = ['Private', 'LocalPort', 'RemotePort']
        for check in checks:
            self.assertIn(check, ret[rule])

    @destructiveTest
    def test_firewall_add_delete_rule(self):
        '''
        test firewall.add_rule and delete_rule
        '''
        rule = 'test rule'
        port = '8080'

        # test adding firewall rule
        add_rule = self.run_function('firewall.add_rule', [rule, port])
        ret = self.run_function('firewall.get_rule', [rule])
        self.assertIn(rule, ret[rule])
        self.assertIn(port, ret[rule])

        # test deleting firewall rule
        self.assertTrue(self.run_function('firewall.delete_rule', [rule, port]))
        ret = self.run_function('firewall.get_rule', [rule])
        self.assertNotIn(rule, ret)
        self.assertNotIn(port, ret)
        self.assertIn('No rules match the specified criteria.', ret)
59  tests/integration/modules/test_network.py  Normal file

@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-

# Import Python libs
from __future__ import absolute_import

# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf

# Import Salt Libs
import salt.utils

URL = 'repo.saltstack.com'


class NetworkTest(ModuleCase):
    '''
    Validate network module
    '''
    def test_network_ping(self):
        '''
        network.ping
        '''
        ret = self.run_function('network.ping', [URL])
        exp_out = ['ping', URL, 'ttl', 'time']
        for out in exp_out:
            self.assertIn(out, ret.lower())

    @skipIf(salt.utils.is_darwin(), 'not supported on macosx')
    def test_network_netstat(self):
        '''
        network.netstat
        '''
        ret = self.run_function('network.netstat')
        exp_out = ['proto', 'local-address']
        for val in ret:
            for out in exp_out:
                self.assertIn(out, val)

    def test_network_traceroute(self):
        '''
        network.traceroute
        '''
        if not salt.utils.which('traceroute') and not salt.utils.is_windows():
            self.skipTest('traceroute not installed')
        ret = self.run_function('network.traceroute', [URL])
        exp_out = ['hostname', 'ip']
        for out in exp_out:
            self.assertIn(out, exp_out)

    @skipIf(not salt.utils.is_windows(), 'windows only test')
    def test_network_nslookup(self):
        '''
        network.nslookup
        '''
        ret = self.run_function('network.nslookup', [URL])
        exp_out = ['Server', 'Address']
        for out in exp_out:
            self.assertIn(out, exp_out)
tests/integration/modules/test_ntp.py
Normal file
30
tests/integration/modules/test_ntp.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.helpers import destructiveTest
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.utils
|
||||
|
||||
|
||||
@skipIf(not salt.utils.is_windows(), 'Tests for only Windows')
|
||||
class NTPTest(ModuleCase):
|
||||
'''
|
||||
Validate windows ntp module
|
||||
'''
|
||||
@destructiveTest
|
||||
def test_ntp_set_servers(self):
|
||||
'''
|
||||
test ntp get and set servers
|
||||
'''
|
||||
ntp_srv = 'pool.ntp.org'
|
||||
set_srv = self.run_function('ntp.set_servers', [ntp_srv])
|
||||
self.assertTrue(set_srv)
|
||||
|
||||
get_srv = self.run_function('ntp.get_servers')
|
||||
self.assertEqual(ntp_srv, get_srv[0])
|
|
@@ -359,3 +359,65 @@ class SystemModuleTest(ModuleCase):
        if self.run_function('grains.get', ['os_family']) == 'NILinuxRT':
            self.assertTrue(self.run_function('system._has_settable_hwclock'))
            self.assertTrue(self._hwclock_has_compare())


@skipIf(not salt.utils.is_windows(), 'These tests can only be run on windows')
class WinSystemModuleTest(ModuleCase):
    '''
    Validate the date/time functions in the win_system module
    '''
    def test_get_computer_name(self):
        '''
        Test getting the computer name
        '''
        ret = self.run_function('system.get_computer_name')

        self.assertTrue(isinstance(ret, str))
        import socket
        name = socket.gethostname()
        self.assertEqual(name, ret)

    @destructiveTest
    def test_set_computer_desc(self):
        '''
        Test setting the computer description
        '''
        desc = 'test description'
        set_desc = self.run_function('system.set_computer_desc', [desc])
        self.assertTrue(set_desc)

        get_desc = self.run_function('system.get_computer_desc')
        self.assertEqual(set_desc['Computer Description'], get_desc)

    def test_get_system_time(self):
        '''
        Test getting the system time
        '''
        ret = self.run_function('system.get_system_time')
        now = datetime.datetime.now()
        self.assertEqual(now.strftime("%I:%M"), ret.rsplit(':', 1)[0])

    @destructiveTest
    def test_set_system_time(self):
        '''
        Test setting the system time
        '''
        test_time = '10:55'
        set_time = self.run_function('system.set_system_time', [test_time + ' AM'])
        get_time = self.run_function('system.get_system_time').rsplit(':', 1)[0]
        self.assertEqual(get_time, test_time)

    def test_get_system_date(self):
        '''
        Test getting system date
        '''
        ret = self.run_function('system.get_system_date')
        date = datetime.datetime.now().date().strftime("%m/%d/%Y")
        self.assertEqual(date, ret)

    @destructiveTest
    def test_set_system_date(self):
        '''
        Test setting system date
        '''
        self.assertTrue(self.run_function('system.set_system_date', ['3/25/2018']))
@@ -592,7 +592,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
                self.assertEqual(
                    ret[key]['comment'],
                    ('Python package carbon < 1.3 was already installed\n'
                     'All packages were successfully installed'))
                     'All specified packages are already installed'))
                break
        else:
            raise Exception('Expected state did not run')
@@ -121,7 +121,7 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
            data = '\n'.join(data)
            self.assertIn('minion', data)
        '''
        arg_str = '-c {0} {1}'.format(self.get_config_dir(), arg_str)
        arg_str = '-c {0} -t {1} {2}'.format(self.get_config_dir(), timeout, arg_str)
        return self.run_script('salt', arg_str, with_retcode=with_retcode, catch_stderr=catch_stderr, timeout=timeout)

    def run_ssh(self, arg_str, with_retcode=False, timeout=25,
@@ -24,6 +24,7 @@ import shutil
import signal
import socket
import string
import subprocess
import sys
import tempfile
import threading
@@ -60,6 +61,31 @@ import salt.utils.files
log = logging.getLogger(__name__)


HAS_SYMLINKS = None


def no_symlinks():
    '''
    Check if git is installed and has symlinks enabled in the configuration.
    '''
    global HAS_SYMLINKS
    if HAS_SYMLINKS is not None:
        return not HAS_SYMLINKS
    output = ''
    try:
        output = subprocess.check_output('git config --get core.symlinks', shell=True)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise
    except subprocess.CalledProcessError:
        # git returned non-zero status
        pass
    HAS_SYMLINKS = False
    if output.strip() == 'true':
        HAS_SYMLINKS = True
    return not HAS_SYMLINKS


def destructiveTest(caller):
    '''
    Mark a test case as a destructive test for example adding or removing users
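The helper caches a one-off git config lookup in a module-level flag. A standalone sketch of the same caching idea, with an illustrative function name and the output decoded explicitly so the comparison also holds for the bytes returned on Python 3:

```python
import errno
import subprocess

_GIT_SYMLINKS = None


def git_symlinks_enabled():
    '''
    Return True if git is installed and core.symlinks is set to true.
    The subprocess call only runs once; later calls reuse the cached value.
    '''
    global _GIT_SYMLINKS
    if _GIT_SYMLINKS is not None:
        return _GIT_SYMLINKS
    output = b''
    try:
        output = subprocess.check_output(
            ['git', 'config', '--get', 'core.symlinks'])
    except OSError as exc:
        # Only swallow "git is not installed"; re-raise anything else.
        if exc.errno != errno.ENOENT:
            raise
    except subprocess.CalledProcessError:
        # git exited non-zero, e.g. the option is simply unset.
        pass
    _GIT_SYMLINKS = output.strip().decode() == 'true'
    return _GIT_SYMLINKS


print(git_symlinks_enabled())
```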
@@ -39,6 +39,7 @@ import salt.utils.stringutils
import salt.utils.yaml
import salt.version
import salt.exceptions
import salt.utils.process
from salt.utils.verify import verify_env
from salt.utils.immutabletypes import freeze
from salt._compat import ElementTree as etree

@@ -638,6 +639,29 @@ class SaltReturnAssertsMixin(object):
        self.assertNotEqual(saltret, comparison)


def _fetch_events(q):
    '''
    Collect events and store them
    '''
    def _clean_queue():
        print('Cleaning queue!')
        while not q.empty():
            queue_item = q.get()
            queue_item.task_done()

    atexit.register(_clean_queue)
    a_config = AdaptedConfigurationTestCaseMixin()
    event = salt.utils.event.get_event('minion', sock_dir=a_config.get_config('minion')['sock_dir'], opts=a_config.get_config('minion'))
    while True:
        try:
            events = event.get_event(full=False)
        except Exception:
            # This is broad but we'll see all kinds of issues right now
            # if we drop the proc out from under the socket while we're reading
            pass
        q.put(events)


class SaltMinionEventAssertsMixin(object):
    '''
    Asserts to verify that a given event was seen
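_fetch_events() combines two generic patterns: an atexit hook that drains the shared queue so the producer process can exit cleanly, and an endless get/put loop run in a separate process. A stripped-down standard-library sketch of the same shape, with made-up event data instead of Salt's event bus:

```python
import atexit
import multiprocessing
import time


def _fetch_items(q):
    '''Produce items forever and push them onto a shared queue.'''
    def _clean_queue():
        # Drain anything left so the queue's feeder thread can shut down.
        while not q.empty():
            q.get()

    atexit.register(_clean_queue)
    counter = 0
    while True:
        q.put({'tag': 'demo/%d' % counter})
        counter += 1
        time.sleep(0.1)


if __name__ == '__main__':
    q = multiprocessing.Queue()
    proc = multiprocessing.Process(target=_fetch_items, args=(q,))
    proc.start()
    print(q.get(timeout=5))   # first event dict produced by the child
    proc.terminate()
    proc.join()
```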
@@ -646,36 +670,15 @@ class SaltMinionEventAssertsMixin(object):
    def __new__(cls, *args, **kwargs):
        # We have to cross-call to re-gen a config
        cls.q = multiprocessing.Queue()
        cls.fetch_proc = multiprocessing.Process(target=cls._fetch, args=(cls.q,))
        cls.fetch_proc = salt.utils.process.SignalHandlingMultiprocessingProcess(
            target=_fetch_events, args=(cls.q,)
        )
        cls.fetch_proc.start()
        return object.__new__(cls)

    def __exit__(self, *args, **kwargs):
        self.fetch_proc.join()

    @staticmethod
    def _fetch(q):
        '''
        Collect events and store them
        '''
        def _clean_queue():
            print('Cleaning queue!')
            while not q.empty():
                queue_item = q.get()
                queue_item.task_done()

        atexit.register(_clean_queue)
        a_config = AdaptedConfigurationTestCaseMixin()
        event = salt.utils.event.get_event('minion', sock_dir=a_config.get_config('minion')['sock_dir'], opts=a_config.get_config('minion'))
        while True:
            try:
                events = event.get_event(full=False)
            except Exception:
                # This is broad but we'll see all kinds of issues right now
                # if we drop the proc out from under the socket while we're reading
                pass
            q.put(events)

    def assertMinionEventFired(self, tag):
        #TODO
        raise salt.exceptions.NotImplemented('assertMinionEventFired() not implemented')
@@ -177,7 +177,7 @@ class SaltCoverageTestingParser(SaltTestingParser):
        # Update environ so that any subprocess started on tests are also
        # included in the report
        coverage_options['data_suffix'] = True
        os.environ['COVERAGE_PROCESS_START'] = '1'
        os.environ['COVERAGE_PROCESS_START'] = ''
        os.environ['COVERAGE_OPTIONS'] = salt.utils.json.dumps(coverage_options)

        # Setup coverage
@@ -858,3 +858,21 @@ SwapTotal: 4789244 kB'''
                   'options': []}}
        with patch.object(salt.utils.dns, 'parse_resolv', MagicMock(return_value=resolv_mock)):
            assert core.dns() == ret

    def test_core_virtual(self):
        '''
        test virtual grain with cmd virt-what
        '''
        virt = 'kvm'
        with patch.object(salt.utils, 'is_windows',
                          MagicMock(return_value=False)):
            with patch.object(salt.utils, 'which',
                              MagicMock(return_value=True)):
                with patch.dict(core.__salt__, {'cmd.run_all':
                                                MagicMock(return_value={'pid': 78,
                                                                        'retcode': 0,
                                                                        'stderr': '',
                                                                        'stdout': virt})}):
                    osdata = {'kernel': 'test', }
                    ret = core._virtual(osdata)
                    self.assertEqual(ret['virtual'], virt)
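The new grains test relies on unittest.mock's patch.object (swap an attribute on a module or object) and patch.dict (overlay dict entries for the duration of the block). A tiny hedged illustration with throwaway objects, not Salt's loader dunders:

```python
try:
    from unittest.mock import MagicMock, patch  # Python 3
except ImportError:
    from mock import MagicMock, patch  # Python 2 backport

import types

# Hypothetical module-like object and loader dict to patch.
fake_utils = types.ModuleType('fake_utils')
fake_utils.is_windows = lambda: True
__salt__ = {}

with patch.object(fake_utils, 'is_windows', MagicMock(return_value=False)):
    with patch.dict(__salt__, {'cmd.run_all': MagicMock(return_value={'retcode': 0,
                                                                      'stdout': 'kvm'})}):
        assert fake_utils.is_windows() is False
        assert __salt__['cmd.run_all']()['stdout'] == 'kvm'

# Outside the context managers both patches are rolled back.
assert fake_utils.is_windows() is True
assert 'cmd.run_all' not in __salt__
```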
@@ -19,11 +19,10 @@
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import errno
import subprocess

# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.helpers import no_symlinks
from tests.support.mock import (
    MagicMock,
    patch,

@@ -35,31 +34,6 @@ from tests.support.mock import (
from salt.modules.inspectlib.collector import Inspector


HAS_SYMLINKS = None


def no_symlinks():
    '''
    Check if git is installed and has symlinks enabled in the configuration.
    '''
    global HAS_SYMLINKS
    if HAS_SYMLINKS is not None:
        return not HAS_SYMLINKS
    output = ''
    try:
        output = subprocess.check_output('git config --get core.symlinks', shell=True)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise
    except subprocess.CalledProcessError:
        # git returned non-zero status
        pass
    HAS_SYMLINKS = False
    if output.strip() == 'true':
        HAS_SYMLINKS = True
    return not HAS_SYMLINKS


@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(no_symlinks(), "Git missing 'core.symlinks=true' config")
class InspectorCollectorTestCase(TestCase):
@@ -60,38 +60,38 @@ class IptablesTestCase(TestCase, LoaderModuleMockMixin):
        self.assertEqual(iptables.build_rule(**{'if': 'not eth0'}),
                         '! -i eth0')

        self.assertEqual(iptables.build_rule(**{'proto': 'tcp', 'syn': '!'}),
        self.assertEqual(iptables.build_rule(**{'protocol': 'tcp', 'syn': '!'}),
                         '-p tcp ! --syn')

        self.assertEqual(iptables.build_rule(dports=[80, 443], proto='tcp'),
        self.assertEqual(iptables.build_rule(dports=[80, 443], protocol='tcp'),
                         '-p tcp -m multiport --dports 80,443')

        self.assertEqual(iptables.build_rule(dports='80,443', proto='tcp'),
        self.assertEqual(iptables.build_rule(dports='80,443', protocol='tcp'),
                         '-p tcp -m multiport --dports 80,443')

        # Should it really behave this way?
        self.assertEqual(iptables.build_rule(dports=['!80', 443],
                                             proto='tcp'),
                                             protocol='tcp'),
                         '-p tcp -m multiport ! --dports 80,443')

        self.assertEqual(iptables.build_rule(dports='!80,443', proto='tcp'),
        self.assertEqual(iptables.build_rule(dports='!80,443', protocol='tcp'),
                         '-p tcp -m multiport ! --dports 80,443')

        self.assertEqual(iptables.build_rule(sports=[80, 443], proto='tcp'),
        self.assertEqual(iptables.build_rule(sports=[80, 443], protocol='tcp'),
                         '-p tcp -m multiport --sports 80,443')

        self.assertEqual(iptables.build_rule(sports='80,443', proto='tcp'),
        self.assertEqual(iptables.build_rule(sports='80,443', protocol='tcp'),
                         '-p tcp -m multiport --sports 80,443')

        self.assertEqual(iptables.build_rule('filter', 'INPUT', command='I',
                                             position='3', full=True,
                                             dports='proto', jump='ACCEPT'),
                         'Error: proto must be specified')
                                             dports='protocol', jump='ACCEPT'),
                         'Error: protocol must be specified')

        self.assertEqual(iptables.build_rule('filter', 'INPUT', command='I',
                                             position='3', full=True,
                                             sports='proto', jump='ACCEPT'),
                         'Error: proto must be specified')
                                             sports='protocol', jump='ACCEPT'),
                         'Error: protocol must be specified')

        self.assertEqual(iptables.build_rule('', 'INPUT', command='I',
                                             position='3', full='True',
@@ -94,18 +94,16 @@ class SSHAuthKeyTestCase(TestCase, LoaderModuleMockMixin):
        comment_line = '# this is a comment\n'

        # Write out the authorized key to a temporary file
        if salt.utils.platform.is_windows():
            temp_file = tempfile.NamedTemporaryFile(delete=False)
        else:
            temp_file = tempfile.NamedTemporaryFile(delete=False, mode='w+')

        # Add comment
        temp_file.write(comment_line)
        # Add empty line for #41335
        temp_file.write(empty_line)
        temp_file.write('{0} {1} {2} {3}'.format(options, enc, key, email))
        temp_file = tempfile.NamedTemporaryFile(delete=False, mode='w+')
        temp_file.close()

        with salt.utils.files.fopen(temp_file.name, 'w') as _fh:
            # Add comment
            _fh.write(comment_line)
            # Add empty line for #41335
            _fh.write(empty_line)
            _fh.write('{0} {1} {2} {3}'.format(options, enc, key, email))

        with patch.dict(ssh.__salt__, {'user.info': MagicMock(return_value={})}):
            with patch('salt.modules.ssh._get_config_file', MagicMock(return_value=temp_file.name)):
                ssh._replace_auth_key('foo', key, config=temp_file.name)
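The rewritten test creates the NamedTemporaryFile only to reserve a path, closes it, and then writes through a separate handle; that sidesteps the Windows restriction that an open NamedTemporaryFile cannot be reopened by name. A hedged sketch of the same pattern using the builtin open() in place of salt.utils.files.fopen, with placeholder key material:

```python
import os
import tempfile

# Create the file only to get a unique path; close it immediately so the
# path can be re-opened on any platform.
temp_file = tempfile.NamedTemporaryFile(delete=False, mode='w+')
temp_file.close()

with open(temp_file.name, 'w') as _fh:
    _fh.write('# this is a comment\n')
    _fh.write('\n')
    _fh.write('ssh-rsa AAAAB3... example@example.com\n')  # placeholder key material

with open(temp_file.name) as _fh:
    print(_fh.read())

os.remove(temp_file.name)
```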
@@ -11,6 +11,16 @@ from tests.support.mock import NO_MOCK, NO_MOCK_REASON
import salt.pillar.mysql as mysql


def sorted_result(result):
    sorted_result = {}
    for x in result:
        sorted_result[x] = sorted(result[x])
        for y in sorted_result[x]:
            for z in y:
                y[z] = sorted(y[z])
    return sorted_result


@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not mysql.HAS_MYSQL, 'MySQL-python module not installed')
class MysqlPillarTestCase(TestCase):

@@ -541,7 +551,7 @@ class MysqlPillarTestCase(TestCase):
                    ]
                }
            ]},
            return_data.result
            sorted_result(return_data.result)
        )

    def test_302_process_results_with_lists_consecutive(self):

@@ -566,5 +576,5 @@ class MysqlPillarTestCase(TestCase):
                    ]
                ]
            ]},
            return_data.result
            sorted_result(return_data.result)
        )
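A quick usage note for the sorted_result() helper added above: it normalises a mapping of lists of dicts so assertions do not depend on ordering. The sketch below is a lightly adapted copy (the inner accumulator is renamed to avoid shadowing the function name) run against made-up pillar-style data:

```python
def sorted_result(result):
    '''Sort each nested list so comparisons ignore ordering (adapted copy of the helper above).'''
    out = {}
    for x in result:
        out[x] = sorted(result[x])
        for y in out[x]:
            for z in y:
                y[z] = sorted(y[z])
    return out


# Made-up pillar-style data: the inner lists differ only in ordering, so the
# normalised forms compare equal.
left = {'a': [{'hosts': ['web02', 'web01'], 'roles': ['db', 'cache']}]}
right = {'a': [{'hosts': ['web01', 'web02'], 'roles': ['cache', 'db']}]}
assert sorted_result(left) == sorted_result(right)
```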
@@ -207,7 +207,7 @@ class PipStateTest(TestCase, SaltReturnAssertsMixin, LoaderModuleMockMixin):
            )
            self.assertSaltTrueReturn({'test': ret})
            self.assertInSaltComment(
                'successfully installed',
                'packages are already installed',
                {'test': ret}
            )

@@ -241,7 +241,7 @@ class PipStateTest(TestCase, SaltReturnAssertsMixin, LoaderModuleMockMixin):
            )
            self.assertSaltTrueReturn({'test': ret})
            self.assertInSaltComment(
                'were successfully installed',
                'packages are already installed',
                {'test': ret}
            )

@@ -264,7 +264,7 @@ class PipStateTest(TestCase, SaltReturnAssertsMixin, LoaderModuleMockMixin):
            )
            self.assertSaltTrueReturn({'test': ret})
            self.assertInSaltComment(
                'were successfully installed',
                'packages are already installed',
                {'test': ret}
            )
@@ -11,6 +11,7 @@ import os
# Import Salt Testing libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, MagicMock
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
from tests.support.helpers import skip_if_not_root
# Import salt libs
import salt.minion

@@ -24,7 +25,7 @@ __opts__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class MinionTestCase(TestCase):
class MinionTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
    def test_invalid_master_address(self):
        with patch.dict(__opts__, {'ipv6': False, 'master': float('127.0'), 'master_port': '4555', 'retry_dns': False}):
            self.assertRaises(SaltSystemExit, salt.minion.resolve_dns, __opts__)

@@ -263,7 +264,7 @@ class MinionTestCase(TestCase):
            patch('salt.minion.Minion.sync_connect_master', MagicMock(side_effect=RuntimeError('stop execution'))), \
            patch('salt.utils.process.SignalHandlingMultiprocessingProcess.start', MagicMock(return_value=True)), \
            patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True)):
            mock_opts = copy.copy(salt.config.DEFAULT_MINION_OPTS)
            mock_opts = self.get_config('minion', from_scratch=True)
            mock_opts['beacons_before_connect'] = True
            minion = salt.minion.Minion(mock_opts, io_loop=tornado.ioloop.IOLoop())
            try:

@@ -287,7 +288,7 @@ class MinionTestCase(TestCase):
            patch('salt.minion.Minion.sync_connect_master', MagicMock(side_effect=RuntimeError('stop execution'))), \
            patch('salt.utils.process.SignalHandlingMultiprocessingProcess.start', MagicMock(return_value=True)), \
            patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True)):
            mock_opts = copy.copy(salt.config.DEFAULT_MINION_OPTS)
            mock_opts = self.get_config('minion', from_scratch=True)
            mock_opts['scheduler_before_connect'] = True
            minion = salt.minion.Minion(mock_opts, io_loop=tornado.ioloop.IOLoop())
            try:
@@ -4,22 +4,27 @@ integration.grains.test_core
integration.loader.test_ext_grains
integration.loader.test_ext_modules
integration.modules.test_aliases
integration.modules.test_autoruns
integration.modules.test_beacons
integration.modules.test_config
integration.modules.test_cp
integration.modules.test_data
integration.modules.test_disk
integration.modules.test_firewall
integration.modules.test_git
integration.modules.test_grains
integration.modules.test_groupadd
integration.modules.test_hosts
integration.modules.test_mine
integration.modules.test_network
integration.modules.test_ntp
integration.modules.test_pillar
integration.modules.test_pkg
integration.modules.test_publish
integration.modules.test_state
integration.modules.test_status
integration.modules.test_sysmod
integration.modules.test_system
integration.modules.test_test
integration.modules.test_useradd
integration.reactor.test_reactor