Merge pull request #45131 from rallytime/merge-oxygen

[oxygen] Merge forward from 2017.7 to oxygen
Commit f978bf3944 by Nicole Thomas, 2017-12-22 09:35:02 -05:00, committed via GitHub
20 changed files with 479 additions and 468 deletions


@ -24,7 +24,7 @@ transport:
name: sftp
<% end %>
sudo: false
sudo: true
provisioner:
name: salt_solo
salt_install: bootstrap
@ -184,10 +184,9 @@ suites:
verifier:
name: shell
remote_exec: true
sudo: false
live_stream: {}
<% if ENV['TESTOPTS'].nil? %>
command: '$(kitchen) /tmp/kitchen/testing/tests/runtests.py --run-destructive --sysinfo --transport=zeromq --output-columns=80 --ssh --coverage-xml=/tmp/coverage.xml --xml=/tmp/xml-unittests-output'
command: 'sudo -E $(kitchen) /tmp/kitchen/testing/tests/runtests.py -v --run-destructive --sysinfo --transport=zeromq --output-columns=80 --ssh --coverage-xml=/tmp/coverage.xml --xml=/tmp/xml-unittests-output'
<% else %>
command: '$(kitchen) /tmp/kitchen/testing/tests/runtests.py --run-destructive --output-columns 80 <%= ENV["TESTOPTS"] %>'
command: 'sudo -E $(kitchen) /tmp/kitchen/testing/tests/runtests.py -v --run-destructive --output-columns 80 <%= ENV["TESTOPTS"] %>'
<% end %>


@ -22,3 +22,7 @@ group :windows do
gem 'winrm', '~>2.0'
gem 'winrm-fs', '~>1.0'
end
group :ec2 do
gem 'kitchen-ec2'
end


@ -235,13 +235,13 @@
# cause sub minion process to restart.
#auth_safemode: False
# Ping Master to ensure connection is alive (seconds).
# Ping Master to ensure connection is alive (minutes).
#ping_interval: 0
# To auto recover minions if master changes IP address (DDNS)
# auth_tries: 10
# auth_safemode: False
# ping_interval: 90
# ping_interval: 2
#
# Minions won't know the master is missing until a ping fails. After the ping fails,
# the minion will attempt authentication, likely fail, and trigger a restart.


@ -1039,7 +1039,7 @@ restart.
Default: ``0``
Instructs the minion to ping its master(s) every n number of seconds. Used
Instructs the minion to ping its master(s) every n number of minutes. Used
primarily as a mitigation technique against minion disconnects.
.. code-block:: yaml

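Note the unit change here: ping_interval is now read as minutes rather than seconds. A minimal sketch (not Salt's scheduler code) of what the same setting now means:

ping_interval = 2                            # minutes, from the minion config
seconds_between_pings = ping_interval * 60   # 120 seconds between master pings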

@ -11,6 +11,7 @@ This section contains a list of the Python modules that are used to extend the v
../ref/auth/all/index
../ref/beacons/all/index
../ref/cache/all/index
../ref/clouds/all/index
../ref/engines/all/index
../ref/executors/all/index
../ref/file_server/all/index


@ -362,7 +362,7 @@ Section -Prerequisites
# /qb! used by 2008 installer
# It just ignores the unrecognized switches...
ClearErrors
ExecWait '"$INSTDIR\vcredist.exe" /qb! /passive /norestart' $0
ExecWait '"$INSTDIR\vcredist.exe" /qb! /quiet /norestart' $0
IfErrors 0 CheckVcRedistErrorCode
MessageBox MB_OK \
"$VcRedistName failed to install. Try installing the package manually." \


@ -18,7 +18,7 @@
#======================================================================================================================
set -o nounset # Treat unset variables as an error
__ScriptVersion="2017.08.17"
__ScriptVersion="2017.12.13"
__ScriptName="bootstrap-salt.sh"
__ScriptFullName="$0"
@ -710,10 +710,24 @@ else
SETUP_PY_INSTALL_ARGS=""
fi
# Handle the insecure flags
if [ "$_INSECURE_DL" -eq $BS_TRUE ]; then
_CURL_ARGS="${_CURL_ARGS} --insecure"
_FETCH_ARGS="${_FETCH_ARGS} --no-verify-peer"
_GPG_ARGS="${_GPG_ARGS} --keyserver-options no-check-cert"
_WGET_ARGS="${_WGET_ARGS} --no-check-certificate"
else
_GPG_ARGS="${_GPG_ARGS} --keyserver-options ca-cert-file=/etc/ssl/certs/ca-certificates.crt"
fi
# Export the http_proxy configuration to our current environment
if [ "${_HTTP_PROXY}" != "" ]; then
export http_proxy="$_HTTP_PROXY"
export https_proxy="$_HTTP_PROXY"
export http_proxy="${_HTTP_PROXY}"
export https_proxy="${_HTTP_PROXY}"
# Using "deprecated" option here, but that appears to be the only way to make it work.
# See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=818802
# and https://bugs.launchpad.net/ubuntu/+source/gnupg2/+bug/1625848
_GPG_ARGS="${_GPG_ARGS},http-proxy=${_HTTP_PROXY}"
fi
# Workaround for 'Docker + salt-bootstrap failure' https://github.com/saltstack/salt-bootstrap/issues/394
@ -737,15 +751,6 @@ if [ -d "${_VIRTUALENV_DIR}" ]; then
exit 1
fi
# Handle the insecure flags
if [ "$_INSECURE_DL" -eq $BS_TRUE ]; then
_CURL_ARGS="${_CURL_ARGS} --insecure"
_FETCH_ARGS="${_FETCH_ARGS} --no-verify-peer"
_GPG_ARGS="${_GPG_ARGS} --keyserver-options no-check-cert"
_WGET_ARGS="${_WGET_ARGS} --no-check-certificate"
else
_GPG_ARGS="${_GPG_ARGS} --keyserver-options ca-cert-file=/etc/ssl/certs/ca-certificates.crt"
fi
#--- FUNCTION -------------------------------------------------------------------------------------------------------
# NAME: __fetch_url
@ -915,7 +920,7 @@ __sort_release_files() {
done
# Now let's sort by known file importance; the most important goes last in the max_prio list
max_prio="redhat-release centos-release oracle-release"
max_prio="redhat-release centos-release oracle-release fedora-release"
for entry in $max_prio; do
if [ "$(echo "${primary_release_files}" | grep "$entry")" != "" ]; then
primary_release_files=$(echo "${primary_release_files}" | sed -e "s:\(.*\)\($entry\)\(.*\):\2 \1 \3:g")
@ -1001,7 +1006,6 @@ __gather_linux_system_info() {
echo redhat-release lsb-release
)"); do
[ -L "/etc/${rsource}" ] && continue # Don't follow symlinks
[ ! -f "/etc/${rsource}" ] && continue # Does not exist
n=$(echo "${rsource}" | sed -e 's/[_-]release$//' -e 's/[_-]version$//')
@ -1397,13 +1401,16 @@ __debian_derivatives_translation() {
# If the file does not exist, return
[ ! -f /etc/os-release ] && return
DEBIAN_DERIVATIVES="(cumulus_.+|kali|linuxmint|raspbian)"
DEBIAN_DERIVATIVES="(cumulus_.+|devuan|kali|linuxmint|raspbian)"
# Mappings
cumulus_2_debian_base="7.0"
cumulus_3_debian_base="8.0"
devuan_1_debian_base="8.0"
devuan_2_debian_base="9.0"
kali_1_debian_base="7.0"
linuxmint_1_debian_base="8.0"
raspbian_8_debian_base="8.0"
raspbian_9_debian_base="9.0"
# Translate Debian derivatives to their base Debian version
match=$(echo "$DISTRO_NAME_L" | egrep ${DEBIAN_DERIVATIVES})
@ -1414,6 +1421,10 @@ __debian_derivatives_translation() {
_major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g')
_debian_derivative="cumulus"
;;
devuan)
_major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g')
_debian_derivative="devuan"
;;
kali)
_major=$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g')
_debian_derivative="kali"
@ -1428,12 +1439,13 @@ __debian_derivatives_translation() {
;;
esac
_debian_version=$(eval echo "\$${_debian_derivative}_${_major}_debian_base")
_debian_version=$(eval echo "\$${_debian_derivative}_${_major}_debian_base" 2>/dev/null)
if [ "$_debian_version" != "" ]; then
echodebug "Detected Debian $_debian_version derivative"
DISTRO_NAME_L="debian"
DISTRO_VERSION="$_debian_version"
DISTRO_MAJOR_VERSION="$(echo "$DISTRO_VERSION" | sed 's/^\([0-9]*\).*/\1/g')"
fi
fi
}
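The shell above emulates a mapping lookup with eval, and the added 2>/dev/null silences lookups for unmapped derivative/version pairs. The same idea expressed as a Python dict (hypothetical names, for illustration only):

DEBIAN_BASE = {
    ('cumulus', '3'): '8.0',
    ('devuan', '1'): '8.0',
    ('devuan', '2'): '9.0',
    ('raspbian', '9'): '9.0',
}

def debian_base(derivative, major):
    # None for unmapped pairs, mirroring the 2>/dev/null guard above
    return DEBIAN_BASE.get((derivative, major))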
@ -1489,8 +1501,8 @@ __check_end_of_life_versions() {
# = 14.10
# = 15.04, 15.10
if [ "$DISTRO_MAJOR_VERSION" -lt 14 ] || \
[ "$DISTRO_MAJOR_VERSION" -eq 15 ] || \
([ "$DISTRO_MAJOR_VERSION" -lt 16 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]); then
[ "$DISTRO_MAJOR_VERSION" -eq 15 ] || \
([ "$DISTRO_MAJOR_VERSION" -lt 16 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]); then
echoerror "End of life distributions are not supported."
echoerror "Please consider upgrading to the next stable. See:"
echoerror " https://wiki.ubuntu.com/Releases"
@ -1501,8 +1513,10 @@ __check_end_of_life_versions() {
opensuse)
# openSUSE versions not supported
#
# <= 12.1
if ([ "$DISTRO_MAJOR_VERSION" -eq 12 ] && [ "$DISTRO_MINOR_VERSION" -eq 1 ]) || [ "$DISTRO_MAJOR_VERSION" -lt 12 ]; then
# <= 13.X
# <= 42.1
if [ "$DISTRO_MAJOR_VERSION" -le 13 ] || \
([ "$DISTRO_MAJOR_VERSION" -eq 42 ] && [ "$DISTRO_MINOR_VERSION" -le 1 ]); then
echoerror "End of life distributions are not supported."
echoerror "Please consider upgrading to the next stable. See:"
echoerror " http://en.opensuse.org/Lifetime"
@ -1513,21 +1527,25 @@ __check_end_of_life_versions() {
suse)
# SuSE versions not supported
#
# < 11 SP2
# < 11 SP4
# < 12 SP2
SUSE_PATCHLEVEL=$(awk '/PATCHLEVEL/ {print $3}' /etc/SuSE-release )
if [ "${SUSE_PATCHLEVEL}" = "" ]; then
SUSE_PATCHLEVEL="00"
fi
if ([ "$DISTRO_MAJOR_VERSION" -eq 11 ] && [ "$SUSE_PATCHLEVEL" -lt 02 ]) || [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then
echoerror "Versions lower than SuSE 11 SP2 are not supported."
if [ "$DISTRO_MAJOR_VERSION" -lt 11 ] || \
([ "$DISTRO_MAJOR_VERSION" -eq 11 ] && [ "$SUSE_PATCHLEVEL" -lt 04 ]) || \
([ "$DISTRO_MAJOR_VERSION" -eq 12 ] && [ "$SUSE_PATCHLEVEL" -lt 02 ]); then
echoerror "Versions lower than SuSE 11 SP4 or 12 SP2 are not supported."
echoerror "Please consider upgrading to the next stable"
echoerror " https://www.suse.com/lifecycle/"
exit 1
fi
;;
fedora)
# Fedora lower than 24 are no longer supported
if [ "$DISTRO_MAJOR_VERSION" -lt 24 ]; then
# Fedora lower than 25 are no longer supported
if [ "$DISTRO_MAJOR_VERSION" -lt 25 ]; then
echoerror "End of life distributions are not supported."
echoerror "Please consider upgrading to the next stable. See:"
echoerror " https://fedoraproject.org/wiki/Releases"
@ -1729,18 +1747,6 @@ if ([ "${DISTRO_NAME_L}" != "ubuntu" ] && [ $_PIP_ALL -eq $BS_TRUE ]); then
exit 1
fi
# Starting from Debian 9 and Ubuntu 16.10, gnupg-curl has been renamed to gnupg1-curl.
GNUPG_CURL="gnupg-curl"
if [ "$DISTRO_NAME_L" = "debian" ]; then
if [ "$DISTRO_MAJOR_VERSION" -gt 8 ]; then
GNUPG_CURL="gnupg1-curl"
fi
elif [ "$DISTRO_NAME_L" = "ubuntu" ]; then
if [ "${DISTRO_VERSION}" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
GNUPG_CURL="gnupg1-curl"
fi
fi
#--- FUNCTION -------------------------------------------------------------------------------------------------------
# NAME: __function_defined
@ -1786,8 +1792,6 @@ __apt_get_upgrade_noinput() {
__apt_key_fetch() {
url=$1
__apt_get_install_noinput ${GNUPG_CURL} || return 1
# shellcheck disable=SC2086
apt-key adv ${_GPG_ARGS} --fetch-keys "$url"; return $?
} # ---------- end of function __apt_key_fetch ----------
@ -2539,7 +2543,6 @@ __enable_universe_repository() {
}
__install_saltstack_ubuntu_repository() {
# Workaround for latest non-LTS ubuntu
if [ "$DISTRO_VERSION" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
echowarn "Non-LTS Ubuntu detected, but stable packages requested. Trying packages from latest LTS release. You may experience problems."
@ -2550,15 +2553,27 @@ __install_saltstack_ubuntu_repository() {
UBUNTU_CODENAME=$DISTRO_CODENAME
fi
# SaltStack's stable Ubuntu repository:
SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/apt/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${STABLE_REV}"
echo "deb $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/saltstack.list
__PACKAGES=''
# Install downloader backend for GPG keys fetching
if [ "$DISTRO_VERSION" = "16.10" ] || [ "$DISTRO_MAJOR_VERSION" -gt 16 ]; then
__PACKAGES="${__PACKAGES} gnupg2 dirmngr"
else
__PACKAGES="${__PACKAGES} gnupg-curl"
fi
# Make sure https transport is available
if [ "$HTTP_VAL" = "https" ] ; then
__apt_get_install_noinput apt-transport-https ca-certificates || return 1
__PACKAGES="${__PACKAGES} apt-transport-https ca-certificates"
fi
# shellcheck disable=SC2086,SC2090
__apt_get_install_noinput ${__PACKAGES} || return 1
# SaltStack's stable Ubuntu repository:
SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/apt/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${STABLE_REV}"
echo "deb $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/saltstack.list
__apt_key_fetch "$SALTSTACK_UBUNTU_URL/SALTSTACK-GPG-KEY.pub" || return 1
apt-get update
@ -2640,9 +2655,8 @@ install_ubuntu_stable_deps() {
__apt_get_upgrade_noinput || return 1
fi
__check_dpkg_architecture || return 1
if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
__check_dpkg_architecture || return 1
__install_saltstack_ubuntu_repository || return 1
fi
@ -2936,18 +2950,30 @@ __install_saltstack_debian_repository() {
DEBIAN_CODENAME="$DISTRO_CODENAME"
fi
__PACKAGES=''
# Install downloader backend for GPG keys fetching
if [ "$DISTRO_MAJOR_VERSION" -ge 9 ]; then
__PACKAGES="${__PACKAGES} gnupg2 dirmngr"
else
__PACKAGES="${__PACKAGES} gnupg-curl"
fi
# Make sure https transport is available
if [ "$HTTP_VAL" = "https" ] ; then
__PACKAGES="${__PACKAGES} apt-transport-https ca-certificates"
fi
# shellcheck disable=SC2086,SC2090
__apt_get_install_noinput ${__PACKAGES} || return 1
# amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location
SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/apt/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/${STABLE_REV}"
echo "deb $SALTSTACK_DEBIAN_URL $DEBIAN_CODENAME main" > "/etc/apt/sources.list.d/saltstack.list"
if [ "$HTTP_VAL" = "https" ] ; then
__apt_get_install_noinput apt-transport-https ca-certificates || return 1
fi
__apt_key_fetch "$SALTSTACK_DEBIAN_URL/SALTSTACK-GPG-KEY.pub" || return 1
apt-get update
}
install_debian_deps() {
@ -2970,23 +2996,17 @@ install_debian_deps() {
__apt_get_upgrade_noinput || return 1
fi
__check_dpkg_architecture || return 1
# Additionally install procps and pciutils, which allow for Docker bootstraps. See 366#issuecomment-39666813
__PACKAGES='procps pciutils'
# YAML module is used for generating custom master/minion configs
__PACKAGES="${__PACKAGES} python-yaml"
# Debian 9 needs the dirmgr package in order to import the GPG key later
if [ "$DISTRO_MAJOR_VERSION" -ge 9 ]; then
__PACKAGES="${__PACKAGES} dirmngr"
fi
# shellcheck disable=SC2086
__apt_get_install_noinput ${__PACKAGES} || return 1
if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
__check_dpkg_architecture || return 1
__install_saltstack_debian_repository || return 1
fi
@ -3318,7 +3338,13 @@ install_fedora_deps() {
__install_saltstack_copr_salt_repository || return 1
fi
__PACKAGES="yum-utils PyYAML libyaml python-crypto python-jinja2 python-zmq python2-msgpack python2-requests"
__PACKAGES="PyYAML libyaml python-crypto python-jinja2 python-zmq python2-msgpack python2-requests"
if [ "$DISTRO_MAJOR_VERSION" -lt 26 ]; then
__PACKAGES="${__PACKAGES} yum-utils"
else
__PACKAGES="${__PACKAGES} dnf-utils"
fi
# shellcheck disable=SC2086
dnf install -y ${__PACKAGES} || return 1
@ -4404,7 +4430,7 @@ install_amazon_linux_ami_deps() {
repo_rev="$(echo "${STABLE_REV}" | sed 's|.*\/||g')"
if echo "$repo_rev" | egrep -q '^(latest|2016\.11)$' || \
( echo "$repo_rev" | egrep -q '^[0-9]+$' && [ "$(echo "$repo_rev" | cut -c1-4)" -gt 2016 ] ); then
[ "$(echo "$repo_rev" | cut -c1-4)" -gt 2016 ]; then
_USEAWS=$BS_TRUE
pkg_append="python27"
fi
@ -5075,7 +5101,7 @@ install_freebsd_restart_daemons() {
__choose_openbsd_mirror() {
OPENBSD_REPO=''
MINTIME=''
MIRROR_LIST=$(ftp -w 15 -Vao - 'http://ftp.openbsd.org/cgi-bin/ftplist.cgi?dbversion=1' | awk '/^http/ {print $1}')
MIRROR_LIST=$(ftp -w 15 -Vao - 'https://ftp.openbsd.org/cgi-bin/ftplist.cgi?dbversion=1' | awk '/^http/ {print $1}')
for MIRROR in $MIRROR_LIST; do
MIRROR_HOST=$(echo "$MIRROR" | sed -e 's|.*//||' -e 's|+*/.*$||')
@ -5096,10 +5122,12 @@ __choose_openbsd_mirror() {
}
install_openbsd_deps() {
__choose_openbsd_mirror || return 1
echoinfo "setting package repository to $OPENBSD_REPO with ping time of $MINTIME"
[ -n "$OPENBSD_REPO" ] || return 1
echo "${OPENBSD_REPO}" >>/etc/installurl || return 1
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
__choose_openbsd_mirror || return 1
echoinfo "setting package repository to $OPENBSD_REPO with ping time of $MINTIME"
[ -n "$OPENBSD_REPO" ] || return 1
echo "${OPENBSD_REPO}" >>/etc/installurl || return 1
fi
if [ "${_EXTRA_PACKAGES}" != "" ]; then
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
@ -5383,9 +5411,30 @@ install_smartos_restart_daemons() {
#
__ZYPPER_REQUIRES_REPLACE_FILES=-1
__set_suse_pkg_repo() {
# Set distro repo variable
if [ "${DISTRO_MAJOR_VERSION}" -gt 2015 ]; then
DISTRO_REPO="openSUSE_Tumbleweed"
elif [ "${DISTRO_MAJOR_VERSION}" -ge 42 ]; then
DISTRO_REPO="openSUSE_Leap_${DISTRO_MAJOR_VERSION}.${DISTRO_MINOR_VERSION}"
elif [ "${DISTRO_MAJOR_VERSION}" -lt 42 ]; then
DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}_SP${SUSE_PATCHLEVEL}"
fi
if [ "$_DOWNSTREAM_PKG_REPO" -eq $BS_TRUE ]; then
suse_pkg_url_base="https://download.opensuse.org/repositories/systemsmanagement:/saltstack"
suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack.repo"
else
suse_pkg_url_base="${HTTP_VAL}://repo.saltstack.com/opensuse"
suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack:products.repo"
fi
SUSE_PKG_URL="$suse_pkg_url_base/$suse_pkg_url_path"
}
__check_and_refresh_suse_pkg_repo() {
# Check to see if systemsmanagement_saltstack exists
__zypper repos | grep systemsmanagement_saltstack >/dev/null 2>&1
__zypper repos | grep -q systemsmanagement_saltstack
if [ $? -eq 1 ]; then
# zypper does not yet know anything about systemsmanagement_saltstack
@ -5393,16 +5442,6 @@ __check_and_refresh_suse_pkg_repo() {
fi
}
__set_suse_pkg_repo() {
suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack.repo"
if [ "$_DOWNSTREAM_PKG_REPO" -eq $BS_TRUE ]; then
suse_pkg_url_base="http://download.opensuse.org/repositories/systemsmanagement:/saltstack"
else
suse_pkg_url_base="${HTTP_VAL}://repo.saltstack.com/opensuse"
fi
SUSE_PKG_URL="$suse_pkg_url_base/$suse_pkg_url_path"
}
__version_lte() {
if ! __check_command_exists python; then
zypper zypper --non-interactive install --replacefiles --auto-agree-with-licenses python || \
@ -5435,14 +5474,6 @@ __zypper_install() {
}
install_opensuse_stable_deps() {
if [ "${DISTRO_MAJOR_VERSION}" -gt 2015 ]; then
DISTRO_REPO="openSUSE_Tumbleweed"
elif [ "${DISTRO_MAJOR_VERSION}" -ge 42 ]; then
DISTRO_REPO="openSUSE_Leap_${DISTRO_MAJOR_VERSION}.${DISTRO_MINOR_VERSION}"
elif [ "${DISTRO_MAJOR_VERSION}" -lt 42 ]; then
DISTRO_REPO="openSUSE_${DISTRO_MAJOR_VERSION}.${DISTRO_MINOR_VERSION}"
fi
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
# Is the repository already known
__set_suse_pkg_repo
@ -5466,25 +5497,14 @@ install_opensuse_stable_deps() {
__zypper --gpg-auto-import-keys update || return 1
fi
# YAML module is used for generating custom master/minion configs
# requests is still used by many salt modules
# Salt needs python-zypp installed in order to use the zypper module
__PACKAGES="python-zypp"
__PACKAGES="${__PACKAGES} python python-Jinja2 python-M2Crypto python-PyYAML python-requests"
__PACKAGES="${__PACKAGES} python-msgpack-python python-pycrypto python-pyzmq python-xml"
if [ "$DISTRO_MAJOR_VERSION" -lt 13 ]; then
__PACKAGES="${__PACKAGES} libzmq3"
elif [ "$DISTRO_MAJOR_VERSION" -eq 13 ]; then
__PACKAGES="${__PACKAGES} libzmq3"
elif [ "$DISTRO_MAJOR_VERSION" -gt 13 ]; then
__PACKAGES="${__PACKAGES} libzmq5"
fi
__PACKAGES="python-PyYAML python-requests python-zypp"
# shellcheck disable=SC2086
__zypper_install ${__PACKAGES} || return 1
# Fix for OpenSUSE 13.2 and 2015.8 - gcc should not be required. Work around until package is fixed by SuSE
_EXTRA_PACKAGES="${_EXTRA_PACKAGES} gcc python-devel libgit2-devel"
if [ "${_EXTRA_PACKAGES}" != "" ]; then
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
# shellcheck disable=SC2086
@ -5509,7 +5529,7 @@ install_opensuse_git_deps() {
__git_clone_and_checkout || return 1
__PACKAGES=""
__PACKAGES="libzmq5 python-Jinja2 python-msgpack-python python-pycrypto python-pyzmq python-xml"
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
# We're on the develop branch, install whichever tornado is on the requirements file
@ -5678,18 +5698,6 @@ install_opensuse_check_services() {
#
install_suse_12_stable_deps() {
SUSE_PATCHLEVEL=$(awk '/PATCHLEVEL/ {print $3}' /etc/SuSE-release )
if [ "${SUSE_PATCHLEVEL}" != "" ]; then
DISTRO_PATCHLEVEL="_SP${SUSE_PATCHLEVEL}"
fi
DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}${DISTRO_PATCHLEVEL}"
# SLES 12 repo name does not use a patch level so PATCHLEVEL will need to be updated with SP1
#DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}${DISTRO_PATCHLEVEL}"
DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}"
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
# Is the repository already known
__set_suse_pkg_repo
@ -5703,18 +5711,10 @@ install_suse_12_stable_deps() {
__zypper --gpg-auto-import-keys update || return 1
fi
# YAML module is used for generating custom master/minion configs
# requests is still used by many salt modules
# Salt needs python-zypp installed in order to use the zypper module
__PACKAGES="python-zypp"
# shellcheck disable=SC2089
__PACKAGES="${__PACKAGES} libzmq5 python python-Jinja2 python-msgpack-python"
__PACKAGES="${__PACKAGES} python-pycrypto python-pyzmq python-pip python-xml python-requests"
if [ "$SUSE_PATCHLEVEL" -eq 1 ]; then
__check_pip_allowed
echowarn "PyYaml will be installed using pip"
else
__PACKAGES="${__PACKAGES} python-PyYAML"
fi
__PACKAGES="python-PyYAML python-requests python-zypp"
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
__PACKAGES="${__PACKAGES} python-apache-libcloud"
@ -5723,41 +5723,6 @@ install_suse_12_stable_deps() {
# shellcheck disable=SC2086,SC2090
__zypper_install ${__PACKAGES} || return 1
if [ "$SUSE_PATCHLEVEL" -eq 1 ]; then
# There's no python-PyYaml in SP1, let's install it using pip
pip install PyYaml || return 1
fi
# PIP based installs need to copy configuration files "by hand".
if [ "$SUSE_PATCHLEVEL" -eq 1 ]; then
# Let's trigger config_salt()
if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
# Let's set the configuration directory to /tmp
_TEMP_CONFIG_DIR="/tmp"
CONFIG_SALT_FUNC="config_salt"
for fname in api master minion syndic; do
# Skip salt-api since there is no example config for it in the Salt git repo
[ $fname = "api" ] && continue
# Skip if not meant to be installed
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
# Syndic uses the same configuration file as the master
[ $fname = "syndic" ] && fname=master
# Let's download, since they were not provided, the default configuration files
if [ ! -f "$_SALT_ETC_DIR/$fname" ] && [ ! -f "$_TEMP_CONFIG_DIR/$fname" ]; then
# shellcheck disable=SC2086
curl $_CURL_ARGS -s -o "$_TEMP_CONFIG_DIR/$fname" -L \
"https://raw.githubusercontent.com/saltstack/salt/develop/conf/$fname" || return 1
fi
done
fi
fi
if [ "${_EXTRA_PACKAGES}" != "" ]; then
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
# shellcheck disable=SC2086
@ -5777,6 +5742,9 @@ install_suse_12_git_deps() {
__git_clone_and_checkout || return 1
__PACKAGES=""
# shellcheck disable=SC2089
__PACKAGES="${__PACKAGES} libzmq3 python-Jinja2 python-msgpack-python python-pycrypto"
__PACKAGES="${__PACKAGES} python-pyzmq python-xml"
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
# We're on the develop branch, install whichever tornado is on the requirements file
@ -5803,14 +5771,7 @@ install_suse_12_git_deps() {
}
install_suse_12_stable() {
if [ "$SUSE_PATCHLEVEL" -gt 1 ]; then
install_opensuse_stable || return 1
else
# USE_SETUPTOOLS=1 To work around
# error: option --single-version-externally-managed not recognized
USE_SETUPTOOLS=1 pip install salt || return 1
fi
install_opensuse_stable || return 1
return 0
}
@ -5820,34 +5781,7 @@ install_suse_12_git() {
}
install_suse_12_stable_post() {
if [ "$SUSE_PATCHLEVEL" -gt 1 ]; then
install_opensuse_stable_post || return 1
else
for fname in api master minion syndic; do
# Skip if not meant to be installed
[ $fname = "api" ] && \
([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! __check_command_exists "salt-${fname}") && continue
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
if [ -f /bin/systemctl ]; then
# shellcheck disable=SC2086
curl $_CURL_ARGS -L "https://github.com/saltstack/salt/raw/develop/pkg/salt-$fname.service" \
-o "/usr/lib/systemd/system/salt-$fname.service" || return 1
fi
# Skip salt-api since the service should be opt-in and not necessarily started on boot
[ $fname = "api" ] && continue
if [ -f /bin/systemctl ]; then
systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service)
sleep 0.1
systemctl daemon-reload
fi
done
fi
install_opensuse_stable_post || return 1
return 0
}
@ -5872,16 +5806,6 @@ install_suse_12_restart_daemons() {
#
install_suse_11_stable_deps() {
SUSE_PATCHLEVEL=$(awk '/PATCHLEVEL/ {print $3}' /etc/SuSE-release )
if [ "${SUSE_PATCHLEVEL}" != "" ]; then
if [ "${SUSE_PATCHLEVEL}" != "4" ]; then
echowarn "Salt packages for SLE 11 are only built for SP4."
echowarn "Attempting to install SP4 packages on SP${SUSE_PATCHLEVEL}."
fi
DISTRO_PATCHLEVEL="_SP4"
fi
DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}${DISTRO_PATCHLEVEL}"
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
# Is the repository already known
__set_suse_pkg_repo
@ -5895,57 +5819,12 @@ install_suse_11_stable_deps() {
__zypper --gpg-auto-import-keys update || return 1
fi
# Salt needs python-zypp installed in order to use the zypper module
__PACKAGES="python-zypp"
# shellcheck disable=SC2089
__PACKAGES="${__PACKAGES} libzmq5 python python-Jinja2 python-msgpack-python"
__PACKAGES="${__PACKAGES} python-pycrypto python-pyzmq python-pip python-xml python-requests"
if [ "$SUSE_PATCHLEVEL" -eq 1 ]; then
__check_pip_allowed
echowarn "PyYaml will be installed using pip"
else
__PACKAGES="${__PACKAGES} python-PyYAML"
fi
# YAML module is used for generating custom master/minion configs
__PACKAGES="python-PyYAML"
# shellcheck disable=SC2086,SC2090
__zypper_install ${__PACKAGES} || return 1
if [ "$SUSE_PATCHLEVEL" -eq 1 ]; then
# There's no python-PyYaml in SP1, let's install it using pip
pip install PyYaml || return 1
fi
# PIP based installs need to copy configuration files "by hand".
if [ "$SUSE_PATCHLEVEL" -eq 1 ]; then
# Let's trigger config_salt()
if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
# Let's set the configuration directory to /tmp
_TEMP_CONFIG_DIR="/tmp"
CONFIG_SALT_FUNC="config_salt"
for fname in api master minion syndic; do
# Skip salt-api since there is no example config for it in the Salt git repo
[ $fname = "api" ] && continue
# Skip if not meant to be installed
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
# Syndic uses the same configuration file as the master
[ $fname = "syndic" ] && fname=master
# Let's download, since they were not provided, the default configuration files
if [ ! -f "$_SALT_ETC_DIR/$fname" ] && [ ! -f "$_TEMP_CONFIG_DIR/$fname" ]; then
# shellcheck disable=SC2086
curl $_CURL_ARGS -s -o "$_TEMP_CONFIG_DIR/$fname" -L \
"https://raw.githubusercontent.com/saltstack/salt/develop/conf/$fname" || return 1
fi
done
fi
fi
if [ "${_EXTRA_PACKAGES}" != "" ]; then
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
# shellcheck disable=SC2086
@ -5965,6 +5844,9 @@ install_suse_11_git_deps() {
__git_clone_and_checkout || return 1
__PACKAGES=""
# shellcheck disable=SC2089
__PACKAGES="${__PACKAGES} libzmq4 python-Jinja2 python-msgpack-python python-pycrypto"
__PACKAGES="${__PACKAGES} python-pyzmq python-xml python-zypp"
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
# We're on the develop branch, install whichever tornado is on the requirements file
@ -5991,13 +5873,7 @@ install_suse_11_git_deps() {
}
install_suse_11_stable() {
if [ "$SUSE_PATCHLEVEL" -gt 1 ]; then
install_opensuse_stable || return 1
else
# USE_SETUPTOOLS=1 To work around
# error: option --single-version-externally-managed not recognized
USE_SETUPTOOLS=1 pip install salt || return 1
fi
install_opensuse_stable || return 1
return 0
}
@ -6007,32 +5883,7 @@ install_suse_11_git() {
}
install_suse_11_stable_post() {
if [ "$SUSE_PATCHLEVEL" -gt 1 ]; then
install_opensuse_stable_post || return 1
else
for fname in api master minion syndic; do
# Skip if not meant to be installed
[ $fname = "api" ] && \
([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! __check_command_exists "salt-${fname}") && continue
[ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue
[ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue
[ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue
if [ -f /bin/systemctl ]; then
# shellcheck disable=SC2086
curl $_CURL_ARGS -L "https://github.com/saltstack/salt/raw/develop/pkg/salt-$fname.service" \
-o "/lib/systemd/system/salt-$fname.service" || return 1
continue
fi
# shellcheck disable=SC2086
curl $_CURL_ARGS -L "https://github.com/saltstack/salt/raw/develop/pkg/rpm/salt-$fname" \
-o "/etc/init.d/salt-$fname" || return 1
chmod +x "/etc/init.d/salt-$fname"
done
fi
install_opensuse_stable_post || return 1
return 0
}
@ -6050,6 +5901,8 @@ install_suse_11_restart_daemons() {
#
# End of SUSE Enterprise 11
#
#######################################################################################################################
#######################################################################################################################
#
# SUSE Enterprise General Functions
@ -6078,7 +5931,7 @@ install_suse_check_services() {
}
#
# SUSE Enterprise General Functions
# End of SUSE Enterprise General Functions
#
#######################################################################################################################
@ -6698,7 +6551,7 @@ for FUNC_NAME in $(__strip_duplicates "$CHECK_SERVICES_FUNC_NAMES"); do
done
echodebug "CHECK_SERVICES_FUNC=${CHECK_SERVICES_FUNC}"
if [ "$DEPS_INSTALL_FUNC" = "null" ]; then
if [ ${_NO_DEPS} -eq $BS_FALSE ] && [ "$DEPS_INSTALL_FUNC" = "null" ]; then
echoerror "No dependencies installation function found. Exiting..."
exit 1
fi
@ -6709,7 +6562,7 @@ if [ "$INSTALL_FUNC" = "null" ]; then
fi
# Install dependencies
if [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then
if [ ${_NO_DEPS} -eq $BS_FALSE ] && [ $_CONFIG_ONLY -eq $BS_FALSE ]; then
# Only execute the function if not in config-only mode
echoinfo "Running ${DEPS_INSTALL_FUNC}()"
$DEPS_INSTALL_FUNC
@ -6725,7 +6578,7 @@ if [ "$_CUSTOM_MASTER_CONFIG" != "null" ] || [ "$_CUSTOM_MINION_CONFIG" != "null
_TEMP_CONFIG_DIR="$_SALT_ETC_DIR"
fi
if [ "$_CONFIG_ONLY" -eq $BS_TRUE ]; then
if [ ${_NO_DEPS} -eq $BS_FALSE ] && [ $_CONFIG_ONLY -eq $BS_TRUE ]; then
# Execute function to satisfy dependencies for configuration step
echoinfo "Running ${DEPS_INSTALL_FUNC}()"
$DEPS_INSTALL_FUNC


@ -1005,7 +1005,7 @@ VALID_OPTS = {
'queue_dirs': list,
# Instructs the minion to ping its master(s) every n number of seconds. Used
# Instructs the minion to ping its master(s) every n number of minutes. Used
# primarily as a mitigation technique against minion disconnects.
'ping_interval': int,
@ -1180,6 +1180,9 @@ VALID_OPTS = {
# SSDP discovery pause between the attempts
'pause': int,
# Scheduler should be a dictionary
'schedule': dict,
}
# default configurations
@ -1471,6 +1474,7 @@ DEFAULT_MINION_OPTS = {
'match': 'any',
'mapping': {},
},
'schedule': {},
}
DEFAULT_MASTER_OPTS = {
@ -1793,6 +1797,7 @@ DEFAULT_MASTER_OPTS = {
'port': 4520,
'mapping': {},
},
'schedule': {},
}
@ -3754,10 +3759,6 @@ def apply_minion_config(overrides=None,
if 'ipc_write_buffer' not in overrides:
opts['ipc_write_buffer'] = 0
# if there is no schedule option yet, add an empty scheduler
if 'schedule' not in opts:
opts['schedule'] = {}
# Make sure hash_type is lowercase
opts['hash_type'] = opts['hash_type'].lower()
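With 'schedule': {} now present in both DEFAULT_MINION_OPTS and DEFAULT_MASTER_OPTS, every configuration starts with an empty scheduler and the removed backfill is unnecessary. A minimal sketch of the merge order (hypothetical, not Salt's full config pipeline):

def apply_config(overrides=None):
    defaults = {'schedule': {}}   # stands in for DEFAULT_MINION_OPTS
    opts = dict(defaults)
    opts.update(overrides or {})  # user config merges over the defaults
    return opts                   # 'schedule' is always present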


@ -810,12 +810,14 @@ def _virtual(osdata):
pass
if os.path.isfile('/proc/1/cgroup'):
try:
with salt.utils.files.fopen('/proc/1/cgroup', 'r') as fhr:
if ':/lxc/' in fhr.read():
grains['virtual_subtype'] = 'LXC'
with salt.utils.files.fopen('/proc/1/cgroup', 'r') as fhr:
fhr_contents = fhr.read()
if ':/docker/' in fhr_contents or ':/system.slice/docker' in fhr_contents:
if ':/lxc/' in fhr_contents:
grains['virtual_subtype'] = 'LXC'
else:
if any(x in fhr_contents
for x in (':/system.slice/docker', ':/docker/',
':/docker-ce/')):
grains['virtual_subtype'] = 'Docker'
except IOError:
pass
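A standalone sketch of the detection order above: the LXC marker wins when present; otherwise any of the three Docker cgroup markers sets the subtype.

def container_subtype(cgroup_contents):
    if ':/lxc/' in cgroup_contents:
        return 'LXC'
    if any(marker in cgroup_contents
           for marker in (':/system.slice/docker', ':/docker/', ':/docker-ce/')):
        return 'Docker'
    return None

# container_subtype('10:memory:/docker/a_long_sha256sum') -> 'Docker'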


@ -533,14 +533,14 @@ def tar(options, tarfile, sources=None, dest=None,
raise SaltInvocationError('Tar options can not be empty')
cmd = ['tar']
if dest:
cmd.extend(['-C', '{0}'.format(dest)])
if options:
cmd.extend(options.split())
cmd.extend(['{0}'.format(tarfile)])
cmd.extend(_expand_sources(sources))
if dest:
cmd.extend(['-C', '{0}'.format(dest)])
return __salt__['cmd.run'](cmd,
cwd=cwd,
template=template,
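The reordering matters because tar applies -C positionally: it only affects arguments that come after it, so appending it last made it a no-op. A sketch of the corrected ordering with hypothetical values:

cmd = ['tar']
cmd.extend(['-C', '/srv/extract_here'])  # change directory first
cmd.extend('-x -z -f'.split())           # then the caller's options
cmd.append('/tmp/backup.tar.gz')
# member names appended after this resolve under /srv/extract_here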


@ -3128,12 +3128,13 @@ def _getDataFromRegPolData(search_string, policy_data, return_value_name=False):
'''
value = None
values = []
encoded_semicolon = ';'.encode('utf-16-le')
if return_value_name:
values = {}
if search_string:
registry = Registry()
if len(search_string.split('{0};'.format(chr(0)))) >= 3:
vtype = registry.vtype_reverse[ord(search_string.split('{0};'.format(chr(0)))[2])]
if len(search_string.split(encoded_semicolon)) >= 3:
vtype = registry.vtype_reverse[ord(search_string.split(encoded_semicolon)[2].decode('utf-32-le'))]
else:
vtype = None
search_string = re.escape(search_string)
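The encoded delimiter is the crux: Registry.pol data is now handled as raw UTF-16LE bytes, and splitting on a bare ASCII b';' would cut between two-byte code units. For example:

data = '[key;value]'.encode('utf-16-le')
data.split(b';')                     # wrong: cuts mid code unit, leaving stray NUL bytes
data.split(';'.encode('utf-16-le'))  # right: b';\x00' keeps the code units intact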
@ -3141,29 +3142,28 @@ def _getDataFromRegPolData(search_string, policy_data, return_value_name=False):
matches = [m for m in matches]
if matches:
for match in matches:
pol_entry = policy_data[match.start():(policy_data.index(']',
pol_entry = policy_data[match.start():(policy_data.index(']'.encode('utf-16-le'),
match.end())
)
].split('{0};'.format(chr(0)))
].split(encoded_semicolon)
if len(pol_entry) >= 2:
valueName = pol_entry[1]
if len(pol_entry) >= 5:
value = pol_entry[4]
if vtype == 'REG_DWORD' or vtype == 'REG_QWORD':
if value:
vlist = list(ord(v) for v in value)
if vtype == 'REG_DWORD':
for v in struct.unpack('I', struct.pack('2H', *vlist)):
for v in struct.unpack('I', value):
value = v
elif vtype == 'REG_QWORD':
for v in struct.unpack('I', struct.pack('4H', *vlist)):
for v in struct.unpack('Q', value):
value = v
else:
value = 0
elif vtype == 'REG_MULTI_SZ':
value = value.rstrip(chr(0)).split(chr(0))
value = value.decode('utf-16-le').rstrip(chr(0)).split(chr(0))
else:
value = value.rstrip(chr(0))
value = value.decode('utf-16-le').rstrip(chr(0))
if return_value_name:
log.debug('we want value names and the value')
values[valueName] = value
@ -3274,35 +3274,52 @@ def _buildKnownDataSearchString(reg_key, reg_valueName, reg_vtype, reg_data,
'''
registry = Registry()
this_element_value = None
expected_string = ''
expected_string = b''
encoded_semicolon = ';'.encode('utf-16-le')
encoded_null = chr(0).encode('utf-16-le')
if reg_key:
reg_key = reg_key.encode('utf-16-le')
if reg_valueName:
reg_valueName = reg_valueName.encode('utf-16-le')
if reg_data and not check_deleted:
if reg_vtype == 'REG_DWORD':
this_element_value = ''
for v in struct.unpack('2H', struct.pack('I', int(reg_data))):
this_element_value = this_element_value + six.unichr(v)
elif reg_vtype == 'REG_QWORD':
this_element_value = ''
for v in struct.unpack('4H', struct.pack('I', int(reg_data))):
this_element_value = this_element_value + six.unichr(v)
this_element_value = struct.pack('I', int(reg_data))
elif reg_vtype == "REG_QWORD":
this_element_value = struct.pack('Q', int(reg_data))
elif reg_vtype == 'REG_SZ':
this_element_value = '{0}{1}'.format(reg_data, chr(0))
this_element_value = b''.join([reg_data.encode('utf-16-le'),
encoded_null])
if check_deleted:
reg_vtype = 'REG_SZ'
expected_string = u'[{1}{0};**del.{2}{0};{3}{0};{4}{0};{5}{0}]'.format(
chr(0),
reg_key,
reg_valueName,
chr(registry.vtype[reg_vtype]),
six.unichr(len(' {0}'.format(chr(0)).encode('utf-16-le'))),
' ')
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
'**del.'.encode('utf-16-le'),
reg_valueName,
encoded_null,
encoded_semicolon,
chr(registry.vtype[reg_vtype]).encode('utf-32-le'),
encoded_semicolon,
six.unichr(len(' {0}'.format(chr(0)).encode('utf-16-le'))).encode('utf-32-le'),
encoded_semicolon,
' '.encode('utf-16-le'),
encoded_null,
']'.encode('utf-16-le')])
else:
expected_string = u'[{1}{0};{2}{0};{3}{0};{4}{0};{5}]'.format(
chr(0),
reg_key,
reg_valueName,
chr(registry.vtype[reg_vtype]),
six.unichr(len(this_element_value.encode('utf-16-le'))),
this_element_value)
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
reg_valueName,
encoded_null,
encoded_semicolon,
chr(registry.vtype[reg_vtype]).encode('utf-32-le'),
encoded_semicolon,
six.unichr(len(this_element_value)).encode('utf-32-le'),
encoded_semicolon,
this_element_value,
']'.encode('utf-16-le')])
return expected_string
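The struct calls above replace the old detour through pairs of 16-bit code units: DWORD and QWORD payloads are packed directly as native 32- and 64-bit integers, which are little-endian on the Windows hosts this module targets. For instance:

import struct

packed = struct.pack('I', 1)            # b'\x01\x00\x00\x00', a REG_DWORD payload
(value,) = struct.unpack('I', packed)   # back to 1
packed_q = struct.pack('Q', 1)          # 8-byte REG_QWORD payload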
@ -3330,13 +3347,16 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
expected_string = None
# https://msdn.microsoft.com/en-us/library/dn606006(v=vs.85).aspx
this_vtype = 'REG_SZ'
standard_layout = u'[{1}{0};{2}{0};{3}{0};{4}{0};{5}]'
encoded_semicolon = ';'.encode('utf-16-le')
encoded_null = chr(0).encode('utf-16-le')
if reg_key:
reg_key = reg_key.encode('utf-16-le')
if reg_valuename:
reg_valuename = reg_valuename.encode('utf-16-le')
if etree.QName(element).localname == 'decimal' and etree.QName(parent_element).localname != 'elements':
this_vtype = 'REG_DWORD'
if 'value' in element.attrib:
this_element_value = ''
for val in struct.unpack('2H', struct.pack('I', int(element.attrib['value']))):
this_element_value = this_element_value + six.unichr(val)
this_element_value = struct.pack('I', int(element.attrib['value']))
else:
msg = ('The {2} child {1} element for the policy with attributes: '
'{0} does not have the required "value" attribute. The '
@ -3351,9 +3371,7 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
# server, so untested/assumed
this_vtype = 'REG_QWORD'
if 'value' in element.attrib:
this_element_value = ''
for val in struct.unpack('4H', struct.pack('I', int(element.attrib['value']))):
this_element_value = this_element_value + six.unichr(val)
this_element_value = struct.pack('Q', int(element.attrib['value']))
else:
msg = ('The {2} child {1} element for the policy with attributes: '
'{0} does not have the required "value" attribute. The '
@ -3365,7 +3383,8 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
return None
elif etree.QName(element).localname == 'string':
this_vtype = 'REG_SZ'
this_element_value = '{0}{1}'.format(element.text, chr(0))
this_element_value = b''.join([element.text.encode('utf-16-le'),
encoded_null])
elif etree.QName(parent_element).localname == 'elements':
standard_element_expected_string = True
if etree.QName(element).localname == 'boolean':
@ -3376,22 +3395,19 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
check_deleted = True
if not check_deleted:
this_vtype = 'REG_DWORD'
this_element_value = chr(1)
this_element_value = chr(1).encode('utf-16-le')
standard_element_expected_string = False
elif etree.QName(element).localname == 'decimal':
# https://msdn.microsoft.com/en-us/library/dn605987(v=vs.85).aspx
this_vtype = 'REG_DWORD'
requested_val = this_element_value
if this_element_value is not None:
temp_val = ''
for v in struct.unpack('2H', struct.pack('I', int(this_element_value))):
temp_val = temp_val + six.unichr(v)
this_element_value = temp_val
this_element_value = struct.pack('I', int(this_element_value))
if 'storeAsText' in element.attrib:
if element.attrib['storeAsText'].lower() == 'true':
this_vtype = 'REG_SZ'
if requested_val is not None:
this_element_value = str(requested_val)
this_element_value = str(requested_val).encode('utf-16-le')
if check_deleted:
this_vtype = 'REG_SZ'
elif etree.QName(element).localname == 'longDecimal':
@ -3399,15 +3415,12 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
this_vtype = 'REG_QWORD'
requested_val = this_element_value
if this_element_value is not None:
temp_val = ''
for v in struct.unpack('4H', struct.pack('I', int(this_element_value))):
temp_val = temp_val + six.unichr(v)
this_element_value = temp_val
this_element_value = struct.pack('Q', int(this_element_value))
if 'storeAsText' in element.attrib:
if element.attrib['storeAsText'].lower() == 'true':
this_vtype = 'REG_SZ'
if requested_val is not None:
this_element_value = str(requested_val)
this_element_value = str(requested_val).encode('utf-16-le')
elif etree.QName(element).localname == 'text':
# https://msdn.microsoft.com/en-us/library/dn605969(v=vs.85).aspx
this_vtype = 'REG_SZ'
@ -3415,14 +3428,15 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
if element.attrib['expandable'].lower() == 'true':
this_vtype = 'REG_EXPAND_SZ'
if this_element_value is not None:
this_element_value = '{0}{1}'.format(this_element_value, chr(0))
this_element_value = b''.join([this_element_value.encode('utf-16-le'),
encoded_null])
elif etree.QName(element).localname == 'multiText':
this_vtype = 'REG_MULTI_SZ'
if this_element_value is not None:
this_element_value = '{0}{1}{1}'.format(chr(0).join(this_element_value), chr(0))
elif etree.QName(element).localname == 'list':
standard_element_expected_string = False
del_keys = ''
del_keys = b''
element_valuenames = []
element_values = this_element_value
if this_element_value is not None:
@ -3431,12 +3445,20 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
if element.attrib['additive'].lower() == 'false':
# a delete values will be added before all the other
# value = data pairs
del_keys = u'[{1}{0};**delvals.{0};{2}{0};{3}{0};{4}{0}]'.format(
chr(0),
reg_key,
chr(registry.vtype[this_vtype]),
chr(len(' {0}'.format(chr(0)).encode('utf-16-le'))),
' ')
del_keys = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
'**delvals.'.encode('utf-16-le'),
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon,
chr(len(' {0}'.format(chr(0)).encode('utf-16-le'))).encode('utf-32-le'),
encoded_semicolon,
' '.encode('utf-16-le'),
encoded_null,
']'.encode('utf-16-le')])
if 'expandable' in element.attrib:
this_vtype = 'REG_EXPAND_SZ'
if 'explicitValue' in element.attrib and element.attrib['explicitValue'].lower() == 'true':
@ -3455,61 +3477,103 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
log.debug('element_valuenames == {0} and element_values == {1}'.format(element_valuenames,
element_values))
for i, item in enumerate(element_valuenames):
expected_string = expected_string + standard_layout.format(
chr(0),
reg_key,
element_valuenames[i],
chr(registry.vtype[this_vtype]),
six.unichr(len('{0}{1}'.format(element_values[i],
chr(0)).encode('utf-16-le'))),
'{0}{1}'.format(element_values[i], chr(0)))
expected_string = expected_string + b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
element_valuenames[i].encode('utf-16-le'),
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon,
six.unichr(len('{0}{1}'.format(element_values[i],
chr(0)).encode('utf-16-le'))).encode('utf-32-le'),
encoded_semicolon,
b''.join([element_values[i].encode('utf-16-le'),
encoded_null]),
']'.encode('utf-16-le')])
else:
expected_string = del_keys + r'[{1}{0};'.format(chr(0),
reg_key)
expected_string = del_keys + b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon])
else:
expected_string = u'[{1}{0};**delvals.{0};{2}{0};{3}{0};{4}{0}]'.format(
chr(0),
reg_key,
chr(registry.vtype[this_vtype]),
chr(len(' {0}'.format(chr(0)).encode('utf-16-le'))),
' ')
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
'**delvals.'.encode('utf-16-le'),
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon,
chr(len(' {0}'.format(chr(0)))).encode('utf-32-le'),
encoded_semicolon,
' '.encode('utf-16-le'),
encoded_null,
']'.encode('utf-16-le')])
elif etree.QName(element).localname == 'enum':
if this_element_value is not None:
pass
if standard_element_expected_string and not check_deleted:
if this_element_value is not None:
expected_string = standard_layout.format(
chr(0),
reg_key,
reg_valuename,
chr(registry.vtype[this_vtype]),
six.unichr(len(this_element_value.encode('utf-16-le'))),
this_element_value)
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
reg_valuename,
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon,
six.unichr(len(this_element_value)).encode('utf-32-le'),
encoded_semicolon,
this_element_value,
']'.encode('utf-16-le')])
else:
expected_string = u'[{1}{0};{2}{0};{3}{0};'.format(chr(0),
reg_key,
reg_valuename,
chr(registry.vtype[this_vtype]))
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
reg_valuename,
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon])
if not expected_string:
if etree.QName(element).localname == "delete" or check_deleted:
# delete value
expected_string = u'[{1}{0};**del.{2}{0};{3}{0};{4}{0};{5}{0}]'.format(
chr(0),
reg_key,
reg_valuename,
chr(registry.vtype[this_vtype]),
six.unichr(len(' {0}'.format(chr(0)).encode('utf-16-le'))),
' ')
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
'**del.'.encode('utf-16-le'),
reg_valuename,
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon,
six.unichr(len(' {0}'.format(chr(0)).encode('utf-16-le'))).encode('utf-32-le'),
encoded_semicolon,
' '.encode('utf-16-le'),
encoded_null,
']'.encode('utf-16-le')])
else:
expected_string = standard_layout.format(
chr(0),
reg_key,
reg_valuename,
chr(registry.vtype[this_vtype]),
six.unichr(len(this_element_value.encode('utf-16-le', '' if six.PY2 else 'surrogatepass'))),
this_element_value)
expected_string = b''.join(['['.encode('utf-16-le'),
reg_key,
encoded_null,
encoded_semicolon,
reg_valuename,
encoded_null,
encoded_semicolon,
chr(registry.vtype[this_vtype]).encode('utf-32-le'),
encoded_semicolon,
six.unichr(len(this_element_value)).encode('utf-32-le'),
encoded_semicolon,
this_element_value,
']'.encode('utf-16-le')])
return expected_string
@ -3534,17 +3598,16 @@ def _checkAllAdmxPolicies(policy_class,
full_names = {}
if policy_filedata:
log.debug('POLICY CLASS {0} has file data'.format(policy_class))
policy_filedata_split = re.sub(r'\]$',
'',
re.sub(r'^\[',
'',
policy_filedata.replace(module_policy_data.reg_pol_header, ''))
).split('][')
policy_filedata_split = re.sub(salt.utils.to_bytes(r'\]{0}$'.format(chr(0))),
b'',
re.sub(salt.utils.to_bytes(r'^\[{0}'.format(chr(0))),
b'',
re.sub(re.escape(module_policy_data.reg_pol_header.encode('utf-16-le')), b'', policy_filedata))
).split(']['.encode('utf-16-le'))
for policy_item in policy_filedata_split:
policy_item_key = policy_item.split('{0};'.format(chr(0)))[0]
policy_item_key = policy_item.split('{0};'.format(chr(0)).encode('utf-16-le'))[0].decode('utf-16-le').lower()
if policy_item_key:
for admx_item in REGKEY_XPATH(admx_policy_definitions, keyvalue=policy_item_key.lower()):
for admx_item in REGKEY_XPATH(admx_policy_definitions, keyvalue=policy_item_key):
if etree.QName(admx_item).localname == 'policy':
if admx_item not in admx_policies:
admx_policies.append(admx_item)
@ -3607,8 +3670,11 @@ def _checkAllAdmxPolicies(policy_class,
break
this_policynamespace = admx_policy.nsmap[admx_policy.prefix]
if ENABLED_VALUE_XPATH(admx_policy) and this_policy_setting == 'Not Configured':
element_only_enabled_disabled = False
explicit_enable_disable_value_setting = True
# some policies have a disabled list but not an enabled list
# added this to address those issues
if DISABLED_LIST_XPATH(admx_policy):
element_only_enabled_disabled = False
explicit_enable_disable_value_setting = True
if _checkValueItemParent(admx_policy,
this_policyname,
this_key,
@ -3621,8 +3687,11 @@ def _checkAllAdmxPolicies(policy_class,
policy_vals[this_policynamespace] = {}
policy_vals[this_policynamespace][this_policyname] = this_policy_setting
if DISABLED_VALUE_XPATH(admx_policy) and this_policy_setting == 'Not Configured':
element_only_enabled_disabled = False
explicit_enable_disable_value_setting = True
# some policies have a disabled list but not an enabled list
# added this to address those issues
if ENABLED_LIST_XPATH(admx_policy):
element_only_enabled_disabled = False
explicit_enable_disable_value_setting = True
if _checkValueItemParent(admx_policy,
this_policyname,
this_key,
@ -3847,7 +3916,7 @@ def _checkAllAdmxPolicies(policy_class,
admx_policy,
elements_item,
check_deleted=False)
) + r'(?!\*\*delvals\.)',
) + salt.utils.to_bytes(r'(?!\*\*delvals\.)'),
policy_filedata):
configured_value = _getDataFromRegPolData(_processValueItem(child_item,
child_key,
@ -4040,7 +4109,6 @@ def _read_regpol_file(reg_pol_path):
if os.path.exists(reg_pol_path):
with salt.utils.files.fopen(reg_pol_path, 'rb') as pol_file:
returndata = pol_file.read()
returndata = returndata.decode('utf-16-le')
return returndata
@ -4050,12 +4118,13 @@ def _regexSearchKeyValueCombo(policy_data, policy_regpath, policy_regkey):
for a policy_regpath and policy_regkey combo
'''
if policy_data:
specialValueRegex = r'(\*\*Del\.|\*\*DelVals\.){0,1}'
_thisSearch = r'\[{1}{0};{3}{2}{0};'.format(
chr(0),
re.escape(policy_regpath),
re.escape(policy_regkey),
specialValueRegex)
specialValueRegex = salt.utils.to_bytes(r'(\*\*Del\.|\*\*DelVals\.){0,1}')
_thisSearch = b''.join([salt.utils.to_bytes(r'\['),
re.escape(policy_regpath),
b'\00;',
specialValueRegex,
re.escape(policy_regkey),
b'\00;'])
match = re.search(_thisSearch, policy_data, re.IGNORECASE)
if match:
return policy_data[match.start():(policy_data.index(']', match.end())) + 1]
@ -4086,9 +4155,9 @@ def _write_regpol_data(data_to_write,
if not os.path.exists(policy_file_path):
ret = __salt__['file.makedirs'](policy_file_path)
with salt.utils.files.fopen(policy_file_path, 'wb') as pol_file:
if not data_to_write.startswith(reg_pol_header):
if not data_to_write.startswith(reg_pol_header.encode('utf-16-le')):
pol_file.write(reg_pol_header.encode('utf-16-le'))
pol_file.write(data_to_write.encode('utf-16-le'))
pol_file.write(data_to_write)
try:
gpt_ini_data = ''
if os.path.exists(gpt_ini_path):
@ -4164,13 +4233,14 @@ def _policyFileReplaceOrAppendList(string_list, policy_data):
update existing strings or append the strings
'''
if not policy_data:
policy_data = ''
policy_data = b''
# we are going to clean off the special pre-fixes, so we get only the valuename
specialValueRegex = r'(\*\*Del\.|\*\*DelVals\.){0,1}'
specialValueRegex = salt.utils.to_bytes(r'(\*\*Del\.|\*\*DelVals\.){0,1}')
for this_string in string_list:
list_item_key = this_string.split('{0};'.format(chr(0)))[0].lstrip('[')
list_item_key = this_string.split(b'\00;')[0].lstrip(b'[')
list_item_value_name = re.sub(specialValueRegex,
'', this_string.split('{0};'.format(chr(0)))[1],
b'',
this_string.split(b'\00;')[1],
flags=re.IGNORECASE)
log.debug('item value name is {0}'.format(list_item_value_name))
data_to_replace = _regexSearchKeyValueCombo(policy_data,
@ -4181,7 +4251,7 @@ def _policyFileReplaceOrAppendList(string_list, policy_data):
policy_data = policy_data.replace(data_to_replace, this_string)
else:
log.debug('appending {0}'.format([this_string]))
policy_data = ''.join([policy_data, this_string])
policy_data = b''.join([policy_data, this_string])
return policy_data
@ -4192,16 +4262,16 @@ def _policyFileReplaceOrAppend(this_string, policy_data, append_only=False):
'''
# we are going to clean off the special pre-fixes, so we get only the valuename
if not policy_data:
policy_data = ''
specialValueRegex = r'(\*\*Del\.|\*\*DelVals\.){0,1}'
policy_data = b''
specialValueRegex = salt.utils.to_bytes(r'(\*\*Del\.|\*\*DelVals\.){0,1}')
item_key = None
item_value_name = None
data_to_replace = None
if not append_only:
item_key = this_string.split('{0};'.format(chr(0)))[0].lstrip('[')
item_key = this_string.split(b'\00;')[0].lstrip(b'[')
item_value_name = re.sub(specialValueRegex,
'',
this_string.split('{0};'.format(chr(0)))[1],
b'',
this_string.split(b'\00;')[1],
flags=re.IGNORECASE)
log.debug('item value name is {0}'.format(item_value_name))
data_to_replace = _regexSearchKeyValueCombo(policy_data, item_key, item_value_name)
@ -4210,7 +4280,7 @@ def _policyFileReplaceOrAppend(this_string, policy_data, append_only=False):
policy_data = policy_data.replace(data_to_replace, this_string)
else:
log.debug('appending {0}'.format([this_string]))
policy_data = ''.join([policy_data, this_string])
policy_data = b''.join([policy_data, this_string])
return policy_data
@ -4228,9 +4298,10 @@ def _writeAdminTemplateRegPolFile(admtemplate_data,
REGISTRY_FILE_VERSION (u'\x01\00')
https://msdn.microsoft.com/en-us/library/aa374407(VS.85).aspx
[Registry Path<NULL>;Reg Value<NULL>;Reg Type<NULL>;SizeInBytes<NULL>;Data<NULL>]
+ https://msdn.microsoft.com/en-us/library/cc232696.aspx
[Registry Path<NULL>;Reg Value<NULL>;Reg Type;SizeInBytes;Data<NULL>]
'''
existing_data = ''
existing_data = b''
base_policy_settings = {}
policy_data = _policy_info()
policySearchXpath = '//ns1:*[@id = "{0}" or @name = "{0}"]'
@ -4856,7 +4927,7 @@ def get_policy_info(policy_name,
policy_class,
', '.join(policy_data.policies.keys()))
return ret
if policy_name in policy_data.policies[policy_class]:
if policy_name in policy_data.policies[policy_class]['policies']:
ret['policy_aliases'].append(policy_data.policies[policy_class]['policies'][policy_name]['Policy'])
ret['policy_found'] = True
ret['message'] = ''


@ -132,8 +132,18 @@ def wait_for_successful_query(name, wait_for=300, **kwargs):
Like query, but repeat and wait until match/match_type or status is fulfilled. The state returns the result from the last
query in case of success, or if no successful query was made within the wait_for timeout.
name
The name of the query.
wait_for
Total time to wait for requests that succeed.
request_interval
Optional interval to delay requests by N seconds to reduce the number of requests sent.
.. note::
All other arguments are passed to the http.query state.
'''
starttime = time.time()
@ -141,7 +151,7 @@ def wait_for_successful_query(name, wait_for=300, **kwargs):
caught_exception = None
ret = None
try:
ret = query(name, wait_for=wait_for, **kwargs)
ret = query(name, **kwargs)
if ret['result']:
return ret
except Exception as exc:
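Dropping wait_for from the forwarded kwargs is the fix: the wrapped query state does not take that argument, so the timeout should only bound the outer retry loop. A minimal sketch of that shape (hypothetical helper, not the state's full code):

import time

def wait_for_success(check, wait_for=300, request_interval=None):
    start = time.time()
    ret = None
    while time.time() - start < wait_for:
        ret = check()                 # one attempt; no wait_for kwarg passed down
        if ret.get('result'):
            return ret
        if request_interval:
            time.sleep(request_interval)
    return ret                        # last attempt's result on timeout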


@ -89,10 +89,10 @@ def zone_present(domain, type, profile):
type = 'master'
matching_zone = [z for z in zones if z['domain'] == domain]
if len(matching_zone) > 0:
return state_result(True, "Zone already exists", domain)
return state_result(True, 'Zone already exists', domain)
else:
result = __salt__['libcloud_dns.create_zone'](domain, profile, type)
return state_result(True, "Created new zone", domain, result)
return state_result(True, 'Created new zone', domain, result)
def zone_absent(domain, profile):
@ -108,10 +108,10 @@ def zone_absent(domain, profile):
zones = __salt__['libcloud_dns.list_zones'](profile)
matching_zone = [z for z in zones if z['domain'] == domain]
if len(matching_zone) == 0:
return state_result(True, "Zone already absent", domain)
return state_result(True, 'Zone already absent', domain)
else:
result = __salt__['libcloud_dns.delete_zone'](matching_zone[0]['id'], profile)
return state_result(result, "Deleted zone", domain)
return state_result(result, 'Deleted zone', domain)
def record_present(name, zone, type, data, profile):
@ -140,7 +140,7 @@ def record_present(name, zone, type, data, profile):
try:
matching_zone = [z for z in zones if z['domain'] == zone][0]
except IndexError:
return state_result(False, "Could not locate zone", name)
return state_result(False, 'Could not locate zone', name)
records = __salt__['libcloud_dns.list_records'](matching_zone['id'], profile)
matching_records = [record for record in records
if record['name'] == name and
@ -150,9 +150,9 @@ def record_present(name, zone, type, data, profile):
result = __salt__['libcloud_dns.create_record'](
name, matching_zone['id'],
type, data, profile)
return state_result(True, "Created new record", name, result)
return state_result(True, 'Created new record', name, result)
else:
return state_result(True, "Record already exists", name)
return state_result(True, 'Record already exists', name)
def record_absent(name, zone, type, data, profile):
@ -181,7 +181,7 @@ def record_absent(name, zone, type, data, profile):
try:
matching_zone = [z for z in zones if z['domain'] == zone][0]
except IndexError:
return state_result(False, "Zone could not be found", name)
return state_result(False, 'Zone could not be found', name)
records = __salt__['libcloud_dns.list_records'](matching_zone['id'], profile)
matching_records = [record for record in records
if record['name'] == name and
@ -194,6 +194,6 @@ def record_absent(name, zone, type, data, profile):
matching_zone['id'],
record['id'],
profile))
return state_result(all(result), "Removed {0} records".format(len(result)), name)
return state_result(all(result), 'Removed {0} records'.format(len(result)), name)
else:
return state_result(True, "Records already absent", name)
return state_result(True, 'Records already absent', name)


@ -68,7 +68,7 @@ def _changes(name,
workphone='',
homephone='',
loginclass=None,
date=0,
date=None,
mindays=0,
maxdays=999999,
inactdays=0,
@ -135,7 +135,7 @@ def _changes(name,
change['passwd'] = password
if empty_password and lshad['passwd'] != '':
change['empty_password'] = True
if date and date is not 0 and lshad['lstchg'] != date:
if date is not None and lshad['lstchg'] != date:
change['date'] = date
if mindays and mindays is not 0 and lshad['min'] != mindays:
change['mindays'] = mindays
@ -687,7 +687,7 @@ def present(name,
'empty password'.format(name)
ret['result'] = False
ret['changes']['password'] = ''
if date:
if date is not None:
__salt__['shadow.set_date'](name, date)
spost = __salt__['shadow.info'](name)
if spost['lstchg'] != date:
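The sentinel change is the point of this hunk: 'date and date is not 0' relied on identity comparison with an int literal (a CPython small-int caching detail) and also treated a requested value of 0 as unset. With None as the default, 0 becomes a legitimate shadow date. In sketch form:

def date_needs_change(requested, current):
    # requested=None means "not managed"; 0 is now a real value
    return requested is not None and current != requested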


@ -15,6 +15,9 @@ import random
import shutil
from salt.ext import six
# Import salt libs
import salt.utils.win_dacl
CAN_RENAME_OPEN_FILE = False
if os.name == 'nt': # pragma: no cover
@ -120,8 +123,12 @@ class _AtomicWFile(object):
self._fh.close()
if os.path.isfile(self._filename):
shutil.copymode(self._filename, self._tmp_filename)
st = os.stat(self._filename)
os.chown(self._tmp_filename, st.st_uid, st.st_gid)
if salt.utils.win_dacl.HAS_WIN32:
owner = salt.utils.win_dacl.get_owner(self._filename)
salt.utils.win_dacl.set_owner(self._tmp_filename, owner)
else:
st = os.stat(self._filename)
os.chown(self._tmp_filename, st.st_uid, st.st_gid)
atomic_rename(self._tmp_filename, self._filename)
def __exit__(self, exc_type, exc_value, traceback):
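A condensed sketch of the platform split above, assuming a helper module shaped like salt.utils.win_dacl from the diff: the temporary file takes on the original's ownership before the atomic rename, via ACLs on Windows and chown elsewhere.

import os

def copy_ownership(original, tmp, win_dacl=None):
    if win_dacl is not None and win_dacl.HAS_WIN32:
        win_dacl.set_owner(tmp, win_dacl.get_owner(original))  # Windows ACL owner
    else:
        st = os.stat(original)
        os.chown(tmp, st.st_uid, st.st_gid)  # POSIX uid/gid; os.chown is POSIX-only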


@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import random
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
# Import Salt libs
import salt.utils
class StatusModuleTest(ModuleCase):
'''
Test the status module
'''
@skipIf(salt.utils.is_windows(), 'minion is windows')
def test_status_pid(self):
'''
status.pid
'''
status_pid = self.run_function('status.pid', ['salt'])
grab_pids = status_pid.split()[:10]
random_pid = random.choice(grab_pids)
grep_salt = self.run_function('cmd.run', ['ps aux | grep salt'])
self.assertIn(random_pid, grep_salt)


@ -132,5 +132,5 @@ class RunnerReturnsTest(ShellCase):
'jid': jid,
'return': {'args': ['foo'], 'kwargs': {'bar': 'hello world!'}},
'success': True,
'user': RUNTIME_VARS.RUNNING_TESTS_USER}}
'user': RUNTIME_VARS.RUNNING_TESTS_USER if 'SUDO_USER' not in os.environ else 'root'}}
)


@ -43,9 +43,9 @@ class SPMBuildTest(SPMCase, ModuleCase):
test spm build with a big file
'''
# check to make sure there is enough space to run this test
check_space = self.run_function('status.diskusage', ['/'])
space = check_space['/']['available']
if space < 2000000:
check_space = self.run_function('status.diskusage', ['/tmp'])
space = check_space['/tmp']['available']
if space < 3000000000:
self.skipTest('Not enough space on host to run this test')
self.run_function('cmd.run',


@ -14,7 +14,8 @@ class DownloadArtifacts(object):
def __init__(self, instance, artifacts):
self.instance = instance
self.artifacts = artifacts
self.client = self.setup_transport()
self.transport = self.setup_transport()
self.sftpclient = paramiko.SFTPClient.from_transport(self.transport)
def setup_transport(self):
# pylint: disable=minimum-python-version
@ -33,19 +34,30 @@ class DownloadArtifacts(object):
username=state.get('username', tport.get('username', 'root')),
pkey=pkey
)
return paramiko.SFTPClient.from_transport(transport)
return transport
def _set_permissions(self):
'''
Make sure all xml files are readable by the world so that anyone can grab them
'''
for remote, _ in self.artifacts:
self.transport.open_session().exec_command('sudo chmod -R +r {}'.format(remote))
def download(self):
self._set_permissions()
for remote, local in self.artifacts:
if remote.endswith('/'):
for fxml in self.client.listdir(remote):
for fxml in self.sftpclient.listdir(remote):
self._do_download(os.path.join(remote, fxml), os.path.join(local, os.path.basename(fxml)))
else:
self._do_download(remote, os.path.join(local, os.path.basename(remote)))
def _do_download(self, remote, local):
print('Copying from {0} to {1}'.format(remote, local))
self.client.get(remote, local)
try:
self.sftpclient.get(remote, local)
except IOError:
print('Failed to copy: {0}'.format(remote))
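Keeping the Transport (rather than only the SFTPClient) is what makes the new chmod step possible: paramiko can multiplex an exec channel and the SFTP channel over the same connection. A sketch with hypothetical paths, assuming a connected, authenticated transport:

import paramiko

# transport: an already-connected, authenticated paramiko.Transport
session = transport.open_session()
session.exec_command('sudo chmod -R +r /tmp/kitchen-artifacts')
sftp = paramiko.SFTPClient.from_transport(transport)
sftp.get('/tmp/kitchen-artifacts/results.xml', 'results.xml')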
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Jenkins Artifact Download Helper')


@ -5,6 +5,7 @@
# Import Python libs
from __future__ import absolute_import
import logging
import os
# Import Salt Testing Libs
@ -29,6 +30,8 @@ if six.PY3:
else:
import salt.ext.ipaddress as ipaddress
log = logging.getLogger(__name__)
# Globals
IPv4Address = ipaddress.IPv4Address
IPv6Address = ipaddress.IPv6Address
@ -683,6 +686,26 @@ SwapTotal: 4789244 kB'''
self.assertEqual(os_grains.get('mem_total'), 2023)
self.assertEqual(os_grains.get('swap_total'), 400)
def test_docker_virtual(self):
'''
Test if the virtual_subtype grain reports Docker correctly
'''
with patch.object(os.path, 'isdir', MagicMock(return_value=False)):
with patch.object(os.path,
'isfile',
MagicMock(side_effect=lambda x: True if x == '/proc/1/cgroup' else False)):
for cgroup_substr in (':/system.slice/docker', ':/docker/',
':/docker-ce/'):
cgroup_data = \
'10:memory{0}a_long_sha256sum'.format(cgroup_substr)
log.debug(
'Testing Docker cgroup substring \'%s\'', cgroup_substr)
with patch('salt.utils.fopen', mock_open(read_data=cgroup_data)):
self.assertEqual(
core._virtual({'kernel': 'Linux'}).get('virtual_subtype'),
'Docker'
)
def _check_ipaddress(self, value, ip_v):
'''
check if ip address in a list is valid