Merge branch '2017.7' of https://github.com/saltstack/salt into 2017.7

Sebastian Gerlach committed 2018-06-21 07:56:20 +02:00
commit ba12ee947b
36 changed files with 584 additions and 137 deletions

.ci/docs (new file, 29 lines)

@ -0,0 +1,29 @@
pipeline {
agent { label 'docs' }
environment {
PYENV_ROOT = "/usr/local/pyenv"
PATH = "$PYENV_ROOT/bin:$PATH"
}
stages {
stage('setup') {
steps {
sh 'eval "$(pyenv init -)"; pyenv install 2.7.14 || echo "We already have this python."; pyenv local 2.7.14; pyenv shell 2.7.14'
sh 'eval "$(pyenv init -)"; pip install sphinx -e .'
}
}
stage('build') {
steps {
sh 'eval "$(pyenv init -)"; make -C doc clean html'
archiveArtifacts artifacts: 'doc/_build/html'
}
}
}
post {
success {
githubNotify description: "The docs job has passed, artifacts have been saved", status: "SUCCESS"
}
failure {
githubNotify description: "The docs job has failed", status: "FAILURE"
}
}
}

.ci/kitchen-centos7-py2 (new file, 48 lines)

@ -0,0 +1,48 @@
pipeline {
agent { label 'kitchen-slave' }
environment {
SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml"
SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml"
PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin"
RBENV_VERSION = "2.4.2"
TEST_SUITE = "py2"
TEST_PLATFORM = "centos-7"
}
stages {
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
}
}
stage('run kitchen') {
steps {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
}
}}
archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml'
}
post {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
}
}
}
}
post {
success {
githubNotify description: "The centos7-py2 job has passed", status: "SUCCESS"
}
failure {
githubNotify description: "The centos7-py2 job has failed", status: "FAILURE"
}
}
}

.ci/kitchen-centos7-py3 (new file, 48 lines)

@ -0,0 +1,48 @@
pipeline {
agent { label 'kitchen-slave' }
environment {
SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml"
SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml"
PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin"
RBENV_VERSION = "2.4.2"
TEST_SUITE = "py3"
TEST_PLATFORM = "centos-7"
}
stages {
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
}
}
stage('run kitchen') {
steps {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
}
}}
archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml'
}
post {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
}
}
}
}
post {
success {
githubNotify description: "The centos7-py3 job has passed", status: "SUCCESS"
}
failure {
githubNotify description: "The centos7-py3 job has failed", status: "FAILURE"
}
}
}


@ -0,0 +1,48 @@
pipeline {
agent { label 'kitchen-slave' }
environment {
SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml"
SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml"
PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin"
RBENV_VERSION = "2.4.2"
TEST_SUITE = "py2"
TEST_PLATFORM = "ubuntu-1604"
}
stages {
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
}
}
stage('run kitchen') {
steps {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
}
}}
archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml'
}
post {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
}
}
}
}
post {
success {
githubNotify description: "The ubuntu-1604-py2 job has passed", status: "SUCCESS"
}
failure {
githubNotify description: "The ubuntu-1604-py2 job has failed", status: "FAILURE"
}
}
}


@ -0,0 +1,48 @@
pipeline {
agent { label 'kitchen-slave' }
environment {
SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml"
SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml"
PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin"
RBENV_VERSION = "2.4.2"
TEST_SUITE = "py3"
TEST_PLATFORM = "ubuntu-1604"
}
stages {
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
}
}
stage('run kitchen') {
steps {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
}
}}
archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml'
}
post {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
}
}
}
}
post {
success {
githubNotify description: "The ubuntu-1604-py3 job has passed", status: "SUCCESS"
}
failure {
githubNotify description: "The ubuntu-1604-py3 job has failed", status: "FAILURE"
}
}
}

.ci/lint (new file, 41 lines)

@ -0,0 +1,41 @@
pipeline {
agent { label 'pr-lint-slave' }
environment {
PYENV_ROOT = "/usr/local/pyenv"
PATH = "$PYENV_ROOT/bin:$PATH"
}
stages {
stage('setup') {
steps {
sh 'eval "$(pyenv init -)"; pyenv install 2.7.14 || echo "We already have this python."; pyenv local 2.7.14; pyenv shell 2.7.14'
sh 'eval "$(pyenv init -)"; pip install pylint SaltPyLint'
sh 'eval "$(pyenv init -)"; which pylint; pylint --version'
}
}
stage('linting') {
failFast false
parallel {
stage('salt linting') {
steps {
sh 'eval "$(pyenv init -)"; pylint --rcfile=.testing.pylintrc --disable=W1307,str-format-in-logging setup.py salt/ | tee pylint-report.xml'
archiveArtifacts artifacts: 'pylint-report.xml'
}
}
stage('test linting') {
steps {
sh 'eval "$(pyenv init -)"; pylint --rcfile=.testing.pylintrc --disable=W0232,E1002,W1307,str-format-in-logging tests/ | tee pylint-report-tests.xml'
archiveArtifacts artifacts: 'pylint-report-tests.xml'
}
}
}
}
}
post {
success {
githubNotify description: "The lint job has passed", status: "SUCCESS"
}
failure {
githubNotify description: "The lint job has failed", status: "FAILURE"
}
}
}


@ -43,21 +43,20 @@ provisioner:
repo: git
testingdir: /testing
salt_copy_filter:
- __pycache__
- '*.pyc'
- .bundle
- .tox
- .kitchen
- .kitchen.yml
- artifacts
- Gemfile
- Gemfile.lock
- README.rst
- .travis.yml
state_top:
base:
"os:Windows":
- match: grain
- prep_windows
"*":
- git.salt
- <%= ENV['KITCHEN_STATE'] || 'git.salt' %>
pillars:
top.sls:
base:


@ -250,8 +250,8 @@ on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
project = 'Salt'
version = salt.version.__version__
latest_release = '2018.3.0' # latest release
previous_release = '2017.7.5' # latest release from previous branch
latest_release = '2018.3.1' # latest release
previous_release = '2017.7.6' # latest release from previous branch
previous_release_dir = '2017.7' # path on web server for previous branch
next_release = '' # next release
next_release_dir = '' # path on web server for next release branch


@ -1162,6 +1162,40 @@ The password used for HTTP proxy access.
proxy_password: obolus
Docker Configuration
====================
.. conf_minion:: docker.update_mine
``docker.update_mine``
----------------------
.. versionadded:: 2017.7.8,2018.3.3
.. versionchanged:: Fluorine
The default value is now ``False``
Default: ``True``
If enabled, when containers are added, removed, stopped, started, etc., the
:ref:`mine <salt-mine>` will be updated with the results of :py:func:`docker.ps
verbose=True all=True host=True <salt.modules.dockermod.ps>`. This mine data is
used by :py:func:`mine.get_docker <salt.modules.mine.get_docker>`. Set this
option to ``False`` to keep Salt from updating the mine with this information.
.. note::
This option can also be set in Grains or Pillar data, with Grains
overriding Pillar and the minion config file overriding Grains.
.. note::
Disabling this will of course keep :py:func:`mine.get_docker
<salt.modules.mine.get_docker>` from returning any information for a given
minion.
.. code-block:: yaml
docker.update_mine: False
Minion Module Management
========================


@ -1,11 +1,8 @@
========================================
In Progress: Salt 2017.7.6 Release Notes
========================================
Version 2017.7.6 is an **unreleased** bugfix release for :ref:`2017.7.0
<release-2017-7-0>`. This release is still in progress and has not been
released yet.
===========================
Salt 2017.7.6 Release Notes
===========================
Version 2017.7.6 is a bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
Statistics
==========
@ -16,6 +13,15 @@ Statistics
- Contributors: **47** (`Ch3LL`_, `DmitryKuzmenko`_, `GwiYeong`_, `Quarky9`_, `RichardW42`_, `UtahDave`_, `amaclean199`_, `arif-ali`_, `baniobloom`_, `bdrung`_, `benediktwerner`_, `bmiguel-teixeira`_, `cachedout`_, `dafenko`_, `damon-atkins`_, `dwoz`_, `ezh`_, `folti`_, `fpicot`_, `frogunder`_, `garethgreenaway`_, `gtmanfred`_, `isbm`_, `jeroennijhof`_, `jfindlay`_, `jfoboss`_, `kstreee`_, `lomeroe`_, `mattp-`_, `meaksh`_, `mirceaulinic`_, `myinitialsarepm`_, `mzbroch`_, `nages13`_, `paclat`_, `pcjeff`_, `pruiz`_, `psyer`_, `rallytime`_, `s0undt3ch`_, `skizunov`_, `smitty42`_, `terminalmage`_, `twangboy`_, `vutny`_, `yagnik`_, `yannj-fr`_)
Tornado 5.0 Support for Python 2 Only
-------------------------------------
Tornado 5.0 moves to using asyncio for all Python 3 versions. Because of this,
and because of changes between Python 3.4 and 3.5 that restrict asyncio to a
single ioloop (which requires some rearchitecting), support for Tornado 5.0 on
the Python 3 versions of Salt has been delayed to a later release.
For now, to use Tornado 5.0, the Python 2 version of Salt must be used.
Option to Return to Previous Pillar Include Behavior
====================================================


@ -5,15 +5,38 @@ In Progress: Salt 2017.7.7 Release Notes
Version 2017.7.7 is an **unreleased** bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
This release is still in progress and has not been released yet.
New win_snmp behavior
=====================
The ``2017.7.7`` release contains only a single fix for Issue `#48038`_, which
is a critical bug that occurs in a multi-syndic setup where the same job is run
multiple times on a minion.
- :py:func:`win_snmp.get_community_names
<salt.modules.win_snmp.get_community_names>` now returns the SNMP settings
actually in effect on the box. If settings are managed via GroupPolicy, those
settings will be returned. Otherwise, normal settings are returned.
Statistics
==========
- :py:func:`win_snmp.set_community_names
<salt.modules.win_snmp.set_community_names>` now raises an error when SNMP
settings are being managed by GroupPolicy.
- Total Merges: **1**
- Total Issue References: **1**
- Total PR References: **2**
- Contributors: **2** (`garethgreenaway`_, `rallytime`_)
Changelog for v2017.7.6..v2017.7.7
==================================
*Generated at: 2018-06-14 15:43:34 UTC*
* **ISSUE** `#48038`_: (`austinpapp`_) jobs are not dedup'ing minion side (refs: `#48075`_)
* **PR** `#48098`_: (`rallytime`_) Back-port `#48075`_ to 2017.7.7
@ *2018-06-14 12:53:42 UTC*
* **PR** `#48075`_: (`garethgreenaway`_) [2017.7] Ensure that the shared list of jids is passed (refs: `#48098`_)
* 084de927fe Merge pull request `#48098`_ from rallytime/bp-48075-2017.7.7
* e4e62e8b3a Ensure that the shared list of jids is passed when creating the Minion. Fixes an issue when minions are pointed at multiple syndics.
.. _`#48038`: https://github.com/saltstack/salt/issues/48038
.. _`#48075`: https://github.com/saltstack/salt/pull/48075
.. _`#48098`: https://github.com/saltstack/salt/pull/48098
.. _`austinpapp`: https://github.com/austinpapp
.. _`garethgreenaway`: https://github.com/garethgreenaway
.. _`rallytime`: https://github.com/rallytime


@ -0,0 +1,31 @@
========================================
In Progress: Salt 2017.7.8 Release Notes
========================================
Version 2017.7.8 is an **unreleased** bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
This release is still in progress and has not been released yet.
New win_snmp behavior
=====================
- :py:func:`win_snmp.get_community_names
<salt.modules.win_snmp.get_community_names>` now returns the SNMP settings
actually in effect on the box. If settings are managed via GroupPolicy, those
settings will be returned. Otherwise, normal settings are returned.
- :py:func:`win_snmp.set_community_names
<salt.modules.win_snmp.set_community_names>` now raises an error when SNMP
settings are being managed by GroupPolicy.
Option Added to Disable Docker Mine Updates
===========================================
When a docker container is added, removed, started, stopped, etc., the results
of a :py:func:`docker.ps verbose=True all=True host=True
<salt.modules.dockermod.ps>` are sent to the :ref:`mine <salt-mine>`, to be
used by :py:func:`mine.get_docker <salt.modules.mine.get_docker>`.
A new config option (:conf_minion:`docker.update_mine`) has been added. When
set to ``False``, Salt will not send this information to the mine. This is
useful in cases where sensitive information is stored in the container's
environment.
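
A quick way to confirm the effective setting across a fleet is to query ``config.get`` from the master, since it resolves the minion config file, grains, and pillar. A minimal sketch, not part of the patch, assuming the Salt Python API is available on the master and using purely illustrative glob targeting:

import salt.client

client = salt.client.LocalClient()
# Effective docker.update_mine value per minion (config file, grains and pillar resolved).
effective = client.cmd('*', 'config.get', ['docker.update_mine'])
for minion, value in sorted(effective.items()):
    print('{0}: docker.update_mine = {1}'.format(minion, value))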


@ -509,9 +509,8 @@ overrides all levels below it):
.. code-block:: yaml
gitfs_saltenv:
- saltenv:
- dev:
- mountpoint: salt://bar
- dev:
- mountpoint: salt://bar
3. Per-remote configuration parameter


@ -197,17 +197,17 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
} else {
$p = New-Item $Env:SALT_PIP_LOCAL_CACHE -ItemType Directory -Force # Ensure directory exists
if ( (Get-ChildItem $Env:SALT_PIP_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req_pip.txt into empty local cache SALT_REQ_PIP $Env:SALT_PIP_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_PIP_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
}
#==============================================================================
@ -218,16 +218,16 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req.txt" "pip install"
} else {
if ( (Get-ChildItem $Env:SALT_REQ_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req.txt into empty local cache SALT_REQ $Env:SALT_REQ_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_REQ_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
}
#==============================================================================


@ -197,17 +197,17 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
} else {
$p = New-Item $Env:SALT_PIP_LOCAL_CACHE -ItemType Directory -Force # Ensure directory exists
if ( (Get-ChildItem $Env:SALT_PIP_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req_pip.txt into empty local cache SALT_REQ_PIP $Env:SALT_PIP_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_PIP_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
}
#==============================================================================
@ -218,16 +218,16 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req.txt" "pip install"
} else {
if ( (Get-ChildItem $Env:SALT_REQ_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req.txt into empty local cache SALT_REQ $Env:SALT_REQ_LOCAL_CACHE"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_REQ_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --disable-pip-version-check install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
}
#==============================================================================


@ -1,4 +0,0 @@
[pytest]
addopts = --ssh-tests -ra -sv
testpaths = tests
norecursedirs = tests/kitchen


@ -7,5 +7,6 @@ MarkupSafe
requests>=1.0.0
tornado>=4.2.1,<6.0; python_version < '3'
tornado>=4.2.1,<5.0; python_version >= '3.4'
# Required by Tornado to handle threads stuff.
futures>=2.0; python_version < '3.0'


@ -1,3 +1,4 @@
pytest>=3.5.0
pytest-helpers-namespace
pytest-tempdir
pytest-cov


@ -935,7 +935,7 @@ class Minion(MinionBase):
# Flag meaning minion has finished initialization including first connect to the master.
# True means the Minion is fully functional and ready to handle events.
self.ready = False
self.jid_queue = jid_queue or []
self.jid_queue = [] if jid_queue is None else jid_queue
self.periodic_callbacks = {}
if io_loop is None:
@ -1032,10 +1032,11 @@ class Minion(MinionBase):
# I made the following 3 line oddity to preserve traceback.
# Please read PR #23978 before changing, hopefully avoiding regressions.
# Good luck, we're all counting on you. Thanks.
future_exception = self._connect_master_future.exception()
if future_exception:
# This needs to be re-raised to preserve restart_on_error behavior.
raise six.reraise(*future_exception)
if self._connect_master_future.done():
future_exception = self._connect_master_future.exception()
if future_exception:
# This needs to be re-raised to preserve restart_on_error behavior.
raise six.reraise(*future_exception)
if timeout and self._sync_connect_master_success is False:
raise SaltDaemonNotRunning('Failed to connect to the salt-master')
@ -1527,7 +1528,9 @@ class Minion(MinionBase):
)
ret['out'] = 'nested'
except TypeError as exc:
msg = 'Passed invalid arguments to {0}: {1}\n{2}'.format(function_name, exc, func.__doc__, )
msg = 'Passed invalid arguments to {0}: {1}\n{2}'.format(
function_name, exc, func.__doc__ or ''
)
log.warning(msg, exc_info_on_loglevel=logging.DEBUG)
ret['return'] = msg
ret['out'] = 'nested'
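
The ``jid_queue`` change in the first hunk above matters because the queue is a list shared with the caller (see the 2017.7.7 changelog entry about passing the shared list of jids). A minimal illustration of the difference, not part of the patch:

shared = []   # the caller's (initially empty) shared jid list

def old_style(jid_queue=None):
    return jid_queue or []                  # an empty list is falsy, so it gets replaced

def new_style(jid_queue=None):
    return [] if jid_queue is None else jid_queue

print(old_style(shared) is shared)          # False -- sharing with the caller is silently lost
print(new_style(shared) is shared)          # True  -- the same list keeps being shared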


@ -304,7 +304,7 @@ def install(name=None,
# We don't support installing specific version for now
# so transform the dict in list ignoring version provided
pkgs = [
p.keys()[0] for p in pkgs
next(iter(p)) for p in pkgs
if isinstance(p, dict)
]
pkg_to_install.extend(pkgs)
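
The one-line change above is a Python 3 compatibility fix: ``dict.keys()`` returns a non-indexable view on Python 3, so ``p.keys()[0]`` raises ``TypeError`` there, while ``next(iter(p))`` returns the first key on both Python 2 and 3. A small illustration with made-up package data:

pkgs = [{'vim': '8.0'}, {'tmux': None}]
names = [next(iter(p)) for p in pkgs if isinstance(p, dict)]
print(names)   # ['vim', 'tmux']

# On Python 3 the old expression would fail:
#   [p.keys()[0] for p in pkgs]   # TypeError: 'dict_keys' object is not subscriptable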


@ -436,11 +436,20 @@ def _refresh_mine_cache(wrapped):
refresh salt mine on exit.
'''
returned = wrapped(*args, **salt.utils.clean_kwargs(**kwargs))
__salt__['mine.send']('docker.ps', verbose=True, all=True, host=True)
if _check_update_mine():
__salt__['mine.send']('docker.ps', verbose=True, all=True, host=True)
return returned
return wrapper
def _check_update_mine():
try:
ret = __context__['docker.update_mine']
except KeyError:
ret = __context__['docker.update_mine'] = __salt__['config.get']('docker.update_mine', default=True)
return ret
# Helper functions
def _change_state(name, action, expected, *args, **kwargs):
'''


@ -870,7 +870,7 @@ def _network_conf(conf_tuples=None, **kwargs):
# on old versions of lxc, still support the gateway auto mode
# if we didn't explicitly say no to
# (lxc.network.ipv4.gateway: auto)
if _LooseVersion(version()) <= '1.0.7' and \
if _LooseVersion(version()) <= _LooseVersion('1.0.7') and \
True not in ['lxc.network.ipv4.gateway' in a for a in ret] and \
True in ['lxc.network.ipv4' in a for a in ret]:
ret.append({'lxc.network.ipv4.gateway': 'auto'})
@ -2079,7 +2079,7 @@ def clone(name,
if backing in ('dir', 'overlayfs', 'btrfs'):
size = None
# LXC commands and options changed in 2.0 - CF issue #34086 for details
if version() >= _LooseVersion('2.0'):
if _LooseVersion(version()) >= _LooseVersion('2.0'):
# https://linuxcontainers.org/lxc/manpages//man1/lxc-copy.1.html
cmd = 'lxc-copy'
cmd += ' {0} -n {1} -N {2}'.format(snapshot, orig, name)
@ -3507,7 +3507,9 @@ def bootstrap(name,
configdir = '/var/tmp/.c_{0}'.format(rstr)
cmd = 'install -m 0700 -d {0}'.format(configdir)
if run(name, cmd, python_shell=False):
if run_all(
name, cmd, path=path, python_shell=False
)['retcode'] != 0:
log.error('tmpdir {0} creation failed ({1}'
.format(configdir, cmd))
return False
@ -3518,6 +3520,7 @@ def bootstrap(name,
copy_to(name, bs_, script, path=path)
result = run_all(name,
'sh -c "chmod +x {0}"'.format(script),
path=path,
python_shell=True)
copy_to(name, cfg_files['config'],
@ -3544,6 +3547,7 @@ def bootstrap(name,
run_all(name,
'sh -c \'if [ -f "{0}" ];then rm -f "{0}";fi\''
''.format(script),
path=path,
ignore_retcode=True,
python_shell=True)
else:


@ -374,10 +374,18 @@ def flush():
def get_docker(interfaces=None, cidrs=None, with_container_id=False):
'''
Get all mine data for 'docker.get_containers' and run an aggregation
routine. The "interfaces" parameter allows for specifying which network
interfaces to select ip addresses from. The "cidrs" parameter allows for
specifying a list of cidrs which the ip address must match.
.. versionchanged:: 2017.7.8,2018.3.3
When :conf_minion:`docker.update_mine` is set to ``False`` for a given
minion, no mine data will be populated for that minion, and thus none
will be returned for it.
.. versionchanged:: Fluorine
:conf_minion:`docker.update_mine` now defaults to ``False``
Get all mine data for :py:func:`docker.ps <salt.modules.dockermod.ps_>` and
run an aggregation routine. The ``interfaces`` parameter allows for
specifying the network interfaces from which to select IP addresses. The
``cidrs`` parameter allows for specifying a list of subnets which the IP
address must match.
with_container_id
Boolean, to expose container_id in the list of results
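
For context, a hypothetical call to this function from another execution module, state, or template running on a minion; the function and its keyword arguments are as documented above, while the interface name and CIDR are made-up values:

# Only consider addresses on eth0 that fall inside the backend network,
# and include container IDs in the results.
docker_hosts = __salt__['mine.get_docker'](interfaces=['eth0'],
                                           cidrs=['10.0.0.0/8'],
                                           with_container_id=True)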


@ -484,60 +484,67 @@ def set_hwclock(clock):
salt '*' timezone.set_hwclock UTC
'''
if 'AIX' in __grains__['os_family']:
if clock.lower() != 'utc':
raise SaltInvocationError(
'UTC is the only permitted value'
)
return True
timezone = get_zone()
if 'Solaris' in __grains__['os_family']:
if clock.lower() not in ('localtime', 'utc'):
raise SaltInvocationError(
'localtime and UTC are the only permitted values'
)
if 'sparc' in __grains__['cpuarch']:
raise SaltInvocationError(
'UTC is the only choice for SPARC architecture'
)
cmd = ['rtc', '-z', 'GMT' if clock.lower() == 'utc' else timezone]
return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
zonepath = '/usr/share/zoneinfo/{0}'.format(timezone)
if not os.path.exists(zonepath):
raise CommandExecutionError(
'Zone \'{0}\' does not exist'.format(zonepath)
)
os.unlink('/etc/localtime')
os.symlink(zonepath, '/etc/localtime')
if 'Arch' in __grains__['os_family']:
cmd = ['timezonectl', 'set-local-rtc',
if salt.utils.which('timedatectl'):
cmd = ['timedatectl', 'set-local-rtc',
'true' if clock == 'localtime' else 'false']
return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
elif 'RedHat' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="{0}"'.format(timezone))
elif 'Suse' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^TIMEZONE=.*', 'TIMEZONE="{0}"'.format(timezone))
elif 'Debian' in __grains__['os_family']:
if clock == 'UTC':
__salt__['file.sed']('/etc/default/rcS', '^UTC=.*', 'UTC=yes')
elif clock == 'localtime':
__salt__['file.sed']('/etc/default/rcS', '^UTC=.*', 'UTC=no')
elif 'Gentoo' in __grains__['os_family']:
if clock not in ('UTC', 'localtime'):
raise SaltInvocationError(
'Only \'UTC\' and \'localtime\' are allowed'
else:
os_family = __grains__['os_family']
if os_family in ('AIX', 'NILinuxRT'):
if clock.lower() != 'utc':
raise SaltInvocationError(
'UTC is the only permitted value'
)
return True
timezone = get_zone()
if 'Solaris' in __grains__['os_family']:
if clock.lower() not in ('localtime', 'utc'):
raise SaltInvocationError(
'localtime and UTC are the only permitted values'
)
if 'sparc' in __grains__['cpuarch']:
raise SaltInvocationError(
'UTC is the only choice for SPARC architecture'
)
cmd = ['rtc', '-z', 'GMT' if clock.lower() == 'utc' else timezone]
return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
zonepath = '/usr/share/zoneinfo/{0}'.format(timezone)
if not os.path.exists(zonepath):
raise CommandExecutionError(
'Zone \'{0}\' does not exist'.format(zonepath)
)
if clock == 'localtime':
clock = 'local'
__salt__['file.sed'](
'/etc/conf.d/hwclock', '^clock=.*', 'clock="{0}"'.format(clock))
os.unlink('/etc/localtime')
os.symlink(zonepath, '/etc/localtime')
if 'Arch' in __grains__['os_family']:
cmd = ['timezonectl', 'set-local-rtc',
'true' if clock == 'localtime' else 'false']
return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
elif 'RedHat' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="{0}"'.format(timezone))
elif 'Suse' in __grains__['os_family']:
__salt__['file.sed'](
'/etc/sysconfig/clock', '^TIMEZONE=.*', 'TIMEZONE="{0}"'.format(timezone))
elif 'Debian' in __grains__['os_family']:
if clock == 'UTC':
__salt__['file.sed']('/etc/default/rcS', '^UTC=.*', 'UTC=yes')
elif clock == 'localtime':
__salt__['file.sed']('/etc/default/rcS', '^UTC=.*', 'UTC=no')
elif 'Gentoo' in __grains__['os_family']:
if clock not in ('UTC', 'localtime'):
raise SaltInvocationError(
'Only \'UTC\' and \'localtime\' are allowed'
)
if clock == 'localtime':
clock = 'local'
__salt__['file.sed'](
'/etc/conf.d/hwclock', '^clock=.*', 'clock="{0}"'.format(clock))
return True


@ -1225,7 +1225,7 @@ def _makedirs(name,
Helper function for creating directories when the ``makedirs`` option is set
to ``True``. Handles Unix and Windows based systems
.. versionadded:: 2017.7.7
.. versionadded:: 2017.7.8
Args:
name (str): The directory path to create


@ -713,8 +713,8 @@ def repo_present(
ret['result'] = None
else:
result = __salt__['github.add_team_repo'](name, team_name,
permission,
profile=profile)
profile=profile,
permission=permission)
if result:
ret['changes'][team_name] = team_change
else:


@ -752,9 +752,9 @@ class IPCMessageSubscriber(IPCClient):
# This will prevent this message from showing up:
# '[ERROR ] Future exception was never retrieved:
# StreamClosedError'
if self._read_sync_future is not None:
if self._read_sync_future is not None and self._read_sync_future.done():
self._read_sync_future.exception()
if self._read_stream_future is not None:
if self._read_stream_future is not None and self._read_stream_future.done():
self._read_stream_future.exception()
def __del__(self):


@ -896,10 +896,9 @@ class SaltMessageClient(object):
# This happens because the logic is always waiting to read
# the next message and the associated read future is marked
# 'StreamClosedError' when the stream is closed.
self._read_until_future.exception()
if (not self._stream_return_future.done() and
self.io_loop != tornado.ioloop.IOLoop.current(
instance=False)):
if self._read_until_future.done():
self._read_until_future.exception()
elif self.io_loop != tornado.ioloop.IOLoop.current(instance=False):
self.io_loop.add_future(
self._stream_return_future,
lambda future: self.io_loop.stop()
@ -1134,7 +1133,7 @@ class Subscriber(object):
self._closing = True
if not self.stream.closed():
self.stream.close()
if self._read_until_future is not None:
if self._read_until_future is not None and self._read_until_future.done():
# This will prevent this message from showing up:
# '[ERROR ] Future exception was never retrieved:
# StreamClosedError'
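
The ``done()`` guards added above (in ``IPCMessageSubscriber``, ``SaltMessageClient``, ``Subscriber``, and the minion's ``_connect_master_future`` check) exist because asking a future that is still pending for its exception raises instead of returning ``None``. A minimal illustration, not part of the patch, using an asyncio future on Python 3.5+, which behaves the same way in this respect:

import asyncio

loop = asyncio.new_event_loop()
fut = loop.create_future()

if fut.done():          # the new guard: only drain exceptions from finished futures
    fut.exception()

try:
    fut.exception()     # without the guard: raises while the future is still pending
except asyncio.InvalidStateError as exc:
    print('pending future: {0!r}'.format(exc))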


@ -2360,7 +2360,7 @@ def alias_function(fun, name, doc=None):
orig_name = fun.__name__
alias_msg = ('\nThis function is an alias of '
'``{0}``.\n'.format(orig_name))
alias_fun.__doc__ = alias_msg + fun.__doc__
alias_fun.__doc__ = alias_msg + (fun.__doc__ or '')
return alias_fun
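
This mirrors the earlier ``minion.py`` change: a function without a docstring has ``__doc__`` set to ``None``, and concatenating ``None`` to a string raises ``TypeError``, so both call sites now fall back to an empty string. A two-line illustration, not part of the patch:

def undocumented():
    pass

alias_msg = '\nThis function is an alias of ``undocumented``.\n'
print(alias_msg + (undocumented.__doc__ or ''))   # works even with no docstring
# alias_msg + undocumented.__doc__ would raise TypeError, because __doc__ is None here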


@ -451,7 +451,7 @@ class ShellCase(ShellTestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixi
'''
Execute salt
'''
arg_str = '-c {0} {1}'.format(self.get_config_dir(), arg_str)
arg_str = '-c {0} -t {1} {2}'.format(self.get_config_dir(), timeout, arg_str)
return self.run_script('salt',
arg_str,
with_retcode=with_retcode,


@ -29,7 +29,7 @@ def iter_installers(content):
x = m.groups()[0]
if not x.startswith(PREFIX):
continue
if x.endswith('zip'):
if x.endswith(('zip', 'sha256')):
continue
if installer:
if x != installer + '.md5':


@ -174,10 +174,15 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
self.assertIn('empty_dir', ret)
def test_symlink_list(self):
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = self.test_symlink_list_file_roots
ret = roots.symlink_list({'saltenv': 'base'})
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
orig_file_roots = self.opts['file_roots']
try:
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = self.test_symlink_list_file_roots
ret = roots.symlink_list({'saltenv': 'base'})
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})
finally:
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = orig_file_roots
class RootsLimitTraversalTest(TestCase, AdaptedConfigurationTestCaseMixin):


@ -136,6 +136,7 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(docker_mod.__salt__,
{'mine.send': mine_send,
'container_resource.run': MagicMock(),
'config.get': MagicMock(return_value=True),
'cp.cache_file': MagicMock(return_value=False)}):
with patch.dict(docker_mod.__utils__,
{'docker.get_client_args': client_args_mock}):
@ -144,6 +145,44 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
mine_send.assert_called_with('docker.ps', verbose=True, all=True,
host=True)
def test_update_mine(self):
'''
Test the docker.update_mine config option
'''
def config_get_disabled(val, default):
return {'base_url': docker_mod.NOTSET,
'version': docker_mod.NOTSET,
'docker.url': docker_mod.NOTSET,
'docker.version': docker_mod.NOTSET,
'docker.machine': docker_mod.NOTSET,
'docker.update_mine': False}[val]
def config_get_enabled(val, default):
return {'base_url': docker_mod.NOTSET,
'version': docker_mod.NOTSET,
'docker.url': docker_mod.NOTSET,
'docker.version': docker_mod.NOTSET,
'docker.machine': docker_mod.NOTSET,
'docker.update_mine': True}[val]
mine_mock = Mock()
dunder_salt = {
'config.get': MagicMock(side_effect=config_get_disabled),
'mine.send': mine_mock,
}
with patch.dict(docker_mod.__salt__, dunder_salt), \
patch.dict(docker_mod.__context__, {'docker.client': Mock()}), \
patch.object(docker_mod, 'state', MagicMock(return_value='stopped')):
docker_mod.stop('foo', timeout=1)
mine_mock.assert_not_called()
with patch.dict(docker_mod.__salt__, dunder_salt), \
patch.dict(docker_mod.__context__, {'docker.client': Mock()}), \
patch.object(docker_mod, 'state', MagicMock(return_value='stopped')):
dunder_salt['config.get'].side_effect = config_get_enabled
docker_mod.stop('foo', timeout=1)
self.assert_called_once(mine_mock)
@skipIf(_docker_py_version() < (1, 5, 0),
'docker module must be installed to run this test or is too old. >=1.5.0')
def test_list_networks(self, *args):


@ -117,12 +117,12 @@ class MountTestCase(TestCase, LoaderModuleMockMixin):
mock_open(read_data=file_data),
create=True) as m:
m.return_value.__iter__.return_value = file_data.splitlines()
self.assertEqual(mount.fstab(), {'/tmp': {'device': 'swap',
'device_fsck': '-',
'fstype': 'tmpfs',
'mount_at_boot': 'yes',
'opts': ['size=2048m'],
'pass_fsck': '-'}})
self.assertEqual(mount.vfstab(), {'/tmp': {'device': 'swap',
'device_fsck': '-',
'fstype': 'tmpfs',
'mount_at_boot': 'yes',
'opts': ['size=2048m'],
'pass_fsck': '-'}})
def test_rm_fstab(self):
'''


@ -324,6 +324,21 @@ class TimezoneModuleTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(timezone.__grains__, {'os_family': ['AIX']}):
assert timezone.get_hwclock() == hwclock
@skipIf(salt.utils.is_windows(), 'os.symlink not available in Windows')
@patch('salt.utils.which', MagicMock(return_value=True))
def test_set_hwclock_timedatectl(self):
'''
Test set hwclock with timedatectl
:return:
'''
timezone.set_hwclock('UTC')
name, args, kwargs = timezone.__salt__['cmd.retcode'].mock_calls[0]
assert args == (['timedatectl', 'set-local-rtc', 'false'],)
timezone.set_hwclock('localtime')
name, args, kwargs = timezone.__salt__['cmd.retcode'].mock_calls[1]
assert args == (['timedatectl', 'set-local-rtc', 'true'],)
@skipIf(salt.utils.is_windows(), 'os.symlink not available in Windows')
@patch('salt.utils.which', MagicMock(return_value=False))
@patch('os.path.exists', MagicMock(return_value=True))
@ -334,10 +349,11 @@ class TimezoneModuleTestCase(TestCase, LoaderModuleMockMixin):
Test set hwclock on AIX
:return:
'''
with patch.dict(timezone.__grains__, {'os_family': ['AIX']}):
with self.assertRaises(SaltInvocationError):
assert timezone.set_hwclock('forty two')
assert timezone.set_hwclock('UTC')
for osfamily in ['AIX', 'NILinuxRT']:
with patch.dict(timezone.__grains__, {'os_family': osfamily}):
with self.assertRaises(SaltInvocationError):
assert timezone.set_hwclock('forty two')
assert timezone.set_hwclock('UTC')
@skipIf(salt.utils.is_windows(), 'os.symlink not available in Windows')
@patch('salt.utils.which', MagicMock(return_value=False))


@ -1,7 +1,12 @@
[tox]
envlist = py27,py34,py35,py36
envlist = py27,py3
[testenv]
deps = -r{toxinidir}/requirements/tests.txt
commands = pytest --rootdir {toxinidir} {posargs:--no-print-logs --run-destructive}
commands = pytest --rootdir {toxinidir} {posargs}
passenv = LANG HOME
[pytest]
addopts = --log-file /tmp/salt-runtests.log --no-print-logs --ssh-tests -ra -sv
testpaths = tests
norecursedirs = tests/kitchen