Merge branch '2018.3' into test_git

Daniel Wozniak, 2018-07-06 12:11:53 -07:00 (committed by GitHub)
commit 5ca5e060b3
70 changed files with 6821 additions and 668 deletions

View file

@ -5,6 +5,14 @@ pipeline {
PATH = "$PYENV_ROOT/bin:$PATH"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'Testing docs...',
status: 'PENDING',
context: "jenkins/pr/docs"
}
}
stage('setup') {
steps {
sh 'eval "$(pyenv init -)"; pyenv install 2.7.14 || echo "We already have this python."; pyenv local 2.7.14; pyenv shell 2.7.14'
@ -14,16 +22,25 @@ pipeline {
stage('build') {
steps {
sh 'eval "$(pyenv init -)"; make -C doc clean html'
archiveArtifacts artifacts: 'doc/_build/html'
archiveArtifacts artifacts: 'doc/_build/html/'
}
}
}
post {
always {
cleanWs()
}
success {
githubNotify description: "The docs job has passed, artifacts have been saved", status: "SUCCESS"
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The docs job has passed',
status: 'SUCCESS',
context: "jenkins/pr/docs"
}
failure {
githubNotify description: "The docs job has failed", status: "FAILURE"
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The docs job has failed',
status: 'FAILURE',
context: "jenkins/pr/docs"
}
}
}

View file

@ -9,6 +9,14 @@ pipeline {
TEST_PLATFORM = "centos-7"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
@ -29,7 +37,7 @@ pipeline {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
@ -38,11 +46,20 @@ pipeline {
}
}
post {
always {
cleanWs()
}
success {
githubNotify description: "The centos7-py2 job has passed", status: "SUCCESS"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
failure {
githubNotify description: "The centos7-py2 job has failed", status: "FAILURE"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
}

View file

@ -9,6 +9,14 @@ pipeline {
TEST_PLATFORM = "centos-7"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
@ -29,7 +37,7 @@ pipeline {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
@ -38,11 +46,20 @@ pipeline {
}
}
post {
always {
cleanWs()
}
success {
githubNotify description: "The centos7-py3 job has passed", status: "SUCCESS"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
failure {
githubNotify description: "The centos7-py3 job has failed", status: "FAILURE"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
}

View file

@ -9,6 +9,14 @@ pipeline {
TEST_PLATFORM = "ubuntu-1604"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
@ -29,7 +37,7 @@ pipeline {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
@ -38,11 +46,20 @@ pipeline {
}
}
post {
always {
cleanWs()
}
success {
githubNotify description: "The ubuntu-1604-py2 job has passed", status: "SUCCESS"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
failure {
githubNotify description: "The ubuntu-1604-py2 job has failed", status: "FAILURE"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
}

View file

@ -9,6 +9,14 @@ pipeline {
TEST_PLATFORM = "ubuntu-1604"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: "running ${TEST_SUITE}-${TEST_PLATFORM}...",
status: 'PENDING',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
stage('setup') {
steps {
sh 'bundle install --with ec2 windows --without opennebula docker'
@ -29,7 +37,7 @@ pipeline {
always {
script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) {
sshagent(credentials: ['jenkins-testing-ssh-key']) {
sh 'ssh-add ~/.ssh/jenkins/jenkins-testing.pem'
sh 'ssh-add ~/.ssh/jenkins-testing.pem'
sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
}
}}
@ -38,11 +46,20 @@ pipeline {
}
}
post {
always {
cleanWs()
}
success {
githubNotify description: "The ubuntu-1604-py3 job has passed", status: "SUCCESS"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed",
status: 'SUCCESS',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
failure {
githubNotify description: "The ubuntu-1604-py3 job has failed", status: "FAILURE"
githubNotify credentialsId: 'test-jenkins-credentials',
description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed",
status: 'FAILURE',
context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}"
}
}
}

View file

@ -5,6 +5,14 @@ pipeline {
PATH = "$PYENV_ROOT/bin:$PATH"
}
stages {
stage('github-pending') {
steps {
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'Testing lint...',
status: 'PENDING',
context: "jenkins/pr/lint"
}
}
stage('setup') {
steps {
sh 'eval "$(pyenv init -)"; pyenv install 2.7.14 || echo "We already have this python."; pyenv local 2.7.14; pyenv shell 2.7.14'
@ -31,11 +39,28 @@ pipeline {
}
}
post {
always {
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
pattern: 'pylint-report*.xml'
]],
unstableTotalAll: '999',
usePreviousBuildAsReference: true
])
cleanWs()
}
success {
githubNotify description: "The lint job has passed", status: "SUCCESS"
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The lint job has passed',
status: 'SUCCESS',
context: "jenkins/pr/lint"
}
failure {
githubNotify description: "The lint job has failed", status: "FAILURE"
githubNotify credentialsId: 'test-jenkins-credentials',
description: 'The lint job has failed',
status: 'FAILURE',
context: "jenkins/pr/lint"
}
}
}

.github/CODEOWNERS
View file

@ -48,6 +48,7 @@ salt/spm/* @saltstack/team-spm
# Team SSH
salt/cli/ssh.py @saltstack/team-ssh
salt/client/ssh/* @saltstack/team-ssh
salt/roster/* @saltstack/team-ssh
salt/runners/ssh.py @saltstack/team-ssh
salt/**/thin.py @saltstack/team-ssh

View file

@ -1,6 +1,6 @@
---
<% vagrant = system('gem list -i kitchen-vagrant 2>/dev/null >/dev/null') %>
<% version = '2017.7.4' %>
<% version = '2017.7.6' %>
<% platformsfile = ENV['SALT_KITCHEN_PLATFORMS'] || '.kitchen/platforms.yml' %>
<% driverfile = ENV['SALT_KITCHEN_DRIVER'] || '.kitchen/driver.yml' %>
<% verifierfile = ENV['SALT_KITCHEN_VERIFIER'] || '.kitchen/verifier.yml' %>
@ -31,7 +31,7 @@ provisioner:
salt_version: latest
salt_bootstrap_url: https://bootstrap.saltstack.com
salt_bootstrap_options: -X -p rsync stable <%= version %>
log_level: debug
log_level: info
sudo: true
require_chef: false
retry_on_exit_code:

View file

@ -251,7 +251,7 @@ project = 'Salt'
version = salt.version.__version__
latest_release = '2018.3.2' # latest release
previous_release = '2017.7.6' # latest release from previous branch
previous_release = '2017.7.7' # latest release from previous branch
previous_release_dir = '2017.7' # path on web server for previous branch
next_release = '' # next release
next_release_dir = '' # path on web server for next release branch

View file

@ -148,22 +148,23 @@ Why aren't my custom modules/states/etc. available on my Minions?
-----------------------------------------------------------------
Custom modules are synced to Minions when
:mod:`saltutil.sync_modules <salt.modules.saltutil.sync_modules>`,
or :mod:`saltutil.sync_all <salt.modules.saltutil.sync_all>` is run.
Custom modules are also synced by :mod:`state.apply` when run without
any arguments.
:py:func:`saltutil.sync_modules <salt.modules.saltutil.sync_modules>`,
or :py:func:`saltutil.sync_all <salt.modules.saltutil.sync_all>` is run.
Similarly, custom states are synced to Minions when :py:func:`saltutil.sync_states
<salt.modules.saltutil.sync_states>`, or :py:func:`saltutil.sync_all
<salt.modules.saltutil.sync_all>` is run.
Similarly, custom states are synced to Minions
when :mod:`state.apply <salt.modules.state.apply_>`,
:mod:`saltutil.sync_states <salt.modules.saltutil.sync_states>`, or
:mod:`saltutil.sync_all <salt.modules.saltutil.sync_all>` is run.
They are both also synced when a :ref:`highstate <running-highstate>` is
triggered.
Custom states are also synced by :mod:`state.apply<salt.modules.state.apply_>`
when run without any arguments.
As of the Fluorine release, as well as 2017.7.7 and 2018.3.2 in their
respective release cycles, the ``sync`` argument to :py:func:`state.apply
<salt.modules.state.apply_>`/:py:func:`state.sls <salt.modules.state.sls>` can
be used to sync custom types when running individual SLS files.
Other custom types (renderers, outputters, etc.) have similar behavior, see the
documentation for the :mod:`saltutil <salt.modules.saltutil>` module for more
documentation for the :py:func:`saltutil <salt.modules.saltutil>` module for more
information.
:ref:`This reactor example <minion-start-reactor>` can be used to automatically

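As background for the sync behaviour described above, here is a minimal illustrative sketch using Salt's Python client; the target 'myminion', the SLS name 'apache', and the use of sync_mods on a release that supports it are assumptions for illustration, not part of this changeset.

import salt.client

client = salt.client.LocalClient()

# Explicitly push custom modules, states, etc. from the master to the minion.
client.cmd('myminion', 'saltutil.sync_all')

# Or let state.apply sync custom types as part of running an individual SLS
# file, on releases where the sync argument is available.
client.cmd('myminion', 'state.apply', ['apache'], kwarg={'sync_mods': 'all'})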
View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-API" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-API" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-api \- salt-api Command
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CALL" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-CALL" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-call \- salt-call Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CLOUD" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-CLOUD" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-cloud \- Salt Cloud Command
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-CP" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-CP" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-cp \- salt-cp Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-KEY" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-KEY" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-key \- salt-key Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-MASTER" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-MASTER" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-master \- salt-master Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-MINION" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-MINION" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-minion \- salt-minion Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-PROXY" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-PROXY" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-proxy \- salt-proxy Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-RUN" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-RUN" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-run \- salt-run Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-SSH" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-SSH" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-ssh \- salt-ssh Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-SYNDIC" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-SYNDIC" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-syndic \- salt-syndic Documentation
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT-UNITY" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT-UNITY" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt-unity \- salt-unity Command
.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SALT" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SALT" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
salt \- salt
.

File diff suppressed because it is too large.

View file

@ -1,6 +1,6 @@
.\" Man page generated from reStructuredText.
.
.TH "SPM" "1" "May 09, 2018" "2018.3.1" "Salt"
.TH "SPM" "1" "Jun 14, 2018" "2018.3.2" "Salt"
.SH NAME
spm \- Salt Package Manager Command
.

View file

@ -886,6 +886,10 @@ Example:
encoding (usually a ``unicode`` type). This filter was incorrectly-named
when it was added. ``json_decode_list`` will be supported until the Neon
release.
.. deprecated:: 2018.3.3,Fluorine
The :jinja_ref:`tojson` filter accomplishes what this filter was designed
to do, making this filter redundant.
Recursively encodes all string elements of the list to bytes.
@ -915,6 +919,9 @@ Returns:
encoding (usually a ``unicode`` type). This filter was incorrectly-named
when it was added. ``json_decode_dict`` will be supported until the Neon
release.
.. deprecated:: 2018.3.3,Fluorine
The :jinja_ref:`tojson` filter accomplishes what this filter was designed
to do, making this filter redundant.
Recursively encodes all string items in the dictionary to bytes.
@ -934,6 +941,22 @@ Returns:
{'a': '\xd0\x94'}
.. jinja_ref:: tojson
``tojson``
----------
.. versionadded:: 2018.3.3,Fluorine
Dumps a data structure to JSON.
This filter was added to provide this functionality to hosts which have a
Jinja release older than version 2.9 installed. If Jinja 2.9 or newer is
installed, then the upstream version of the filter will be used. See the
`upstream docs`__ for more information.
.. __: http://jinja.pocoo.org/docs/2.10/templates/#tojson
.. jinja_ref:: random_hash
``random_hash``

View file

@ -1,27 +1,67 @@
========================================
In Progress: Salt 2017.7.7 Release Notes
========================================
===========================
Salt 2017.7.7 Release Notes
===========================
Version 2017.7.7 is an **unreleased** bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
This release is still in progress and has not been released yet.
Version 2017.7.7 is a bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.
The ``2017.7.7`` release contains only a single fix for Issue `#48038`_, which
is a critical bug that occurs in a multi-syndic setup where the same job is run
multiple times on a minion.
The ``2017.7.7`` release contains only a small number of fixes, which are detailed
below.
This release fixes two critical issues.
The first is Issue `#48038`_, which is a critical bug that occurs in a multi-syndic
setup where the same job is run multiple times on a minion.
The second issue is `#48130`_. This bug appears in certain setups where the Master
reports a Minion time-out, even though the job is still running on the Minion.
Both of these issues have been fixed with this release.
Statistics
==========
- Total Merges: **1**
- Total Issue References: **1**
- Total PR References: **2**
- Total Merges: **5**
- Total Issue References: **2**
- Total PR References: **6**
- Contributors: **2** (`garethgreenaway`_, `rallytime`_)
- Contributors: **3** (`garethgreenaway`_, `gtmanfred`_, `rallytime`_)
Changelog for v2017.7.6..v2017.7.7
==================================
*Generated at: 2018-06-14 15:43:34 UTC*
*Generated at: 2018-06-17 19:26:52 UTC*
* **ISSUE** `#48130`_: (`rmarchei`_) Minion timeouts with 2018.3.1 (refs: `#48157`_)
* **PR** `#48157`_: (`gtmanfred`_) always listen when gathering job info
@ *2018-06-17 19:04:09 UTC*
* 8af4452134 Merge pull request `#48157`_ from gtmanfred/2017.7.7
* d8209e8a40 always listen when gathering job info
* **PR** `#48140`_: (`rallytime`_) Update man pages for 2017.7.7
@ *2018-06-14 21:22:43 UTC*
* b98c52ee51 Merge pull request `#48140`_ from rallytime/man-pages-2017.7.7
* 8893bf0d4c Update man pages for 2017.7.7
* **PR** `#48136`_: (`gtmanfred`_) [2017.7.7] bootstrap kitchen branch tests with 2017.7.6
@ *2018-06-14 21:20:16 UTC*
* baa0363336 Merge pull request `#48136`_ from gtmanfred/2017.7.7
* fce1c31146 bootstrap kitchen branch tests with 2017.7.6
* **PR** `#48134`_: (`rallytime`_) Add release notes file for 2017.7.7
@ *2018-06-14 16:31:34 UTC*
* b0ba08f4d9 Merge pull request `#48134`_ from rallytime/release-notes-2017.7.7
* 217005b8f1 Add missing `v` for tag reference
* d53569d1e3 Add release notes file for 2017.7.7
* **ISSUE** `#48038`_: (`austinpapp`_) jobs are not dedup'ing minion side (refs: `#48075`_)
@ -37,6 +77,13 @@ Changelog for v2017.7.6..v2017.7.7
.. _`#48038`: https://github.com/saltstack/salt/issues/48038
.. _`#48075`: https://github.com/saltstack/salt/pull/48075
.. _`#48098`: https://github.com/saltstack/salt/pull/48098
.. _`#48130`: https://github.com/saltstack/salt/issues/48130
.. _`#48134`: https://github.com/saltstack/salt/pull/48134
.. _`#48136`: https://github.com/saltstack/salt/pull/48136
.. _`#48140`: https://github.com/saltstack/salt/pull/48140
.. _`#48157`: https://github.com/saltstack/salt/pull/48157
.. _`austinpapp`: https://github.com/austinpapp
.. _`garethgreenaway`: https://github.com/garethgreenaway
.. _`gtmanfred`: https://github.com/gtmanfred
.. _`rallytime`: https://github.com/rallytime
.. _`rmarchei`: https://github.com/rmarchei

View file

@ -14,3 +14,20 @@ Improves timezone detection by using the pytz module.
Adds ``timezone.list`` to list supported timezones in either Windows or Unix
format.
New Jinja Filter
================
The :jinja_ref:`tojson` filter (from Jinja 2.9 and later) has been ported to
Salt, and will be used when this filter is not available. This allows older LTS
releases such as CentOS 7 and Ubuntu 14.04 to use this filter.
You should use this filter any time you wish to dump a list or dictionary into
an SLS file, to ensure that the result is able to be loaded by the YAML
renderer. For example:
.. code-block:: jinja
foo:
bar.baz:
- some_arg: {{ mydict|tojson }}

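As a rough illustration of what the ported filter does on hosts with an older Jinja (this is an assumption about the behaviour, not the actual Salt implementation), it amounts to serialising the value to JSON, which the YAML renderer can then load because JSON is a subset of YAML:

import json

def tojson(value, indent=None):
    # Serialize a Python data structure to a JSON string; the result is
    # also valid YAML, so it can be embedded safely in an SLS file.
    return json.dumps(value, indent=indent)

print(tojson({'a': [1, 2, 3], 'b': 'c'}))
# {"a": [1, 2, 3], "b": "c"}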
View file

@ -330,7 +330,13 @@ Nested pillar values can also be set via the command line:
.. code-block:: bash
salt '*' state.sls my_sls_file pillar='{"foo": {"bar": "baz"}}'
salt '*' state.sls my_sls_file pillar='{"foo": {"bar": "baz"}}'
Lists can be passed via command line pillar data as follows:
.. code-block:: bash
salt '*' state.sls my_sls_file pillar='{"some_list": ["foo", "bar", "baz"]}'
.. note::

View file

@ -139,13 +139,18 @@ where it is necessary to invoke the same function from a custom :ref:`outputter
<all-salt.output>`/returner, as well as an execution module.
Utility modules placed in ``salt://_utils/`` will be synced to the minions when
any of the following Salt functions are called:
a :ref:`highstate <running-highstate>` is run, as well as when any of the
following Salt functions are called:
* :mod:`state.apply <salt.modules.state.apply_>`
* :mod:`saltutil.sync_utils <salt.modules.saltutil.sync_utils>`
* :mod:`saltutil.sync_all <salt.modules.saltutil.sync_all>`
* :py:func:`saltutil.sync_utils <salt.modules.saltutil.sync_utils>`
* :py:func:`saltutil.sync_all <salt.modules.saltutil.sync_all>`
As of the Fluorine release, as well as 2017.7.7 and 2018.3.2 in their
respective release cycles, the ``sync`` argument to :py:func:`state.apply
<salt.modules.state.apply_>`/:py:func:`state.sls <salt.modules.state.sls>` can
be used to sync custom types when running individual SLS files.
To sync to the Master, use either of the following:
* :mod:`saltutil.sync_utils <salt.runners.saltutil.sync_utils>`
* :mod:`saltutil.sync_all <salt.runners.saltutil.sync_all>`
* :py:func:`saltutil.sync_utils <salt.runners.saltutil.sync_utils>`
* :py:func:`saltutil.sync_all <salt.runners.saltutil.sync_all>`

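A small sketch of the utility-module pattern this section describes; the module name 'myutil' and the function 'double' are hypothetical.

# salt://_utils/myutil.py -- synced to minions by the functions listed above
def double(x):
    return x * 2

# Once synced, a custom execution or state module can reach the helper
# through the __utils__ dunder, for example:
#
#     def some_function():
#         return __utils__['myutil.double'](21)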
View file

@ -229,7 +229,7 @@ class LocalClient(object):
# Looks like the timeout is invalid, use config
return self.opts['timeout']
def gather_job_info(self, jid, tgt, tgt_type, **kwargs):
def gather_job_info(self, jid, tgt, tgt_type, listen=True, **kwargs):
'''
Return the information about a given job
'''
@ -241,6 +241,7 @@ class LocalClient(object):
arg=[jid],
tgt_type=tgt_type,
timeout=timeout,
listen=listen,
**kwargs
)

View file

@ -1245,7 +1245,10 @@ ARGS = {10}\n'''.format(self.minion_config,
shim_tmp_file.write(salt.utils.stringutils.to_bytes(cmd_str))
# Copy shim to target system, under $HOME/.<randomized name>
target_shim_file = '.{0}.{1}'.format(binascii.hexlify(os.urandom(6)), extension)
target_shim_file = '.{0}.{1}'.format(
binascii.hexlify(os.urandom(6)).decode('ascii'),
extension
)
if self.winrm:
target_shim_file = saltwinshell.get_target_shim_file(self, target_shim_file)
self.shell.send(shim_tmp_file.name, target_shim_file, makedirs=True)

View file

@ -206,8 +206,8 @@ def get_rsa_pub_key(path):
'''
log.debug('salt.crypt.get_rsa_pub_key: Loading public key')
if HAS_M2:
with salt.utils.files.fopen(path) as f:
data = f.read().replace(b'RSA ', '')
with salt.utils.files.fopen(path, 'rb') as f:
data = f.read().replace(b'RSA ', b'')
bio = BIO.MemoryBuffer(data)
key = RSA.load_pub_key_bio(bio)
else:

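A brief note on why the change above matters on Python 3 (illustrative snippet, not from the changeset): once the file is opened in binary mode, both the pattern and the replacement passed to replace() must be bytes.

data = b'-----BEGIN RSA PUBLIC KEY-----'
data.replace(b'RSA ', b'')    # OK: bytes pattern, bytes replacement
# data.replace(b'RSA ', '')   # TypeError on Python 3: replacement must also be bytes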
View file

@ -1597,7 +1597,7 @@ def os_data():
# my_init as pid1
grains['init'] = 'runit'
else:
log.info(
log.debug(
'Could not determine init system from command line: (%s)',
' '.join(init_cmdline)
)

View file

@ -359,13 +359,11 @@ class FileserverUpdate(salt.utils.process.SignalHandlingMultiprocessingProcess):
self.__init__(
state['opts'],
log_queue=state['log_queue'],
log_queue_level=state['log_queue_level']
)
def __getstate__(self):
return {'opts': self.opts,
'log_queue': self.log_queue,
'log_queue_level': self.log_queue_level
}
def fill_buckets(self):
@ -590,11 +588,19 @@ class Master(SMaster):
pass
if self.opts.get('git_pillar_verify_config', True):
git_pillars = [
x for x in self.opts.get('ext_pillar', [])
if 'git' in x
and not isinstance(x['git'], six.string_types)
]
try:
git_pillars = [
x for x in self.opts.get('ext_pillar', [])
if 'git' in x
and not isinstance(x['git'], six.string_types)
]
except TypeError:
git_pillars = []
critical_errors.append(
'Invalid ext_pillar configuration. It is likely that the '
'external pillar type was not specified for one or more '
'external pillars.'
)
if git_pillars:
try:
new_opts = copy.deepcopy(self.opts)

View file

@ -4463,27 +4463,6 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
if perms['lattrs']:
chattr(name, operator='remove', attributes=perms['lattrs'])
# Mode changes if needed
if mode is not None:
# File is a symlink, ignore the mode setting
# if follow_symlinks is False
if os.path.islink(name) and not follow_symlinks:
pass
else:
mode = salt.utils.files.normalize_mode(mode)
if mode != perms['lmode']:
if __opts__['test'] is True:
ret['changes']['mode'] = mode
else:
set_mode(name, mode)
if mode != salt.utils.files.normalize_mode(get_mode(name)):
ret['result'] = False
ret['comment'].append(
'Failed to change mode to {0}'.format(mode)
)
else:
ret['changes']['mode'] = mode
# user/group changes if needed, then check if it worked
if user:
if isinstance(user, int):
@ -4572,6 +4551,27 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
if perms.get('lattrs', ''):
chattr(name, operator='add', attributes=perms['lattrs'])
# Mode changes if needed
if mode is not None:
# File is a symlink, ignore the mode setting
# if follow_symlinks is False
if os.path.islink(name) and not follow_symlinks:
pass
else:
mode = salt.utils.files.normalize_mode(mode)
if mode != perms['lmode']:
if __opts__['test'] is True:
ret['changes']['mode'] = mode
else:
set_mode(name, mode)
if mode != salt.utils.files.normalize_mode(get_mode(name)):
ret['result'] = False
ret['comment'].append(
'Failed to change mode to {0}'.format(mode)
)
else:
ret['changes']['mode'] = mode
# Modify attributes of file if needed
if attrs is not None and not is_dir:
# File is a symlink, ignore the mode setting
@ -4722,6 +4722,11 @@ def check_managed_changes(
defaults,
skip_verify,
**kwargs)
# Ensure that user-provided hash string is lowercase
if source_sum and ('hsum' in source_sum):
source_sum['hsum'] = source_sum['hsum'].lower()
if comments:
__clean_tmp(sfn)
return False, comments

View file

@ -35,9 +35,9 @@ def __virtual__():
return __virtualname__
ini_regx = re.compile(r'^\s*\[(.+?)\]\s*$', flags=re.M)
com_regx = re.compile(r'^\s*(#|;)\s*(.*)')
indented_regx = re.compile(r'(\s+)(.*)')
INI_REGX = re.compile(r'^\s*\[(.+?)\]\s*$', flags=re.M)
COM_REGX = re.compile(r'^\s*(#|;)\s*(.*)')
INDENTED_REGX = re.compile(r'(\s+)(.*)')
def set_option(file_name, sections=None, separator='='):
@ -105,7 +105,13 @@ def get_option(file_name, section, option, separator='='):
salt '*' ini.get_option /path/to/ini section_name option_name
'''
inifile = _Ini.get_ini_file(file_name, separator=separator)
return inifile.get(section, {}).get(option, None)
if section:
try:
return inifile.get(section, {}).get(option, None)
except AttributeError:
return None
else:
return inifile.get(option, None)
def remove_option(file_name, section, option, separator='='):
@ -129,7 +135,10 @@ def remove_option(file_name, section, option, separator='='):
salt '*' ini.remove_option /path/to/ini section_name option_name
'''
inifile = _Ini.get_ini_file(file_name, separator=separator)
value = inifile.get(section, {}).pop(option, None)
if isinstance(inifile.get(section), (dict, OrderedDict)):
value = inifile.get(section, {}).pop(option, None)
else:
value = inifile.pop(option, None)
inifile.flush()
return value
@ -182,15 +191,53 @@ def remove_section(file_name, section, separator='='):
salt '*' ini.remove_section /path/to/ini section_name
'''
inifile = _Ini.get_ini_file(file_name, separator=separator)
if section in inifile:
section = inifile.pop(section)
inifile.flush()
ret = {}
for key, value in six.iteritems(section):
if key[0] != '#':
ret.update({key: value})
return ret
def get_ini(file_name, separator='='):
'''
Retrieve whole structure from an ini file and return it as dictionary.
API Example:
.. code-block:: python
import salt
sc = salt.client.get_local_client()
sc.cmd('target', 'ini.get_ini',
[path_to_ini_file])
CLI Example:
.. code-block:: bash
salt '*' ini.get_ini /path/to/ini
'''
def ini_odict2dict(odict):
'''
Transform OrderedDict to regular dict recursively
:param odict: OrderedDict
:return: regular dict
'''
ret = {}
for key, val in six.iteritems(odict):
if key[0] != '#':
if isinstance(val, (dict, OrderedDict)):
ret.update({key: ini_odict2dict(val)})
else:
ret.update({key: val})
return ret
inifile = _Ini.get_ini_file(file_name, separator=separator)
section = inifile.pop(section, {})
inifile.flush()
ret = {}
for key, value in six.iteritems(section):
if key[0] != '#':
ret.update({key: value})
return ret
return ini_odict2dict(inifile)
class _Section(OrderedDict):
@ -221,7 +268,7 @@ class _Section(OrderedDict):
self.pop(opt)
for opt_str in inicontents.split(os.linesep):
# Match comments
com_match = com_regx.match(opt_str)
com_match = COM_REGX.match(opt_str)
if com_match:
name = '#comment{0}'.format(comment_count)
self.com = com_match.group(1)
@ -229,7 +276,7 @@ class _Section(OrderedDict):
self.update({name: opt_str})
continue
# Add indented lines to the value of the previous entry.
indented_match = indented_regx.match(opt_str)
indented_match = INDENTED_REGX.match(opt_str)
if indented_match:
indent = indented_match.group(1).replace('\t', ' ')
if indent > curr_indent:
@ -318,7 +365,7 @@ class _Section(OrderedDict):
sections_dict = OrderedDict()
for name, value in six.iteritems(self):
# Handle Comment Lines
if com_regx.match(name):
if COM_REGX.match(name):
yield '{0}{1}'.format(value, os.linesep)
# Handle Sections
elif isinstance(value, _Section):
@ -363,9 +410,6 @@ class _Section(OrderedDict):
class _Ini(_Section):
def __init__(self, name, inicontents='', separator='=', commenter='#'):
super(_Ini, self).__init__(name, inicontents, separator, commenter)
def refresh(self, inicontents=None):
if inicontents is None:
try:
@ -382,7 +426,7 @@ class _Ini(_Section):
# Remove anything left behind from a previous run.
self.clear()
inicontents = ini_regx.split(inicontents)
inicontents = INI_REGX.split(inicontents)
inicontents.reverse()
# Pop anything defined outside of a section (ie. at the top of
# the ini file).

View file

@ -2268,22 +2268,22 @@ def _change_state(cmd,
# as te command itself mess with double forks; we must not
# communicate with it, but just wait for the exit status
pkwargs = {'python_shell': False,
'redirect_stderr': True,
'with_communicate': with_communicate,
'use_vt': use_vt,
'stdin': stdin,
'stdout': stdout,
'stderr': stderr}
'stdout': stdout}
for i in [a for a in pkwargs]:
val = pkwargs[i]
if val is _marker:
pkwargs.pop(i, None)
error = __salt__['cmd.run_stderr'](cmd, **pkwargs)
_cmdout = __salt__['cmd.run_all'](cmd, **pkwargs)
if error:
if _cmdout['retcode'] != 0:
raise CommandExecutionError(
'Error changing state for container \'{0}\' using command '
'\'{1}\': {2}'.format(name, cmd, error)
'\'{1}\': {2}'.format(name, cmd, _cmdout['stdout'])
)
if expected is not None:
# some commands do not wait, so we will

View file

@ -25,13 +25,13 @@ def __virtual__():
'''
if salt.utils.platform.is_darwin() or salt.utils.platform.is_windows():
return True
return (False, 'Module proxy: module only works on Windows or MacOS systems')
return False, 'Module proxy: module only works on Windows or MacOS systems'
def _get_proxy_osx(function, network_service):
def _get_proxy_osx(cmd_function, network_service):
ret = {}
out = __salt__['cmd.run']('networksetup -{0} {1}'.format(function, network_service))
out = __salt__['cmd.run']('networksetup -{0} {1}'.format(cmd_function, network_service))
match = re.match('Enabled: (.*)\nServer: (.*)\nPort: (.*)\n', out)
if match is not None:
g = match.groups()
@ -41,8 +41,8 @@ def _get_proxy_osx(function, network_service):
return ret
def _set_proxy_osx(function, server, port, user, password, network_service):
cmd = 'networksetup -{0} {1} {2} {3}'.format(function, network_service, server, port)
def _set_proxy_osx(cmd_function, server, port, user, password, network_service):
cmd = 'networksetup -{0} {1} {2} {3}'.format(cmd_function, network_service, server, port)
if user is not None and password is not None:
cmd = cmd + ' On {0} {1}'.format(user, password)
@ -58,12 +58,12 @@ def _get_proxy_windows(types=None):
if types is None:
types = ['http', 'https', 'ftp']
reg_val = __salt__['reg.read_value']('HKEY_CURRENT_USER',
r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
'ProxyServer')
servers = reg_val['vdata']
servers = __salt__['reg.read_value'](
hive='HKEY_CURRENT_USER',
key=r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
vname='ProxyServer')['vdata']
if "=" in servers:
if servers and "=" in servers:
split = servers.split(";")
for s in split:
if len(s) == 0:
@ -87,16 +87,19 @@ def _get_proxy_windows(types=None):
del ret[key]
# Return enabled info
reg_val = __salt__['reg.read_value']('HKEY_CURRENT_USER',
r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
'ProxyEnable')
enabled = reg_val.get('vdata', 0)
ret['enabled'] = True if enabled == 1 else False
ret['enabled'] = __salt__['reg.read_value'](
hive='HKEY_CURRENT_USER',
key=r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
vname='ProxyEnable')['vdata'] == 1
return ret
def _set_proxy_windows(server, port, types=None, bypass_hosts=None, import_winhttp=True):
def _set_proxy_windows(server,
port,
types=None,
bypass_hosts=None,
import_winhttp=True):
if types is None:
types = ['http', 'https', 'ftp']
@ -104,17 +107,27 @@ def _set_proxy_windows(server, port, types=None, bypass_hosts=None, import_winht
for t in types:
server_str += '{0}={1}:{2};'.format(t, server, port)
__salt__['reg.set_value']('HKEY_CURRENT_USER', r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
'ProxyServer', server_str)
__salt__['reg.set_value'](
hive='HKEY_CURRENT_USER',
key=r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
vname='ProxyServer',
vdata=server_str)
__salt__['reg.set_value']('HKEY_CURRENT_USER', r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
'ProxyEnable', 1, vtype='REG_DWORD')
__salt__['reg.set_value'](
hive='HKEY_CURRENT_USER',
key=r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
vname='ProxyEnable',
vdata=1,
vtype='REG_DWORD')
if bypass_hosts is not None:
bypass_hosts_str = '<local>;{0}'.format(';'.join(bypass_hosts))
__salt__['reg.set_value']('HKEY_CURRENT_USER', r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
'ProxyOverride', bypass_hosts_str)
__salt__['reg.set_value'](
hive='HKEY_CURRENT_USER',
key=r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
vname='ProxyOverride',
vdata=bypass_hosts_str)
if import_winhttp:
cmd = 'netsh winhttp import proxy source=ie'
@ -138,15 +151,22 @@ def get_http_proxy(network_service="Ethernet"):
salt '*' proxy.get_http_proxy Ethernet
'''
if __grains__['os'] == 'Windows':
return _get_proxy_windows(['http'])
return _get_proxy_windows(types=['http'])
return _get_proxy_osx("getwebproxy", network_service)
return _get_proxy_osx(cmd_function="getwebproxy",
network_service=network_service)
def set_http_proxy(server, port, user=None, password=None, network_service="Ethernet", bypass_hosts=None):
def set_http_proxy(server,
port,
user=None,
password=None,
network_service="Ethernet",
bypass_hosts=None):
'''
Sets the http proxy settings. Note: On Windows this will override any other proxy settings you have,
the preferred method of updating proxies on windows is using set_proxy.
Sets the http proxy settings. Note: On Windows this will override any other
proxy settings you have, the preferred method of updating proxies on windows
is using set_proxy.
server
The proxy server to use
@ -165,8 +185,8 @@ def set_http_proxy(server, port, user=None, password=None, network_service="Ethe
macOS
bypass_hosts
The hosts that are allowed to by pass the proxy. Only used on Windows for other OS's use
set_proxy_bypass to edit the bypass hosts.
The hosts that are allowed to by pass the proxy. Only used on Windows
for other OS's use set_proxy_bypass to edit the bypass hosts.
CLI Example:
@ -175,9 +195,17 @@ def set_http_proxy(server, port, user=None, password=None, network_service="Ethe
salt '*' proxy.set_http_proxy example.com 1080 user=proxy_user password=proxy_pass network_service=Ethernet
'''
if __grains__['os'] == 'Windows':
return _set_proxy_windows(server, port, ['http'], bypass_hosts)
return _set_proxy_windows(server=server,
port=port,
types=['http'],
bypass_hosts=bypass_hosts)
return _set_proxy_osx("setwebproxy", server, port, user, password, network_service)
return _set_proxy_osx(cmd_function="setwebproxy",
server=server,
port=port,
user=user,
password=password,
network_service=network_service)
def get_https_proxy(network_service="Ethernet"):
@ -195,15 +223,22 @@ def get_https_proxy(network_service="Ethernet"):
salt '*' proxy.get_https_proxy Ethernet
'''
if __grains__['os'] == 'Windows':
return _get_proxy_windows(['https'])
return _get_proxy_windows(types=['https'])
return _get_proxy_osx("getsecurewebproxy", network_service)
return _get_proxy_osx(cmd_function="getsecurewebproxy",
network_service=network_service)
def set_https_proxy(server, port, user=None, password=None, network_service="Ethernet", bypass_hosts=None):
def set_https_proxy(server,
port,
user=None,
password=None,
network_service="Ethernet",
bypass_hosts=None):
'''
Sets the https proxy settings. Note: On Windows this will override any other proxy settings you have,
the preferred method of updating proxies on windows is using set_proxy.
Sets the https proxy settings. Note: On Windows this will override any other
proxy settings you have, the preferred method of updating proxies on windows
is using set_proxy.
server
The proxy server to use
@ -222,8 +257,8 @@ def set_https_proxy(server, port, user=None, password=None, network_service="Eth
macOS
bypass_hosts
The hosts that are allowed to by pass the proxy. Only used on Windows for other OS's use
set_proxy_bypass to edit the bypass hosts.
The hosts that are allowed to by pass the proxy. Only used on Windows
for other OS's use set_proxy_bypass to edit the bypass hosts.
CLI Example:
@ -232,9 +267,17 @@ def set_https_proxy(server, port, user=None, password=None, network_service="Eth
salt '*' proxy.set_https_proxy example.com 1080 user=proxy_user password=proxy_pass network_service=Ethernet
'''
if __grains__['os'] == 'Windows':
return _set_proxy_windows(server, port, ['https'], bypass_hosts)
return _set_proxy_windows(server=server,
port=port,
types=['https'],
bypass_hosts=bypass_hosts)
return _set_proxy_osx("setsecurewebproxy", server, port, user, password, network_service)
return _set_proxy_osx(cmd_function="setsecurewebproxy",
server=server,
port=port,
user=user,
password=password,
network_service=network_service)
def get_ftp_proxy(network_service="Ethernet"):
@ -252,12 +295,18 @@ def get_ftp_proxy(network_service="Ethernet"):
salt '*' proxy.get_ftp_proxy Ethernet
'''
if __grains__['os'] == 'Windows':
return _get_proxy_windows(['ftp'])
return _get_proxy_windows(types=['ftp'])
return _get_proxy_osx("getftpproxy", network_service)
return _get_proxy_osx(cmd_function="getftpproxy",
network_service=network_service)
def set_ftp_proxy(server, port, user=None, password=None, network_service="Ethernet", bypass_hosts=None):
def set_ftp_proxy(server,
port,
user=None,
password=None,
network_service="Ethernet",
bypass_hosts=None):
'''
Sets the ftp proxy settings
@ -278,8 +327,8 @@ def set_ftp_proxy(server, port, user=None, password=None, network_service="Ether
macOS
bypass_hosts
The hosts that are allowed to by pass the proxy. Only used on Windows for other OS's use
set_proxy_bypass to edit the bypass hosts.
The hosts that are allowed to by pass the proxy. Only used on Windows
for other OS's use set_proxy_bypass to edit the bypass hosts.
CLI Example:
@ -288,9 +337,17 @@ def set_ftp_proxy(server, port, user=None, password=None, network_service="Ether
salt '*' proxy.set_ftp_proxy example.com 1080 user=proxy_user password=proxy_pass network_service=Ethernet
'''
if __grains__['os'] == 'Windows':
return _set_proxy_windows(server, port, ['ftp'], bypass_hosts)
return _set_proxy_windows(server=server,
port=port,
types=['ftp'],
bypass_hosts=bypass_hosts)
return _set_proxy_osx("setftpproxy", server, port, user, password, network_service)
return _set_proxy_osx(cmd_function="setftpproxy",
server=server,
port=port,
user=user,
password=password,
network_service=network_service)
def get_proxy_bypass(network_service="Ethernet"):
@ -309,12 +366,16 @@ def get_proxy_bypass(network_service="Ethernet"):
'''
if __grains__['os'] == 'Windows':
reg_val = __salt__['reg.read_value']('HKEY_CURRENT_USER',
r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
'ProxyOverride')
bypass_servers = reg_val['vdata'].replace("<local>", "").split(";")
reg_val = __salt__['reg.read_value'](
hive='HKEY_CURRENT_USER',
key=r'SOFTWARE\Microsoft\Windows\CurrentVersion\Internet Settings',
vname='ProxyOverride')['vdata']
return bypass_servers
# `reg.read_value` returns None if the key doesn't exist
if reg_val is None:
return []
return reg_val.replace('<local>', '').split(';')
out = __salt__['cmd.run']('networksetup -getproxybypassdomains {0}'.format(network_service))
@ -357,7 +418,12 @@ def set_proxy_win(server, port, types=None, bypass_hosts=None):
The password to use if required by the server
types
The types of proxy connections should be setup with this server. Valid types are http and https.
The types of proxy connections should be setup with this server. Valid
types are:
- ``http``
- ``https``
- ``ftp``
bypass_hosts
The hosts that are allowed to by pass the proxy.
@ -369,7 +435,10 @@ def set_proxy_win(server, port, types=None, bypass_hosts=None):
salt '*' proxy.set_http_proxy example.com 1080 types="['http', 'https']"
'''
if __grains__['os'] == 'Windows':
return _set_proxy_windows(server, port, types, bypass_hosts)
return _set_proxy_windows(server=server,
port=port,
types=types,
bypass_hosts=bypass_hosts)
def get_proxy_win():

View file

@ -225,6 +225,7 @@ def list_users(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'list_users', '-q'],
reset_system_locale=False,
runas=runas,
python_shell=False)
@ -248,6 +249,7 @@ def list_vhosts(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'list_vhosts', '-q'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -322,6 +324,7 @@ def add_user(name, password=None, runas=None):
res = __salt__['cmd.run_all'](
cmd,
reset_system_locale=False,
output_loglevel='quiet',
runas=runas,
python_shell=python_shell)
@ -354,6 +357,7 @@ def delete_user(name, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'delete_user', name],
reset_system_locale=False,
python_shell=False,
runas=runas)
msg = 'Deleted'
@ -389,6 +393,7 @@ def change_password(name, password, runas=None):
cmd = [RABBITMQCTL, 'change_password', name, password]
res = __salt__['cmd.run_all'](
cmd,
reset_system_locale=False,
runas=runas,
output_loglevel='quiet',
python_shell=python_shell)
@ -411,6 +416,7 @@ def clear_password(name, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'clear_password', name],
reset_system_locale=False,
runas=runas,
python_shell=False)
msg = 'Password Cleared'
@ -436,7 +442,7 @@ def check_password(name, password, runas=None):
runas = salt.utils.user.get_user()
try:
res = __salt__['cmd.run']([RABBITMQCTL, 'status'], runas=runas, python_shell=False)
res = __salt__['cmd.run']([RABBITMQCTL, 'status'], reset_system_locale=False, runas=runas, python_shell=False)
server_version = re.search(r'\{rabbit,"RabbitMQ","(.+)"\}', res)
if server_version is None:
@ -468,6 +474,7 @@ def check_password(name, password, runas=None):
res = __salt__['cmd.run_all'](
cmd,
reset_system_locale=False,
runas=runas,
output_loglevel='quiet',
python_shell=python_shell)
@ -483,6 +490,7 @@ def check_password(name, password, runas=None):
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'eval', cmd],
reset_system_locale=False,
runas=runas,
output_loglevel='quiet',
python_shell=False)
@ -511,6 +519,7 @@ def add_vhost(vhost, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'add_vhost', vhost],
reset_system_locale=False,
runas=runas,
python_shell=False)
@ -532,6 +541,7 @@ def delete_vhost(vhost, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'delete_vhost', vhost],
reset_system_locale=False,
runas=runas,
python_shell=False)
msg = 'Deleted'
@ -553,6 +563,7 @@ def set_permissions(vhost, user, conf='.*', write='.*', read='.*', runas=None):
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'set_permissions', '-p',
vhost, user, conf, write, read],
reset_system_locale=False,
runas=runas,
python_shell=False)
msg = 'Permissions Set'
@ -573,6 +584,7 @@ def list_permissions(vhost, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'list_permissions', '-q', '-p', vhost],
reset_system_locale=False,
runas=runas,
python_shell=False)
@ -593,6 +605,7 @@ def list_user_permissions(name, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'list_user_permissions', name, '-q'],
reset_system_locale=False,
runas=runas,
python_shell=False)
@ -616,6 +629,7 @@ def set_user_tags(name, tags, runas=None):
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'set_user_tags', name] + list(tags),
reset_system_locale=False,
runas=runas,
python_shell=False)
msg = "Tag(s) set"
@ -636,6 +650,7 @@ def status(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'status'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -656,6 +671,7 @@ def cluster_status(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'cluster_status'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -680,7 +696,7 @@ def join_cluster(host, user='rabbit', ram_node=None, runas=None):
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
stop_app(runas)
res = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
res = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
start_app(runas)
return _format_response(res, 'Join')
@ -700,6 +716,7 @@ def stop_app(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'stop_app'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -720,6 +737,7 @@ def start_app(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'start_app'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -740,6 +758,7 @@ def reset(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'reset'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -760,6 +779,7 @@ def force_reset(runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'force_reset'],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -780,7 +800,7 @@ def list_queues(runas=None, *args):
runas = salt.utils.user.get_user()
cmd = [RABBITMQCTL, 'list_queues', '-q']
cmd.extend(args)
res = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
res = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
_check_response(res)
return _output_to_dict(res['stdout'])
@ -802,7 +822,7 @@ def list_queues_vhost(vhost, runas=None, *args):
runas = salt.utils.user.get_user()
cmd = [RABBITMQCTL, 'list_queues', '-q', '-p', vhost]
cmd.extend(args)
res = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
res = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
_check_response(res)
return _output_to_dict(res['stdout'])
@ -825,6 +845,7 @@ def list_policies(vhost="/", runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'list_policies', '-q', '-p', vhost],
reset_system_locale=False,
runas=runas,
python_shell=False)
_check_response(res)
@ -902,7 +923,7 @@ def set_policy(vhost,
if apply_to:
cmd.extend(['--apply-to', apply_to])
cmd.extend([name, pattern, definition])
res = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
res = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
log.debug('Set policy: %s', res['stdout'])
return _format_response(res, 'Set')
@ -923,6 +944,7 @@ def delete_policy(vhost, name, runas=None):
runas = salt.utils.user.get_user()
res = __salt__['cmd.run_all'](
[RABBITMQCTL, 'clear_policy', '-p', vhost, name],
reset_system_locale=False,
runas=runas,
python_shell=False)
log.debug('Delete policy: %s', res['stdout'])
@ -960,7 +982,7 @@ def list_available_plugins(runas=None):
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
cmd = [_get_rabbitmq_plugin(), 'list', '-m']
ret = __salt__['cmd.run_all'](cmd, python_shell=False, runas=runas)
ret = __salt__['cmd.run_all'](cmd, reset_system_locale=False, python_shell=False, runas=runas)
_check_response(ret)
return _output_to_list(ret['stdout'])
@ -978,7 +1000,7 @@ def list_enabled_plugins(runas=None):
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
cmd = [_get_rabbitmq_plugin(), 'list', '-m', '-e']
ret = __salt__['cmd.run_all'](cmd, python_shell=False, runas=runas)
ret = __salt__['cmd.run_all'](cmd, reset_system_locale=False, python_shell=False, runas=runas)
_check_response(ret)
return _output_to_list(ret['stdout'])
@ -1011,7 +1033,7 @@ def enable_plugin(name, runas=None):
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
cmd = [_get_rabbitmq_plugin(), 'enable', name]
ret = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
ret = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
return _format_response(ret, 'Enabled')
@ -1028,5 +1050,5 @@ def disable_plugin(name, runas=None):
if runas is None and not salt.utils.platform.is_windows():
runas = salt.utils.user.get_user()
cmd = [_get_rabbitmq_plugin(), 'disable', name]
ret = __salt__['cmd.run_all'](cmd, runas=runas, python_shell=False)
ret = __salt__['cmd.run_all'](cmd, reset_system_locale=False, runas=runas, python_shell=False)
return _format_response(ret, 'Disabled')

View file

@ -37,6 +37,7 @@ def __virtual__():
'Devuan',
'Arch',
'Arch ARM',
'Manjaro',
'ALT',
'SUSE Enterprise Server',
'SUSE',

View file

@ -602,8 +602,7 @@ def template_str(tem, queue=False, **kwargs):
return ret
def apply_(mods=None,
**kwargs):
def apply_(mods=None, **kwargs):
'''
.. versionadded:: 2015.5.0
@ -743,6 +742,22 @@ def apply_(mods=None,
.. code-block:: bash
salt '*' state.apply test localconfig=/path/to/minion.yml
sync_mods
If specified, the desired custom module types will be synced prior to
running the SLS files:
.. code-block:: bash
salt '*' state.apply test sync_mods=states,modules
salt '*' state.apply test sync_mods=all
.. note::
This option is ignored when no SLS files are specified, as a
:ref:`highstate <running-highstate>` automatically syncs all custom
module types.
.. versionadded:: 2017.7.8,2018.3.3,Fluorine
'''
if mods:
return sls(mods, **kwargs)
@ -1068,7 +1083,7 @@ def highstate(test=None, queue=False, **kwargs):
return ret
def sls(mods, test=None, exclude=None, queue=False, **kwargs):
def sls(mods, test=None, exclude=None, queue=False, sync_mods=None, **kwargs):
'''
Execute the states in one or more SLS files
@ -1160,6 +1175,17 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
.. versionadded:: 2015.8.4
sync_mods
If specified, the desired custom module types will be synced prior to
running the SLS files:
.. code-block:: bash
salt '*' state.sls test sync_mods=states,modules
salt '*' state.sls test sync_mods=all
.. versionadded:: 2017.7.8,2018.3.3,Fluorine
CLI Example:
.. code-block:: bash
@ -1223,6 +1249,28 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
'{0}.cache.p'.format(kwargs.get('cache_name', 'highstate'))
)
if sync_mods is True:
sync_mods = ['all']
if sync_mods is not None:
sync_mods = salt.utils.args.split_input(sync_mods)
else:
sync_mods = []
if 'all' in sync_mods and sync_mods != ['all']:
# Prevent unnecessary extra syncing
sync_mods = ['all']
for module_type in sync_mods:
try:
__salt__['saltutil.sync_{0}'.format(module_type)](
saltenv=opts['saltenv']
)
except KeyError:
log.warning(
'Invalid custom module type \'%s\', ignoring',
module_type
)
try:
st_ = salt.state.HighState(opts,
pillar_override,

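For reference, a short sketch of how the sync_mods value is normalised by salt.utils.args.split_input in the hunk above; the exact return values shown are an assumption about that helper's behaviour.

import salt.utils.args

salt.utils.args.split_input('states,modules')   # -> ['states', 'modules']
salt.utils.args.split_input(['all'])            # -> ['all']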
View file

@ -705,7 +705,7 @@ def modify(name,
win32service.SERVICE_QUERY_CONFIG)
except pywintypes.error as exc:
raise CommandExecutionError(
'Failed To Open {0}: {1}'.format(name, exc[2]))
'Failed To Open {0}: {1}'.format(name, exc))
config_info = win32service.QueryServiceConfig(handle_svc)

View file

@ -933,9 +933,11 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
# Generate jid before triggering a job to subscribe all returns from minions
chunk['jid'] = salt.utils.jid.gen_jid(self.application.opts)
# Subscribe returns from minions before firing a job
minions = set(self.ckminions.check_minions(chunk['tgt'], chunk.get('tgt_type', 'glob')))
future_minion_map = self.subscribe_minion_returns(chunk['jid'], minions)
# start listening for the event before we fire the job to avoid races
events = [
self.application.event_listener.get_event(self, tag='salt/job/'+chunk['jid']),
self.application.event_listener.get_event(self, tag='syndic/job/'+chunk['jid']),
]
f_call = self._format_call_run_job_async(chunk)
# fire a job off
@ -944,88 +946,92 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
# if the job didn't publish, lets not wait around for nothing
# TODO: set header??
if 'jid' not in pub_data:
for future in future_minion_map:
for future in events:
try:
future.set_result(None)
except Exception:
pass
raise tornado.gen.Return('No minions matched the target. No command was sent, no jid was assigned.')
# Map of minion_id -> returned for all minions we think we need to wait on
minions = {m: False for m in pub_data['minions']}
# minimum time required for return to complete. By default no waiting, if
# we are a syndic then we must wait syndic_wait at a minimum
min_wait_time = Future()
min_wait_time.set_result(True)
# wait syndic a while to avoid missing published events
if self.application.opts['order_masters']:
yield tornado.gen.sleep(self.application.opts['syndic_wait'])
min_wait_time = tornado.gen.sleep(self.application.opts['syndic_wait'])
# To ensure job_not_running and all_return are terminated by each other, communicate using a future
is_finished = Future()
is_finished = tornado.gen.sleep(self.application.opts['gather_job_timeout'])
job_not_running_future = self.job_not_running(pub_data['jid'],
# ping until the job is not running, while doing so, if we see new minions returning
# that they are running the job, add them to the list
tornado.ioloop.IOLoop.current().spawn_callback(self.job_not_running, pub_data['jid'],
chunk['tgt'],
f_call['kwargs']['tgt_type'],
minions,
is_finished)
minion_returns_future = self.sanitize_minion_returns(future_minion_map, pub_data['minions'], is_finished)
yield job_not_running_future
raise tornado.gen.Return((yield minion_returns_future))
def subscribe_minion_returns(self, jid, minions):
# Subscribe each minion event
future_minion_map = {}
for minion in minions:
tag = tagify([jid, 'ret', minion], 'job')
minion_future = self.application.event_listener.get_event(self,
tag=tag,
matcher=EventListener.exact_matcher)
future_minion_map[minion_future] = minion
return future_minion_map
@tornado.gen.coroutine
def sanitize_minion_returns(self, future_minion_map, minions, is_finished):
'''
Return a future which will complete once all returns are completed
(according to minions), or one of the passed in "finish_chunk_ret_future" completes
'''
if minions is None:
minions = []
# Remove redundant minions
redundant_minion_futures = [future for future in future_minion_map.keys() if future_minion_map[future] not in minions]
for redundant_minion_future in redundant_minion_futures:
try:
redundant_minion_future.set_result(None)
except Exception:
pass
del future_minion_map[redundant_minion_future]
def more_todo():
'''Check if there are any more minions we are waiting on returns from
'''
return any(x is False for x in six.itervalues(minions))
# here we want to follow the behavior of LocalClient.get_iter_returns:
# namely, wait at least syndic_wait (assuming we are a syndic) and until
# there are no more jobs running on minions. We are allowed to exit early
# if gather_job_timeout has been exceeded
chunk_ret = {}
while True:
f = yield Any(list(future_minion_map.keys()) + [is_finished])
to_wait = events+[is_finished]
if not min_wait_time.done():
to_wait += [min_wait_time]
def cancel_inflight_futures():
for event in to_wait:
if not event.done():
event.set_result(None)
f = yield Any(to_wait)
try:
# When finished entire routine, cleanup other futures and return result
if f is is_finished:
for event in future_minion_map.keys():
if not event.done():
event.set_result(None)
cancel_inflight_futures()
raise tornado.gen.Return(chunk_ret)
elif f is min_wait_time:
if not more_todo():
cancel_inflight_futures()
raise tornado.gen.Return(chunk_ret)
continue
f_result = f.result()
chunk_ret[f_result['data']['id']] = f_result['data']['return']
# if this is a start, then we need to add it to the pile
if f_result['tag'].endswith('/new'):
for minion_id in f_result['data']['minions']:
if minion_id not in minions:
minions[minion_id] = False
else:
chunk_ret[f_result['data']['id']] = f_result['data']['return']
# clear finished event future
minions[f_result['data']['id']] = True
# if there are no more minions to wait for, then we are done
if not more_todo() and min_wait_time.done():
cancel_inflight_futures()
raise tornado.gen.Return(chunk_ret)
except TimeoutException:
pass
# clear finished event future
try:
minions.remove(future_minion_map[f])
del future_minion_map[f]
except ValueError:
pass
if not minions:
if not is_finished.done():
is_finished.set_result(True)
raise tornado.gen.Return(chunk_ret)
if f == events[0]:
events[0] = self.application.event_listener.get_event(self, tag='salt/job/'+chunk['jid'])
else:
events[1] = self.application.event_listener.get_event(self, tag='syndic/job/'+chunk['jid'])
@tornado.gen.coroutine
def job_not_running(self, jid, tgt, tgt_type, is_finished):
def job_not_running(self, jid, tgt, tgt_type, minions, is_finished):
'''
Return a future which will complete once jid (passed in) is no longer
running on tgt
@ -1051,8 +1057,6 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
event = f.result()
except TimeoutException:
if not minion_running:
if not is_finished.done():
is_finished.set_result(True)
raise tornado.gen.Return(True)
else:
ping_pub_data = yield self.saltclients['local'](tgt,
@ -1066,6 +1070,8 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
# Minions can return, we want to see if the job is running...
if event['data'].get('return', {}) == {}:
continue
if event['data']['id'] not in minions:
minions[event['data']['id']] = False
minion_running = True
@tornado.gen.coroutine
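
The hunks above replace the per-minion return futures with two job-event futures plus timeout futures, raced together and cancelled once a winner is known. As a rough illustration of that race-and-cancel pattern only (this is not Salt's code: it uses asyncio instead of Salt's Tornado-based Any helper, and all names and delays are invented):

import asyncio


async def fake_minion_return(delay, value):
    # Stands in for an event arriving on salt/job/<jid>.
    await asyncio.sleep(delay)
    return value


async def collect_first_return(gather_job_timeout=2.0):
    result = asyncio.ensure_future(
        fake_minion_return(0.5, {'id': 'minion1', 'return': True}))
    is_finished = asyncio.ensure_future(asyncio.sleep(gather_job_timeout))
    to_wait = [result, is_finished]

    done, pending = await asyncio.wait(to_wait, return_when=asyncio.FIRST_COMPLETED)

    # Mirror cancel_inflight_futures(): clean up anything still pending.
    for fut in pending:
        fut.cancel()

    if result in done:
        return result.result()
    return {}  # timed out before any return arrived


if __name__ == '__main__':
    print(asyncio.get_event_loop().run_until_complete(collect_first_return()))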

View file

@ -328,7 +328,7 @@ def chconfig(cmd, *args, **kwargs):
'''
# Strip the __pub_ keys...is there a better way to do this?
for k in kwargs:
for k in list(kwargs):
if k.startswith('__pub_'):
kwargs.pop(k)
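
A quick standalone illustration (invented values, not taken from this module) of why the hunk switches to iterating over list(kwargs): on Python 3, popping keys while iterating the dict view raises RuntimeError, so the loop has to iterate over a snapshot of the keys.

kwargs = {'__pub_fun': 'chconfig', '__pub_jid': '20180706', 'setting': 'on'}

# for k in kwargs:           # RuntimeError: dictionary changed size during iteration
#     if k.startswith('__pub_'):
#         kwargs.pop(k)

for k in list(kwargs):        # a snapshot of the keys is safe to iterate
    if k.startswith('__pub_'):
        kwargs.pop(k)

print(kwargs)                 # {'setting': 'on'}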

View file

@ -164,7 +164,7 @@ def action(func=None,
instances,
provider,
instance,
**salt.utils.args.clean_kwargs(**kwargs)
salt.utils.args.clean_kwargs(**kwargs)
)
except SaltCloudConfigError as err:
log.error(err)

View file

@ -69,7 +69,7 @@ def set_(key, value, service=None, profile=None): # pylint: disable=W0613
'''
key, profile = _parse_key(key, profile)
cache = salt.cache.Cache(__opts__)
cache.set(profile['bank'], key=key, value=value)
cache.store(profile['bank'], key, value)
return get(key, service, profile)

View file

@ -83,6 +83,8 @@ def _checksum_file_path(path):
drive.rstrip(':'),
path.lstrip('/\\'),
)
elif str(exc).startswith('Cannot mix UNC'):
relpath = salt.utils.path_join('unc', path)
else:
raise
ret = salt.utils.path.join(__opts__['cachedir'], 'archive_hash', relpath)
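
A rough, hypothetical sketch of where a UNC source would land under the archive_hash cache once the 'Cannot mix UNC' branch applies. It uses os.path.join instead of Salt's path helpers, and the cachedir and source path are made up for illustration.

import os

cachedir = '/var/cache/salt/minion'
unc_source = r'\\fileserver\share\pkg.zip'

relpath = os.path.join('unc', unc_source.lstrip('/\\'))
checksum_dir = os.path.join(cachedir, 'archive_hash', relpath)
print(checksum_dir)  # /var/cache/salt/minion/archive_hash/unc/fileserver\share\pkg.zip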

View file

@ -1422,19 +1422,25 @@ def symlink(
preflight_errors = []
if salt.utils.platform.is_windows():
# Make sure the passed owner exists
if not salt.utils.win_functions.get_sid_from_name(win_owner):
try:
salt.utils.win_functions.get_sid_from_name(win_owner)
except CommandExecutionError as exc:
preflight_errors.append('User {0} does not exist'.format(win_owner))
# Make sure users passed in win_perms exist
if win_perms:
for name_check in win_perms:
if not salt.utils.win_functions.get_sid_from_name(name_check):
try:
salt.utils.win_functions.get_sid_from_name(name_check)
except CommandExecutionError as exc:
preflight_errors.append('User {0} does not exist'.format(name_check))
# Make sure users passed in win_deny_perms exist
if win_deny_perms:
for name_check in win_deny_perms:
if not salt.utils.win_functions.get_sid_from_name(name_check):
try:
salt.utils.win_functions.get_sid_from_name(name_check)
except CommandExecutionError as exc:
preflight_errors.append('User {0} does not exist'.format(name_check))
else:
uid = __salt__['file.user_to_uid'](user)

View file

@ -15,6 +15,7 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import Salt libs
from salt.ext import six
from salt.utils.odict import OrderedDict
__virtualname__ = 'ini'
@ -53,46 +54,79 @@ def options_present(name, sections=None, separator='=', strict=False):
'comment': 'No anomaly detected'
}
if __opts__['test']:
ret['result'] = True
ret['comment'] = ''
for section in sections or {}:
section_name = ' in section ' + section if section != 'DEFAULT_IMPLICIT' else ''
try:
cur_section = __salt__['ini.get_section'](name, section, separator)
except IOError as err:
ret['comment'] = "{0}".format(err)
ret['result'] = False
return ret
for key in sections[section]:
cur_value = cur_section.get(key)
if cur_value == six.text_type(sections[section][key]):
ret['comment'] += 'Key {0}{1} unchanged.\n'.format(key, section_name)
continue
ret['comment'] += 'Changed key {0}{1}.\n'.format(key, section_name)
ret['result'] = None
if ret['comment'] == '':
ret['comment'] = 'No changes detected.'
return ret
# pylint: disable=too-many-nested-blocks
try:
changes = {}
if sections:
for section_name, section_body in sections.items():
options = {}
for sname, sbody in sections.items():
if not isinstance(sbody, (dict, OrderedDict)):
options.update({sname: sbody})
cur_ini = __salt__['ini.get_ini'](name, separator)
original_top_level_opts = {}
original_sections = {}
for key, val in cur_ini.items():
if isinstance(val, (dict, OrderedDict)):
original_sections.update({key: val})
else:
original_top_level_opts.update({key: val})
if __opts__['test']:
for option in options:
if option in original_top_level_opts:
if six.text_type(original_top_level_opts[option]) == six.text_type(options[option]):
ret['comment'] += 'Unchanged key {0}.\n'.format(option)
else:
ret['comment'] += 'Changed key {0}.\n'.format(option)
ret['result'] = None
else:
ret['comment'] += 'Changed key {0}.\n'.format(option)
ret['result'] = None
else:
options_updated = __salt__['ini.set_option'](name, options, separator)
changes.update(options_updated)
if strict:
for opt_to_remove in set(original_top_level_opts).difference(options):
if __opts__['test']:
ret['comment'] += 'Removed key {0}.\n'.format(opt_to_remove)
ret['result'] = None
else:
__salt__['ini.remove_option'](name, None, opt_to_remove, separator)
changes.update({opt_to_remove: {'before': original_top_level_opts[opt_to_remove],
'after': None}})
for section_name, section_body in [(sname, sbody) for sname, sbody in sections.items()
if isinstance(sbody, (dict, OrderedDict))]:
section_descr = ' in section ' + section_name if section_name else ''
changes[section_name] = {}
if strict:
original = __salt__['ini.get_section'](name, section_name, separator)
original = cur_ini.get(section_name, {})
for key_to_remove in set(original.keys()).difference(section_body.keys()):
orig_value = __salt__['ini.get_option'](name, section_name, key_to_remove, separator)
__salt__['ini.remove_option'](name, section_name, key_to_remove, separator)
changes[section_name].update({key_to_remove: ''})
changes[section_name].update({key_to_remove: {'before': orig_value,
'after': None}})
options_updated = __salt__['ini.set_option'](name, {section_name: section_body}, separator)
if options_updated:
changes[section_name].update(options_updated[section_name])
if not changes[section_name]:
del changes[section_name]
orig_value = original_sections.get(section_name, {}).get(key_to_remove, '#-#-')
if __opts__['test']:
ret['comment'] += 'Deleted key {0}{1}.\n'.format(key_to_remove, section_descr)
ret['result'] = None
else:
__salt__['ini.remove_option'](name, section_name, key_to_remove, separator)
changes[section_name].update({key_to_remove: ''})
changes[section_name].update({key_to_remove: {'before': orig_value,
'after': None}})
if __opts__['test']:
for option in section_body:
if six.text_type(section_body[option]) == \
six.text_type(original_sections.get(section_name, {}).get(option, '#-#-')):
ret['comment'] += 'Unchanged key {0}{1}.\n'.format(option, section_descr)
else:
ret['comment'] += 'Changed key {0}{1}.\n'.format(option, section_descr)
ret['result'] = None
else:
options_updated = __salt__['ini.set_option'](name, {section_name: section_body}, separator)
if options_updated:
changes[section_name].update(options_updated[section_name])
if not changes[section_name]:
del changes[section_name]
else:
changes = __salt__['ini.set_option'](name, sections, separator)
if not __opts__['test']:
changes = __salt__['ini.set_option'](name, sections, separator)
except (IOError, KeyError) as err:
ret['comment'] = "{0}".format(err)
ret['result'] = False
@ -102,10 +136,10 @@ def options_present(name, sections=None, separator='=', strict=False):
ret['comment'] = 'Errors encountered. {0}'.format(changes['error'])
ret['changes'] = {}
else:
for name, body in changes.items():
for ciname, body in changes.items():
if body:
ret['comment'] = 'Changes take effect'
ret['changes'].update({name: changes[name]})
ret['changes'].update({ciname: changes[ciname]})
return ret
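
To make the new bookkeeping easier to follow, here is a small standalone sketch of the split the rewritten options_present performs on the parsed INI data: values that are themselves dicts are sections, anything else is a top-level option. The sample data is invented.

from collections import OrderedDict

cur_ini = OrderedDict([
    ('option1', 'main1'),                                  # top-level option
    ('SectionB', OrderedDict([('test1', 'value 1B')])),    # section
])

original_top_level_opts = {}
original_sections = {}
for key, val in cur_ini.items():
    if isinstance(val, (dict, OrderedDict)):
        original_sections[key] = val
    else:
        original_top_level_opts[key] = val

print(original_top_level_opts)  # {'option1': 'main1'}
print(original_sections)        # {'SectionB': OrderedDict([('test1', 'value 1B')])}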
@ -137,20 +171,31 @@ def options_absent(name, sections=None, separator='='):
ret['result'] = True
ret['comment'] = ''
for section in sections or {}:
section_name = ' in section ' + section if section != 'DEFAULT_IMPLICIT' else ''
section_name = ' in section ' + section if section else ''
try:
cur_section = __salt__['ini.get_section'](name, section, separator)
except IOError as err:
ret['comment'] = "{0}".format(err)
ret['result'] = False
return ret
for key in sections[section]:
cur_value = cur_section.get(key)
if not cur_value:
ret['comment'] += 'Key {0}{1} does not exist.\n'.format(key, section_name)
except AttributeError:
cur_section = section
if isinstance(sections[section], (dict, OrderedDict)):
for key in sections[section]:
cur_value = cur_section.get(key)
if not cur_value:
ret['comment'] += 'Key {0}{1} does not exist.\n'.format(key, section_name)
continue
ret['comment'] += 'Deleted key {0}{1}.\n'.format(key, section_name)
ret['result'] = None
else:
option = section
if not __salt__['ini.get_option'](name, None, option, separator):
ret['comment'] += 'Key {0} does not exist.\n'.format(option)
continue
ret['comment'] += 'Deleted key {0}{1}.\n'.format(key, section_name)
ret['comment'] += 'Deleted key {0}.\n'.format(option)
ret['result'] = None
if ret['comment'] == '':
ret['comment'] = 'No changes detected.'
return ret
@ -168,6 +213,9 @@ def options_absent(name, sections=None, separator='='):
if section not in ret['changes']:
ret['changes'].update({section: {}})
ret['changes'][section].update({key: current_value})
if not isinstance(sections[section], (dict, OrderedDict)):
ret['changes'].update({section: current_value})
# break
ret['comment'] = 'Changes take effect'
return ret
@ -197,18 +245,16 @@ def sections_present(name, sections=None, separator='='):
if __opts__['test']:
ret['result'] = True
ret['comment'] = ''
try:
cur_ini = __salt__['ini.get_ini'](name, separator)
except IOError as err:
ret['result'] = False
ret['comment'] = "{0}".format(err)
return ret
for section in sections or {}:
try:
cur_section = __salt__['ini.get_section'](name, section, separator)
except IOError as err:
ret['result'] = False
ret['comment'] = "{0}".format(err)
return ret
if dict(sections[section]) == cur_section:
if section in cur_ini:
ret['comment'] += 'Section unchanged {0}.\n'.format(section)
continue
elif cur_section:
ret['comment'] += 'Changed existing section {0}.\n'.format(section)
else:
ret['comment'] += 'Created new section {0}.\n'.format(section)
ret['result'] = None
@ -255,14 +301,14 @@ def sections_absent(name, sections=None, separator='='):
if __opts__['test']:
ret['result'] = True
ret['comment'] = ''
try:
cur_ini = __salt__['ini.get_ini'](name, separator)
except IOError as err:
ret['result'] = False
ret['comment'] = "{0}".format(err)
return ret
for section in sections or []:
try:
cur_section = __salt__['ini.get_section'](name, section, separator)
except IOError as err:
ret['result'] = False
ret['comment'] = "{0}".format(err)
return ret
if not cur_section:
if section not in cur_ini:
ret['comment'] += 'Section {0} does not exist.\n'.format(section)
continue
ret['comment'] += 'Deleted section {0}.\n'.format(section)

View file

@ -255,7 +255,12 @@ def _disable(name, started, result=True, **kwargs):
return ret
# Service can be disabled
before_toggle_disable_status = __salt__['service.disabled'](name)
if salt.utils.platform.is_windows():
# service.disabled in Windows returns True for services that are set to
# Manual start, so we need to check specifically for Disabled
before_toggle_disable_status = __salt__['service.info'](name)['StartType'] in ['Disabled']
else:
before_toggle_disable_status = __salt__['service.disabled'](name)
if before_toggle_disable_status:
# Service is disabled
if started is True:
@ -556,7 +561,12 @@ def dead(name,
# command, so it is just an indicator but can not be fully trusted
before_toggle_status = __salt__['service.status'](name, sig)
if 'service.enabled' in __salt__:
before_toggle_enable_status = __salt__['service.enabled'](name)
if salt.utils.platform.is_windows():
# service.enabled in Windows returns True for services that are set
# to Auto start, but services set to Manual can also be disabled
before_toggle_enable_status = __salt__['service.info'](name)['StartType'] in ['Auto', 'Manual']
else:
before_toggle_enable_status = __salt__['service.enabled'](name)
else:
before_toggle_enable_status = True
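
A standalone sketch (with invented service data) of the Windows distinction these hunks introduce: services set to Manual start are neither Auto-enabled nor Disabled, so enabled and disabled cannot be treated as simple opposites and the state checks StartType directly.

def is_disabled(info):
    return info['StartType'] in ['Disabled']

def is_enabled(info):
    return info['StartType'] in ['Auto', 'Manual']

for svc in ({'StartType': 'Auto'}, {'StartType': 'Manual'}, {'StartType': 'Disabled'}):
    print(svc['StartType'], 'disabled:', is_disabled(svc), 'enabled:', is_enabled(svc))
# Auto     disabled: False  enabled: True
# Manual   disabled: False  enabled: True
# Disabled disabled: True   enabled: False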

View file

@ -445,7 +445,26 @@ def present(name,
# hash_password is True, then hash it.
if password and hash_password:
log.debug('Hashing a clear text password')
password = __salt__['shadow.gen_password'](password)
# if a password hash is already set, it contains a crypt salt which
# should be re-used to generate the new hash; otherwise a random salt
# would be generated, causing the hash to change on every run and
# making the user.present state non-idempotent.
algorithms = {
'1': 'md5',
'2a': 'blowfish',
'5': 'sha256',
'6': 'sha512',
}
try:
_, algo, shadow_salt, shadow_hash = __salt__['shadow.info'](name)['passwd'].split('$', 4)
if algo == '1':
log.warning('Using MD5 for hashing passwords is considered insecure!')
log.debug('Re-using existing shadow salt for hashing password using {}'.format(algorithms.get(algo)))
password = __salt__['shadow.gen_password'](password, crypt_salt=shadow_salt, algorithm=algorithms.get(algo))
except ValueError:
log.info('No existing shadow salt found, defaulting to a randomly generated new one')
password = __salt__['shadow.gen_password'](password)
if fullname is not None:
fullname = sdecode(fullname)
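
A rough sketch of why re-using the existing crypt salt keeps user.present idempotent, using only the standard-library crypt module (Unix only; the example shadow entry is invented): hashing the same password with the same salt reproduces the same hash, while a fresh random salt would look like a change every run.

import crypt

existing = '$6$firstsalt$...'                  # e.g. from shadow.info()['passwd']
_, algo, shadow_salt, _ = existing.split('$', 3)

new_hash = crypt.crypt('hunter2', '${0}${1}'.format(algo, shadow_salt))
rehash = crypt.crypt('hunter2', '${0}${1}'.format(algo, shadow_salt))
print(new_hash == rehash)                      # True: no spurious state change

random_hash = crypt.crypt('hunter2')           # fresh random salt each call
print(new_hash == random_hash)                 # False: would register as a change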

View file

@ -274,7 +274,7 @@ def encode(data, encoding=None, errors='strict', keep=False,
@jinja_filter('json_decode_dict') # Remove this for Neon
@jinja_filter('json_encode_dict')
@jinja_filter('json_encode_dict') # Remove this for Neon
def encode_dict(data, encoding=None, errors='strict', keep=False,
preserve_dict_class=False, preserve_tuples=False):
'''
@ -327,7 +327,7 @@ def encode_dict(data, encoding=None, errors='strict', keep=False,
@jinja_filter('json_decode_list') # Remove this for Neon
@jinja_filter('json_encode_list')
@jinja_filter('json_encode_list') # Remove this for Neon
def encode_list(data, encoding=None, errors='strict', keep=False,
preserve_dict_class=False, preserve_tuples=False):
'''

View file

@ -278,6 +278,26 @@ def to_bool(val):
return False
@jinja_filter('tojson')
def tojson(val, indent=None):
'''
Implementation of tojson filter (only present in Jinja 2.9 and later). If
Jinja 2.9 or later is installed, then the upstream version of this filter
will be used.
'''
options = {'ensure_ascii': True}
if indent is not None:
options['indent'] = indent
return (
salt.utils.json.dumps(
val, **options
).replace('<', '\\u003c')
.replace('>', '\\u003e')
.replace('&', '\\u0026')
.replace("'", '\\u0027')
)
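
A quick standalone check of what this filter produces (sample data invented; the standard-library json module stands in for salt.utils.json): JSON output with <, >, & and ' escaped so the result is safe to embed in HTML/Jinja contexts.

import json

def tojson(val, indent=None):
    options = {'ensure_ascii': True}
    if indent is not None:
        options['indent'] = indent
    return (json.dumps(val, **options)
            .replace('<', '\\u003c')
            .replace('>', '\\u003e')
            .replace('&', '\\u0026')
            .replace("'", '\\u0027'))

print(tojson({'html': "<b>Tom & Jerry's</b>"}))
# {"html": "\u003cb\u003eTom \u0026 Jerry\u0027s\u003c/b\u003e"}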
@jinja_filter('quote')
def quote(txt):
'''

View file

@ -23,6 +23,7 @@ import salt.utils.hashutils
import salt.utils.xmlutil as xml
from salt._compat import ElementTree as ET
from salt.exceptions import CommandExecutionError
from salt.ext.six.moves.urllib.parse import quote as _quote # pylint: disable=import-error,no-name-in-module
from salt.ext import six
log = logging.getLogger(__name__)
@ -117,6 +118,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
location = salt.utils.aws.get_location()
data = ''
fh = None
payload_hash = None
if method == 'PUT':
if local_file:
@ -124,6 +126,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
if path is None:
path = ''
path = _quote(path)
if not requesturl:
requesturl = (('https' if https_enable else 'http')+'://{0}/{1}').format(endpoint, path)
@ -152,7 +155,8 @@ def query(key, keyid, method='GET', params=None, headers=None,
try:
if method == 'PUT':
if local_file:
data = salt.utils.files.fopen(local_file, 'r') # pylint: disable=resource-leakage
fh = salt.utils.files.fopen(local_file, 'rb') # pylint: disable=resource-leakage
data = fh.read() # pylint: disable=resource-leakage
result = requests.request(method,
requesturl,
headers=headers,
@ -173,8 +177,8 @@ def query(key, keyid, method='GET', params=None, headers=None,
data=data,
verify=verify_ssl)
finally:
if data is not None:
data.close()
if fh is not None:
fh.close()
err_code = None
err_msg = None
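
A minimal sketch of the corrected upload flow (hypothetical file and URL; requests assumed to be installed): read the payload as bytes so signing and the PUT body agree, and close the file handle in finally rather than trying to close the bytes object.

import requests

fh = None
try:
    fh = open('/tmp/example.bin', 'rb')
    data = fh.read()
    resp = requests.request('PUT', 'https://bucket.example.com/example.bin', data=data)
finally:
    if fh is not None:
        fh.close()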

View file

@ -358,8 +358,12 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
jinja_env = jinja2.Environment(undefined=jinja2.StrictUndefined,
**env_args)
tojson_filter = jinja_env.filters.get('tojson')
jinja_env.tests.update(JinjaTest.salt_jinja_tests)
jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
if tojson_filter is not None:
# Use the existing tojson filter, if present (jinja2 >= 2.9)
jinja_env.filters['tojson'] = tojson_filter
jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
# globals

View file

@ -13,6 +13,7 @@ import tarfile
import zipfile
import tempfile
import subprocess
import concurrent
# Import third party libs
import jinja2
@ -110,6 +111,8 @@ def get_tops(extra_mods='', so_mods=''):
os.path.dirname(msgpack.__file__),
]
if _six.PY2:
tops.append(os.path.dirname(concurrent.__file__))
tops.append(_six.__file__.replace('.pyc', '.py'))
tops.append(backports_abc.__file__.replace('.pyc', '.py'))

View file

@ -142,7 +142,7 @@ def accept_dict(match, include_rejected=False, include_denied=False):
.. code-block:: python
>>> wheel.cmd('accept_dict',
>>> wheel.cmd('key.accept_dict',
{
'minions_pre': [
'jerry',

View file

@ -0,0 +1 @@
Hello, World!

View file

@ -264,6 +264,10 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
'tgt': '*',
'fun': 'test.ping',
}
self.application.opts['order_masters'] = True
self.application.opts['syndic_wait'] = 5
response = self.fetch('/',
method='POST',
body=salt.utils.json.dumps(low),

View file

@ -656,6 +656,57 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertIn(
'does not exist', ret['comment'])
def test_managed_source_hash_indifferent_case(self):
'''
Test passing a source_hash as an uppercase hash.
This is a regression test for Issue #38914 and Issue #48230 (test=true use).
'''
name = os.path.join(TMP, 'source_hash_indifferent_case')
state_name = 'file_|-/tmp/salt-tests-tmpdir/source_hash_indifferent_case_|' \
'-/tmp/salt-tests-tmpdir/source_hash_indifferent_case_|-managed'
local_path = os.path.join(FILES, 'file', 'base', 'hello_world.txt')
actual_hash = 'c98c24b677eff44860afea6f493bbaec5bb1c4cbb209c6fc2bbb47f66ff2ad31'
uppercase_hash = actual_hash.upper()
try:
# Lay down tmp file to test against
self.run_state(
'file.managed',
name=name,
source=local_path,
source_hash=actual_hash
)
# Test uppercase source_hash: should return True with no changes
ret = self.run_state(
'file.managed',
name=name,
source=local_path,
source_hash=uppercase_hash
)
assert ret[state_name]['result'] is True
assert ret[state_name]['pchanges'] == {}
assert ret[state_name]['changes'] == {}
# Test uppercase source_hash using test=true
# Should return True with no changes
ret = self.run_state(
'file.managed',
name=name,
source=local_path,
source_hash=uppercase_hash,
test=True
)
assert ret[state_name]['result'] is True
assert ret[state_name]['pchanges'] == {}
assert ret[state_name]['changes'] == {}
finally:
# Clean Up File
if os.path.exists(name):
os.remove(name)
def test_directory(self):
'''
file.directory
@ -2271,6 +2322,38 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
except OSError:
pass
@skip_if_not_root
@skipIf(not HAS_PWD, "pwd not available. Skipping test")
@skipIf(not HAS_GRP, "grp not available. Skipping test")
@with_system_user_and_group('user12209', 'group12209',
on_existing='delete', delete=True)
@with_tempdir()
def test_issue_48336_file_managed_mode_setuid(self, tempdir, user, group):
'''
Ensure that the mode (including the setuid bit) is correct when changing
ownership and group
'''
tempfile = os.path.join(tempdir, 'temp_file_issue_48336')
# Run the state
ret = self.run_state(
'file.managed', name=tempfile,
user=user, group=group, mode='4750',
)
self.assertSaltTrueReturn(ret)
# Check that the owner and group are correct, and
# the mode is what we expect
temp_file_stats = os.stat(tempfile)
# Normalize the mode
temp_file_mode = six.text_type(oct(stat.S_IMODE(temp_file_stats.st_mode)))
temp_file_mode = salt.utils.files.normalize_mode(temp_file_mode)
self.assertEqual(temp_file_mode, '4750')
self.assertEqual(pwd.getpwuid(temp_file_stats.st_uid).pw_name, user)
self.assertEqual(grp.getgrgid(temp_file_stats.st_gid).gr_name, group)
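
A small standalone sketch of the mode check this test performs, using only the standard library (salt.utils.files.normalize_mode is skipped here; any existing path works as input): take the permission bits from os.stat and compare their zero-padded octal form.

import os
import stat

st = os.stat('/etc/passwd')                  # any existing path works
mode_bits = stat.S_IMODE(st.st_mode)
mode_str = '{0:04o}'.format(mode_bits)       # e.g. '0644'; a setuid 750 file gives '4750'
print(mode_str)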
class BlockreplaceTest(ModuleCase, SaltReturnAssertsMixin):
marker_start = '# start'

View file

@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import tarfile
import tempfile
import subprocess
import sys
import os
from tests.support.unit import TestCase
import salt.utils.files
import salt.utils.thin
try:
import virtualenv
HAS_VENV = True
except ImportError:
HAS_VENV = False
class TestThinDir(TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
salt.utils.files.rm_rf(self.tmpdir)
def test_thin_dir(self):
'''
Test the generated thin dir to make sure salt-call can run.
Run salt-call via the Python interpreter of a fresh virtual environment to
ensure the thin archive bundles all of salt-call's dependencies.
'''
venv_dir = os.path.join(self.tmpdir, 'venv')
virtualenv.create_environment(venv_dir)
salt.utils.thin.gen_thin(self.tmpdir)
thin_dir = os.path.join(self.tmpdir, 'thin')
thin_archive = os.path.join(thin_dir, 'thin.tgz')
tar = tarfile.open(thin_archive)
tar.extractall(thin_dir)
tar.close()
bins = 'bin'
if sys.platform == 'win32':
bins = 'Scripts'
cmd = [
os.path.join(venv_dir, bins, 'python'),
os.path.join(thin_dir, 'salt-call'),
'--version',
]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
proc.wait()
assert proc.returncode == 0, (stdout, stderr, proc.returncode)

View file

@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
'''
Testing ini_manage exec module.
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
@ -15,6 +17,9 @@ import salt.modules.ini_manage as ini
class IniManageTestCase(TestCase):
'''
Testing ini_manage exec module.
'''
TEST_FILE_CONTENT = os.linesep.join([
'# Comment on the first line',
@ -54,6 +59,9 @@ class IniManageTestCase(TestCase):
os.remove(self.tfile.name)
def test_get_option(self):
'''
Test get_option method.
'''
self.assertEqual(
ini.get_option(self.tfile.name, 'main', 'test1'),
'value 1')
@ -71,23 +79,46 @@ class IniManageTestCase(TestCase):
'')
def test_get_section(self):
'''
Test get_section method.
'''
self.assertEqual(
ini.get_section(self.tfile.name, 'SectionB'),
{'test1': 'value 1B', 'test3': 'value 3B'})
def test_remove_option(self):
'''
Test remove_option method.
'''
self.assertEqual(
ini.remove_option(self.tfile.name, 'SectionB', 'test1'),
'value 1B')
self.assertIsNone(ini.get_option(self.tfile.name, 'SectionB', 'test1'))
def test_remove_section(self):
'''
Test remove_section method.
'''
self.assertEqual(
ini.remove_section(self.tfile.name, 'SectionB'),
{'test1': 'value 1B', 'test3': 'value 3B'})
self.assertEqual(ini.get_section(self.tfile.name, 'SectionB'), {})
def test_get_ini(self):
'''
Test get_ini method.
'''
self.assertEqual(
dict(ini.get_ini(self.tfile.name)), {
'SectionC': {'empty_option': ''},
'SectionB': {'test1': 'value 1B', 'test3': 'value 3B'},
'main': {'test1': 'value 1', 'test2': 'value 2'},
'option2': 'main2', 'option1': 'main1'})
def test_set_option(self):
'''
Test set_option method.
'''
result = ini.set_option(self.tfile.name, {
'SectionB': {
'test3': 'new value 3B',
@ -101,12 +132,9 @@ class IniManageTestCase(TestCase):
'SectionB': {'test3': {'after': 'new value 3B',
'before': 'value 3B'},
'test_set_option': {'after': 'test_set_value',
'before': None}
},
'before': None}},
'SectionD': {'after': {'test_set_option2': 'test_set_value1'},
'before': None
}
})
'before': None}})
# Check existing option updated
self.assertEqual(
ini.get_option(self.tfile.name, 'SectionB', 'test3'),
@ -116,16 +144,22 @@ class IniManageTestCase(TestCase):
ini.get_option(self.tfile.name, 'SectionD', 'test_set_option2'),
'test_set_value1')
def test_empty_value_preserved_after_edit(self):
def test_empty_value(self):
'''
Test empty value preserved after edit
'''
ini.set_option(self.tfile.name, {
'SectionB': {'test3': 'new value 3B'},
})
with salt.utils.files.fopen(self.tfile.name, 'r') as fp:
file_content = salt.utils.stringutils.to_unicode(fp.read())
with salt.utils.files.fopen(self.tfile.name, 'r') as fp_:
file_content = salt.utils.stringutils.to_unicode(fp_.read())
expected = '{0}{1}{0}'.format(os.linesep, 'empty_option = ')
self.assertIn(expected, file_content, 'empty_option was not preserved')
def test_empty_lines_preserved_after_edit(self):
def test_empty_lines(self):
'''
Test empty lines preserved after edit
'''
ini.set_option(self.tfile.name, {
'SectionB': {'test3': 'new value 3B'},
})
@ -155,12 +189,15 @@ class IniManageTestCase(TestCase):
'empty_option = ',
''
])
with salt.utils.files.fopen(self.tfile.name, 'r') as fp:
file_content = salt.utils.stringutils.to_unicode(fp.read())
with salt.utils.files.fopen(self.tfile.name, 'r') as fp_:
file_content = salt.utils.stringutils.to_unicode(fp_.read())
self.assertEqual(expected, file_content)
def test_empty_lines_preserved_after_multiple_edits(self):
def test_empty_lines_multiple_edits(self):
'''
Test empty lines preserved after multiple edits
'''
ini.set_option(self.tfile.name, {
'SectionB': {'test3': 'this value will be edited two times'},
})
self.test_empty_lines_preserved_after_edit()
self.test_empty_lines()

View file

@ -126,65 +126,56 @@ class ProxyTestCase(TestCase, LoaderModuleMockMixin):
def test_get_http_proxy_windows(self):
'''
Test to make sure that we correctly get the current proxy info
on Windows
Test to make sure that we correctly get the current proxy info on
Windows
'''
result = {'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'}
mock = MagicMock(return_value=result)
expected = {'server': '192.168.0.1',
'port': '3128'}
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
result = {
'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'
}
mock = MagicMock(return_value=result)
expected = {
'server': '192.168.0.1',
'port': '3128'
}
with patch.dict(proxy.__salt__, {'reg.read_value': mock}):
out = proxy.get_http_proxy()
mock.assert_called_once_with('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer')
mock.assert_called_once_with(
hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer')
self.assertEqual(expected, out)
def test_get_https_proxy_windows(self):
'''
Test to make sure that we correctly get the current proxy info
on Windows
Test to make sure that we correctly get the current proxy info on
Windows
'''
result = {'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'}
mock = MagicMock(return_value=result)
expected = {'server': '192.168.0.2',
'port': '3128'}
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
result = {
'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'
}
mock = MagicMock(return_value=result)
expected = {
'server': '192.168.0.2',
'port': '3128'
}
with patch.dict(proxy.__salt__, {'reg.read_value': mock}):
out = proxy.get_https_proxy()
mock.assert_called_once_with('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer')
mock.assert_called_once_with(
hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer')
self.assertEqual(expected, out)
def test_get_ftp_proxy_windows(self):
'''
Test to make sure that we correctly get the current proxy info
on Windows
Test to make sure that we correctly get the current proxy info on
Windows
'''
result = {'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'}
mock = MagicMock(return_value=result)
expected = {'server': '192.168.0.3',
'port': '3128'}
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
result = {
'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'
}
mock = MagicMock(return_value=result)
expected = {
'server': '192.168.0.3',
'port': '3128'
}
with patch.dict(proxy.__salt__, {'reg.read_value': mock}):
out = proxy.get_ftp_proxy()
mock.assert_called_once_with('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer')
mock.assert_called_once_with(
hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer')
self.assertEqual(expected, out)
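
A standalone sketch of the registry value format these tests exercise: the ProxyServer string packs one server:port entry per protocol, separated by semicolons. The sample string matches the mocked vdata above; the parsing code here is illustrative, not the module's implementation.

vdata = 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'

proxies = {}
for entry in vdata.split(';'):
    proto, _, hostport = entry.partition('=')
    server, _, port = hostport.rpartition(':')
    proxies[proto] = {'server': server, 'port': port}

print(proxies['ftp'])  # {'server': '192.168.0.3', 'port': '3128'}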
def test_get_all_proxies_macos_fails(self):
@ -196,201 +187,179 @@ class ProxyTestCase(TestCase, LoaderModuleMockMixin):
def test_get_all_proxies_windows(self):
'''
Test to make sure that we correctly get the current proxy info
on Windows
Test to make sure that we correctly get the current proxy info on
Windows
'''
results = [{'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'},
{'vdata': 1}]
mock = MagicMock(side_effect=results)
expected = {'enabled': True,
'http': {'server': '192.168.0.1',
'port': '3128'},
'https': {'server': '192.168.0.2',
'port': '3128'},
'ftp': {'server': '192.168.0.3',
'port': '3128'}}
calls = [
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyEnable')]
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
results = [
{
'vdata': 'http=192.168.0.1:3128;https=192.168.0.2:3128;ftp=192.168.0.3:3128'
},
{
'vdata': 1
}
]
mock = MagicMock(side_effect=results)
expected = {
'enabled': True,
'http': {
'server': '192.168.0.1',
'port': '3128'
},
'https': {
'server': '192.168.0.2',
'port': '3128'
},
'ftp': {
'server': '192.168.0.3',
'port': '3128'
}
}
with patch.dict(proxy.__salt__, {'reg.read_value': mock}):
out = proxy.get_proxy_win()
calls = [
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyEnable'),
]
mock.assert_has_calls(calls)
self.assertEqual(expected, out)
def test_set_http_proxy_windows(self):
'''
Test to make sure that we correctly set the proxy info
on Windows
Test to make sure that we correctly set the proxy info on Windows
'''
calls = [
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer',
vdata='http=192.168.0.1:3128;'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyEnable',
vdata=1,
vtype='REG_DWORD'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyOverride',
vdata='<local>;.moo.com;.salt.com')]
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg, 'cmd.run': mock_cmd}):
out = proxy.set_http_proxy('192.168.0.1', 3128, bypass_hosts=['.moo.com', '.salt.com'])
calls = [
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer',
'http=192.168.0.1:3128;'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyEnable',
1,
vtype='REG_DWORD'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyOverride',
'<local>;.moo.com;.salt.com')
]
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg,
'cmd.run': mock_cmd}):
out = proxy.set_http_proxy(server='192.168.0.1',
port=3128,
bypass_hosts=['.moo.com', '.salt.com'])
mock_reg.assert_has_calls(calls)
mock_cmd.assert_called_once_with('netsh winhttp import proxy source=ie')
self.assertTrue(out)
def test_set_https_proxy_windows(self):
'''
Test to make sure that we correctly set the proxy info
on Windows
Test to make sure that we correctly set the proxy info on Windows
'''
calls = [
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer',
vdata='https=192.168.0.1:3128;'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyEnable',
vdata=1,
vtype='REG_DWORD'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyOverride',
vdata='<local>;.moo.com;.salt.com')]
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg, 'cmd.run': mock_cmd}):
out = proxy.set_https_proxy('192.168.0.1', 3128, bypass_hosts=['.moo.com', '.salt.com'])
calls = [
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer',
'https=192.168.0.1:3128;'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyEnable',
1,
vtype='REG_DWORD'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyOverride',
'<local>;.moo.com;.salt.com')
]
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg,
'cmd.run': mock_cmd}):
out = proxy.set_https_proxy(server='192.168.0.1',
port=3128,
bypass_hosts=['.moo.com', '.salt.com'])
mock_reg.assert_has_calls(calls)
mock_cmd.assert_called_once_with('netsh winhttp import proxy source=ie')
self.assertTrue(out)
def test_set_ftp_proxy_windows(self):
'''
Test to make sure that we correctly set the proxy info
on Windows
Test to make sure that we correctly set the proxy info on Windows
'''
calls = [
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer',
vdata='ftp=192.168.0.1:3128;'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyEnable',
vdata=1,
vtype='REG_DWORD'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyOverride',
vdata='<local>;.moo.com;.salt.com')]
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg, 'cmd.run': mock_cmd}):
out = proxy.set_ftp_proxy('192.168.0.1', 3128, bypass_hosts=['.moo.com', '.salt.com'])
calls = [
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer',
'ftp=192.168.0.1:3128;'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyEnable',
1,
vtype='REG_DWORD'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyOverride',
'<local>;.moo.com;.salt.com')
]
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg,
'cmd.run': mock_cmd}):
out = proxy.set_ftp_proxy(server='192.168.0.1',
port=3128,
bypass_hosts=['.moo.com', '.salt.com'])
mock_reg.assert_has_calls(calls)
mock_cmd.assert_called_once_with('netsh winhttp import proxy source=ie')
self.assertTrue(out)
def test_set_proxy_windows(self):
'''
Test to make sure that we correctly set the proxy info
on Windows
Test to make sure that we correctly set the proxy info on Windows
'''
calls = [
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer',
vdata='http=192.168.0.1:3128;https=192.168.0.1:3128;ftp=192.168.0.1:3128;'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyEnable',
vdata=1,
vtype='REG_DWORD'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyOverride',
vdata='<local>;.moo.com;.salt.com')]
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg, 'cmd.run': mock_cmd}):
out = proxy.set_proxy_win('192.168.0.1', 3128, bypass_hosts=['.moo.com', '.salt.com'])
calls = [
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer',
'http=192.168.0.1:3128;https=192.168.0.1:3128;ftp=192.168.0.1:3128;'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyEnable',
1,
vtype='REG_DWORD'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyOverride',
'<local>;.moo.com;.salt.com')
]
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg,
'cmd.run': mock_cmd}):
out = proxy.set_proxy_win(server='192.168.0.1',
port=3128,
bypass_hosts=['.moo.com', '.salt.com'])
mock_reg.assert_has_calls(calls)
mock_cmd.assert_called_once_with('netsh winhttp import proxy source=ie')
self.assertTrue(out)
def test_set_proxy_windows_no_ftp(self):
'''
Test to make sure that we correctly set the proxy info
on Windows
Test to make sure that we correctly set the proxy info on Windows
'''
calls = [
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyServer',
vdata='http=192.168.0.1:3128;https=192.168.0.1:3128;'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyEnable',
vdata=1,
vtype='REG_DWORD'),
call(hive='HKEY_CURRENT_USER',
key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
vname='ProxyOverride',
vdata='<local>;.moo.com;.salt.com')]
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__grains__, {'os': 'Windows'}):
mock_reg = MagicMock()
mock_cmd = MagicMock()
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg, 'cmd.run': mock_cmd}):
out = proxy.set_proxy_win('192.168.0.1', 3128, types=['http', 'https'],
with patch.dict(proxy.__salt__, {'reg.set_value': mock_reg,
'cmd.run': mock_cmd}):
out = proxy.set_proxy_win(server='192.168.0.1',
port=3128,
types=['http', 'https'],
bypass_hosts=['.moo.com', '.salt.com'])
calls = [
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyServer',
'http=192.168.0.1:3128;https=192.168.0.1:3128;'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyEnable',
1,
vtype='REG_DWORD'),
call('HKEY_CURRENT_USER',
'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Internet Settings',
'ProxyOverride',
'<local>;.moo.com;.salt.com')
]
mock_reg.assert_has_calls(calls)
mock_cmd.assert_called_once_with('netsh winhttp import proxy source=ie')
self.assertTrue(out)

View file

@ -16,6 +16,7 @@ from tests.support.mixins import LoaderModuleMockMixin
from tests.support.paths import TMP, TMP_CONF_DIR
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
patch,
mock_open,
@ -358,6 +359,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
'__utils__': utils,
'__salt__': {
'config.get': config.get,
'config.option': MagicMock(return_value=''),
}
},
config: {
@ -759,28 +761,25 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
"whitelist=sls1.sls",
pillar="A")
mock = MagicMock(return_value=True)
with patch.dict(state.__salt__,
{'config.option': mock}):
mock = MagicMock(return_value="A")
mock = MagicMock(return_value="A")
with patch.object(state, '_filter_running',
mock):
mock = MagicMock(return_value=True)
with patch.object(state, '_filter_running',
mock):
mock = MagicMock(return_value=True)
with patch.object(state, '_filter_running',
with patch.object(salt.payload, 'Serial',
mock):
mock = MagicMock(return_value=True)
with patch.object(salt.payload, 'Serial',
mock):
with patch.object(os.path,
'join', mock):
with patch.object(
state,
'_set'
'_retcode',
mock):
self.assertTrue(state.
highstate
(arg))
with patch.object(os.path,
'join', mock):
with patch.object(
state,
'_set'
'_retcode',
mock):
self.assertTrue(state.
highstate
(arg))
def test_clear_request(self):
'''
@ -921,17 +920,11 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
MockState.HighState.flag = False
mock = MagicMock(return_value=True)
with patch.dict(state.__salt__,
{'config.option':
mock}):
mock = MagicMock(return_value=
True)
with patch.object(
state,
'_filter_'
'running',
mock):
self.sub_test_sls()
with patch.object(state,
'_filter_'
'running',
mock):
self.sub_test_sls()
def test_get_test_value(self):
'''
@ -1014,6 +1007,75 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
None,
True))
def test_sls_sync(self):
'''
Test state.sls with the sync_mods argument
We only mock the sync functions we expect to be called. If any other
sync function runs it will raise a KeyError, which we want, as it
tells us that we are syncing things we shouldn't.
'''
mock_empty_list = MagicMock(return_value=[])
with patch.object(state, 'running', mock_empty_list), \
patch.object(state, '_disabled', mock_empty_list), \
patch.object(state, '_get_pillar_errors', mock_empty_list):
sync_mocks = {
'saltutil.sync_modules': Mock(),
'saltutil.sync_states': Mock(),
}
with patch.dict(state.__salt__, sync_mocks):
state.sls('foo', sync_mods='modules,states')
for key in sync_mocks:
call_count = sync_mocks[key].call_count
expected = 1
assert call_count == expected, \
'{0} called {1} time(s) (expected: {2})'.format(
key, call_count, expected
)
# Test syncing all
sync_mocks = {'saltutil.sync_all': Mock()}
with patch.dict(state.__salt__, sync_mocks):
state.sls('foo', sync_mods='all')
for key in sync_mocks:
call_count = sync_mocks[key].call_count
expected = 1
assert call_count == expected, \
'{0} called {1} time(s) (expected: {2})'.format(
key, call_count, expected
)
# sync_mods=True should be interpreted as sync_mods=all
sync_mocks = {'saltutil.sync_all': Mock()}
with patch.dict(state.__salt__, sync_mocks):
state.sls('foo', sync_mods=True)
for key in sync_mocks:
call_count = sync_mocks[key].call_count
expected = 1
assert call_count == expected, \
'{0} called {1} time(s) (expected: {2})'.format(
key, call_count, expected
)
# Test syncing all when "all" is passed along with module types.
# This tests that we *only* run a sync_all and avoid unnecessary
# extra syncing.
sync_mocks = {'saltutil.sync_all': Mock()}
with patch.dict(state.__salt__, sync_mocks):
state.sls('foo', sync_mods='modules,all')
for key in sync_mocks:
call_count = sync_mocks[key].call_count
expected = 1
assert call_count == expected, \
'{0} called {1} time(s) (expected: {2})'.format(
key, call_count, expected
)
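
A hypothetical sketch (not the module's actual helper) of the normalization behaviour these assertions pin down: a comma-separated string becomes a list of module types, and True or any list containing 'all' collapses to a single sync_all call.

def normalize_sync_mods(sync_mods):
    if sync_mods is True:
        return ['all']
    if isinstance(sync_mods, str):
        sync_mods = [m.strip() for m in sync_mods.split(',') if m.strip()]
    if 'all' in sync_mods:
        return ['all']
    return sync_mods

print(normalize_sync_mods('modules,states'))  # ['modules', 'states']
print(normalize_sync_mods(True))              # ['all']
print(normalize_sync_mods('modules,all'))     # ['all']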
def test_pkg(self):
'''
Test to execute a packaged state run

View file

@ -36,6 +36,7 @@ import salt.serializers.yaml as yamlserializer
import salt.serializers.json as jsonserializer
import salt.serializers.python as pythonserializer
from salt.exceptions import CommandExecutionError
import salt.utils.win_functions
@skipIf(NO_MOCK, NO_MOCK_REASON)
@ -143,10 +144,15 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
else:
group = 'saltstack'
ret = {'name': name,
'result': False,
'comment': '',
'changes': {}}
def return_val(kwargs):
val = {
'name': name,
'result': False,
'comment': '',
'changes': {},
}
val.update(kwargs)
return val
mock_t = MagicMock(return_value=True)
mock_f = MagicMock(return_value=False)
@ -160,7 +166,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t}):
comt = ('Must provide name to file.symlink')
ret.update({'comment': comt, 'name': ''})
ret = return_val({'comment': comt, 'name': ''})
self.assertDictEqual(filestate.symlink('', target), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
@ -168,8 +174,12 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
'file.group_to_gid': mock_empty,
'user.info': mock_empty,
'user.current': mock_user}):
comt = ('User {0} does not exist. Group {1} does not exist.'.format(user, group))
ret.update({'comment': comt, 'name': name})
if salt.utils.is_windows():
comt = ('User {0} does not exist'.format(user))
ret = return_val({'comment': comt, 'name': name})
else:
comt = ('User {0} does not exist. Group {1} does not exist.'.format(user, group))
ret = return_val({'comment': comt, 'name': name})
self.assertDictEqual(filestate.symlink(name, target, user=user,
group=group), ret)
@ -181,11 +191,22 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
'user.current': mock_user}):
with patch.dict(filestate.__opts__, {'test': True}):
with patch.object(os.path, 'exists', mock_f):
comt = ('Symlink {0} to {1}'
' is set for creation').format(name, target)
ret.update({'comment': comt,
'result': None,
'pchanges': {'new': name}})
if salt.utils.is_windows():
comt = ('User {0} does not exist'.format(user))
ret = return_val(
{
'comment': comt,
'result': False,
'name': name,
'changes': {}
}
)
else:
comt = ('Symlink {0} to {1}'
' is set for creation').format(name, target)
ret = return_val({'comment': comt,
'result': None,
'pchanges': {'new': name}})
self.assertDictEqual(filestate.symlink(name, target,
user=user,
group=group), ret)
@ -199,10 +220,21 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.dict(filestate.__opts__, {'test': False}):
with patch.object(os.path, 'isdir', mock_f):
with patch.object(os.path, 'exists', mock_f):
comt = ('Directory {0} for symlink is not present').format(test_dir)
ret.update({'comment': comt,
if salt.utils.is_windows():
comt = 'User {0} does not exist'.format(user)
ret = return_val(
{
'comment': comt,
'result': False,
'pchanges': {'new': name}})
'name': name,
'changes': {},
}
)
else:
comt = ('Directory {0} for symlink is not present').format(test_dir)
ret = return_val({'comment': comt,
'result': False,
'pchanges': {'new': name}})
self.assertDictEqual(filestate.symlink(name, target,
user=user,
group=group), ret)
@ -217,15 +249,19 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.dict(filestate.__opts__, {'test': False}):
with patch.object(os.path, 'isdir', mock_t):
with patch.object(salt.states.file, '_check_symlink_ownership', mock_t):
comt = ('Symlink {0} is present and owned by '
'{1}:{2}'.format(name, user, group))
ret.update({'comment': comt,
'result': True,
'pchanges': {}})
self.assertDictEqual(filestate.symlink(name, target,
user=user,
group=group), ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
if salt.utils.is_windows():
comt = ('Symlink {0} is present and owned by '
'{1}'.format(name, user))
else:
comt = ('Symlink {0} is present and owned by '
'{1}:{2}'.format(name, user, group))
ret = return_val({'comment': comt,
'result': True,
'pchanges': {}})
self.assertDictEqual(filestate.symlink(name, target,
user=user,
group=group), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
'file.group_to_gid': mock_gid,
@ -237,15 +273,16 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isdir', mock_t):
with patch.object(os.path, 'exists', mock_f):
with patch.object(os.path, 'lexists', mock_t):
comt = ('File exists where the backup target SALT'
' should go')
ret.update({'comment': comt,
'result': False,
'pchanges': {'new': name}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group, backupname='SALT'),
ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
comt = ('File exists where the backup target SALT'
' should go')
ret = return_val({'comment': comt,
'result': False,
'pchanges': {'new': name}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group, backupname='SALT'),
ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
@ -258,14 +295,15 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isdir', mock_t):
with patch.object(os.path, 'exists', mock_f):
with patch.object(os.path, 'isfile', mock_t):
comt = ('File exists where the symlink {0} should be'
.format(name))
ret.update({'comment': comt,
'pchanges': {'new': name},
'result': False})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
comt = ('File exists where the symlink {0} should be'
.format(name))
ret = return_val({'comment': comt,
'pchanges': {'new': name},
'result': False})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
@ -279,11 +317,12 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isdir', MagicMock(side_effect=[True, False])):
with patch.object(os.path, 'isfile', mock_t):
with patch.object(os.path, 'exists', mock_f):
comt = ('File exists where the symlink {0} should be'.format(name))
ret.update({'comment': comt, 'result': False})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
comt = ('File exists where the symlink {0} should be'.format(name))
ret = return_val({'comment': comt, 'result': False, 'pchanges': {'new': '/tmp/testfile.txt'}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
@ -297,11 +336,12 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isdir', MagicMock(side_effect=[True, False])):
with patch.object(os.path, 'isdir', mock_t):
with patch.object(os.path, 'exists', mock_f):
comt = ('Directory exists where the symlink {0} should be'.format(name))
ret.update({'comment': comt, 'result': False})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
comt = ('Directory exists where the symlink {0} should be'.format(name))
ret = return_val({'comment': comt, 'result': False, 'pchanges': {'new': '/tmp/testfile.txt'}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
@ -314,12 +354,13 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.dict(filestate.__opts__, {'test': False}):
with patch.object(os.path, 'isdir', MagicMock(side_effect=[True, False])):
with patch.object(os.path, 'isfile', mock_f):
comt = ('Unable to create new symlink {0} -> '
'{1}: '.format(name, target))
ret.update({'comment': comt, 'result': False})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
comt = ('Unable to create new symlink {0} -> '
'{1}: '.format(name, target))
ret = return_val({'comment': comt, 'result': False, 'pchanges': {'new': '/tmp/testfile.txt'}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
@ -334,13 +375,15 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.dict(filestate.__opts__, {'test': False}):
with patch.object(os.path, 'isdir', MagicMock(side_effect=[True, False])):
with patch.object(os.path, 'isfile', mock_f):
comt = 'Created new symlink {0} -> {1}'.format(name, target)
ret.update({'comment': comt,
'result': True,
'changes': {'new': name}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch('salt.states.file._check_symlink_ownership', return_value=True):
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
comt = 'Created new symlink {0} -> {1}'.format(name, target)
ret = return_val({'comment': comt,
'result': True, 'pchanges': {'new': '/tmp/testfile.txt'},
'changes': {'new': name}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch.dict(filestate.__salt__, {'config.manage_mode': mock_t,
'file.user_to_uid': mock_uid,
@ -355,15 +398,19 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.dict(filestate.__opts__, {'test': False}):
with patch.object(os.path, 'isdir', MagicMock(side_effect=[True, False])):
with patch.object(os.path, 'isfile', mock_f):
comt = ('Created new symlink {0} -> {1}, '
'but was unable to set ownership to '
'{2}:{3}'.format(name, target, user, group))
ret.update({'comment': comt,
'result': False,
'changes': {'new': name}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
with patch('salt.states.file._set_symlink_ownership', return_value=False):
with patch('salt.states.file._check_symlink_ownership', return_value=False):
comt = ('Created new symlink {0} -> {1}, '
'but was unable to set ownership to '
'{2}:{3}'.format(name, target, user, group))
ret = return_val({'comment': comt,
'result': False,
'pchanges': {'new': '/tmp/testfile.txt'},
'changes': {'new': name}})
self.assertDictEqual(filestate.symlink
(name, target, user=user,
group=group), ret)
# 'absent' function tests: 1
def test_absent(self):
@ -1131,6 +1178,8 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
Test to ensure that some text appears at the beginning of a file.
'''
name = '/etc/motd'
if salt.utils.is_windows():
name = 'c:\\etc\\motd'
source = ['salt://motd/hr-messages.tmpl']
sources = ['salt://motd/devops-messages.tmpl']
text = ['Trust no one unless you have eaten much salt with him.']
@ -1159,12 +1208,15 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
'cp.get_template': mock_f,
'file.search': mock_f,
'file.prepend': mock_t}):
with patch.object(os.path, 'isdir', mock_t):
comt = ('The following files will be changed:\n/etc:'
' directory - new\n')
ret.update({'comment': comt, 'name': name, 'pchanges': {'/etc': {'directory': 'new'}}})
self.assertDictEqual(filestate.prepend(name, makedirs=True),
ret)
comt = ('The following files will be changed:\n/etc:'
' directory - new\n')
pchanges = {'/etc': {'directory': 'new'}}
if salt.utils.is_windows():
comt = 'The directory "c:\\etc" will be changed'
pchanges = {'c:\\etc': {'directory': 'new'}}
ret.update({'comment': comt, 'name': name, 'pchanges': pchanges})
self.assertDictEqual(filestate.prepend(name, makedirs=True),
ret)
with patch.object(os.path, 'isabs', mock_f):
comt = ('Specified file {0} is not an absolute path'
View file
@ -17,6 +17,8 @@ from tests.support.mock import (
# Import Salt Libs
import salt.states.ini_manage as ini_manage
# pylint: disable=no-member
@skipIf(NO_MOCK, NO_MOCK_REASON)
class IniManageTestCase(TestCase, LoaderModuleMockMixin):
@ -40,7 +42,7 @@ class IniManageTestCase(TestCase, LoaderModuleMockMixin):
'changes': {}}
with patch.dict(ini_manage.__opts__, {'test': True}):
comt = 'No changes detected.'
comt = ''
ret.update({'comment': comt, 'result': True})
self.assertDictEqual(ini_manage.options_present(name), ret)
@ -61,7 +63,7 @@ class IniManageTestCase(TestCase, LoaderModuleMockMixin):
changes = {'mysection': {'first': 'who is on',
'second': 'what is on',
'third': {'after': None, 'before': "I don't know"}}}
with patch.dict(ini_manage.__salt__, {'ini.get_section': MagicMock(return_value=original['mysection'])}):
with patch.dict(ini_manage.__salt__, {'ini.get_ini': MagicMock(return_value=original)}):
with patch.dict(ini_manage.__salt__, {'ini.remove_option': MagicMock(return_value='third')}):
with patch.dict(ini_manage.__salt__, {'ini.get_option': MagicMock(return_value="I don't know")}):
with patch.dict(ini_manage.__salt__, {'ini.set_option': MagicMock(return_value=desired)}):
@ -107,9 +109,10 @@ class IniManageTestCase(TestCase, LoaderModuleMockMixin):
'changes': {}}
with patch.dict(ini_manage.__opts__, {'test': True}):
comt = 'No changes detected.'
ret.update({'comment': comt, 'result': True})
self.assertDictEqual(ini_manage.sections_present(name), ret)
with patch.dict(ini_manage.__salt__, {'ini.get_ini': MagicMock(return_value=None)}):
comt = 'No changes detected.'
ret.update({'comment': comt, 'result': True})
self.assertDictEqual(ini_manage.sections_present(name), ret)
changes = {'first': 'who is on',
'second': 'what is on',
@ -134,9 +137,10 @@ class IniManageTestCase(TestCase, LoaderModuleMockMixin):
'changes': {}}
with patch.dict(ini_manage.__opts__, {'test': True}):
comt = 'No changes detected.'
ret.update({'comment': comt, 'result': True})
self.assertDictEqual(ini_manage.sections_absent(name), ret)
with patch.dict(ini_manage.__salt__, {'ini.get_ini': MagicMock(return_value=None)}):
comt = 'No changes detected.'
ret.update({'comment': comt, 'result': True})
self.assertDictEqual(ini_manage.sections_absent(name), ret)
with patch.dict(ini_manage.__opts__, {'test': False}):
comt = ('No anomaly detected')
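The mocks in this file switch from ini.get_section to ini.get_ini, which suggests options_present() now reads the whole parsed file rather than a single section. A hypothetical, self-contained sketch of that mock wiring (the committed test also patches ini.remove_option, ini.get_option and ini.set_option as shown above):

# Sketch, not the committed test: hand the full parsed INI structure back from
# ini.get_ini so the state can diff it against the desired options.
from tests.support.mock import MagicMock, patch
import salt.states.ini_manage as ini_manage

original = {'mysection': {'first': 'who is on',
                          'second': 'what is on',
                          'third': "I don't know"}}

with patch.dict(ini_manage.__salt__,
                {'ini.get_ini': MagicMock(return_value=original)}):
    pass  # ini_manage.options_present(name, sections={...}) would run here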
View file
@ -125,6 +125,7 @@ class ServiceTestCase(TestCase, LoaderModuleMockMixin):
{'changes': 'saltstack',
'comment': 'The service salt is already dead', 'name': 'salt',
'result': True}]
info_mock = MagicMock(return_value={'StartType': ''})
mock = MagicMock(return_value="salt")
with patch.object(service, '_enabled_used_error', mock):
@ -140,31 +141,36 @@ class ServiceTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(service.__opts__, {'test': True}):
with patch.dict(service.__salt__, {'service.enabled': fmock,
'service.stop': tmock,
'service.status': fmock}):
'service.status': fmock,
'service.info': info_mock}):
with patch.object(service, '_enable', mock):
self.assertDictEqual(service.dead("salt", True), ret[5])
with patch.dict(service.__salt__, {'service.enabled': tmock,
'service.status': tmock}):
'service.status': tmock,
'service.info': info_mock}):
self.assertDictEqual(service.dead("salt"), ret[2])
with patch.dict(service.__opts__, {'test': False}):
with patch.dict(service.__salt__, {'service.enabled': fmock,
'service.stop': tmock,
'service.status': fmock}):
'service.status': fmock,
'service.info': info_mock}):
with patch.object(service, '_enable', mock):
self.assertDictEqual(service.dead("salt", True), ret[1])
with patch.dict(service.__salt__, {'service.enabled': MagicMock(side_effect=[True, True, False]),
'service.status': MagicMock(side_effect=[True, False, False]),
'service.stop': MagicMock(return_value="stack")}):
'service.stop': MagicMock(return_value="stack"),
'service.info': info_mock}):
with patch.object(service, '_enable', MagicMock(return_value={'changes': 'saltstack'})):
self.assertDictEqual(service.dead("salt", True), ret[3])
# test an initd which reports a wrong status (True even if dead)
with patch.dict(service.__salt__, {'service.enabled': MagicMock(side_effect=[False, False, False]),
'service.status': MagicMock(side_effect=[True, True, True]),
'service.stop': MagicMock(return_value="stack")}):
'service.stop': MagicMock(return_value="stack"),
'service.info': info_mock}):
with patch.object(service, '_disable', MagicMock(return_value={})):
self.assertDictEqual(service.dead("salt", False), ret[4])
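Each __salt__ mock dictionary in this test gains a 'service.info' entry returning a StartType, presumably so service.dead() can look up the Windows start type without a KeyError. A minimal sketch of one such mock dictionary, built only from values already shown in the hunk above:

# Sketch: the extra 'service.info' mock satisfies the state's StartType lookup.
from tests.support.mock import MagicMock

info_mock = MagicMock(return_value={'StartType': ''})
salt_mocks = {
    'service.enabled': MagicMock(return_value=False),
    'service.status': MagicMock(return_value=False),
    'service.stop': MagicMock(return_value=True),
    'service.info': info_mock,  # consulted for the 'StartType' key
}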
View file
@ -215,7 +215,7 @@ class M2CryptTestCase(TestCase):
self.assertEqual(SIG, crypt.sign_message('/keydir/keyname.pem', MSG, passphrase='password'))
def test_verify_signature(self):
with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)):
with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))):
self.assertTrue(crypt.verify_signature('/keydir/keyname.pub', MSG, SIG))
def test_encrypt_decrypt_bin(self):
@ -272,30 +272,30 @@ class TestBadCryptodomePubKey(TestCase):
class TestM2CryptoRegression47124(TestCase):
SIGNATURE = (
'w\xac\xfe18o\xeb\xfb\x14+\x9e\xd1\xb7\x7fe}\xec\xd6\xe1P\x9e\xab'
'\xb5\x07\xe0\xc1\xfd\xda#\x04Z\x8d\x7f\x0b\x1f}:~\xb2s\x860u\x02N'
'\xd4q"\xb7\x86*\x8f\x1f\xd0\x9d\x11\x92\xc5~\xa68\xac>\x12H\xc2%y,'
'\xe6\xceU\x1e\xa3?\x0c,\xf0u\xbb\xd0[g_\xdd\x8b\xb0\x95:Y\x18\xa5*'
'\x99\xfd\xf3K\x92\x92 ({\xd1\xff\xd9F\xc8\xd6K\x86e\xf9\xa8\xad\xb0z'
'\xe3\x9dD\xf5k\x8b_<\xe7\xe7\xec\xf3"\'\xd5\xd2M\xb4\xce\x1a\xe3$'
'\x9c\x81\xad\xf9\x11\xf6\xf5>)\xc7\xdd\x03&\xf7\x86@ks\xa6\x05\xc2'
'\xd0\xbd\x1a7\xfc\xde\xe6\xb0\xad!\x12#\xc86Y\xea\xc5\xe3\xe2\xb3'
'\xc9\xaf\xfa\x0c\xf2?\xbf\x93w\x18\x9e\x0b\xa2a\x10:M\x05\x89\xe2W.Q'
'\xe8;yGT\xb1\xf2\xc6A\xd2\xc4\xbeN\xb3\xcfS\xaf\x03f\xe2\xb4)\xe7\xf6'
'\xdbs\xd0Z}8\xa4\xd2\x1fW*\xe6\x1c"\x8b\xd0\x18w\xb9\x7f\x9e\x96\xa3'
'\xd9v\xf7\x833\x8e\x01'
b'w\xac\xfe18o\xeb\xfb\x14+\x9e\xd1\xb7\x7fe}\xec\xd6\xe1P\x9e\xab'
b'\xb5\x07\xe0\xc1\xfd\xda#\x04Z\x8d\x7f\x0b\x1f}:~\xb2s\x860u\x02N'
b'\xd4q"\xb7\x86*\x8f\x1f\xd0\x9d\x11\x92\xc5~\xa68\xac>\x12H\xc2%y,'
b'\xe6\xceU\x1e\xa3?\x0c,\xf0u\xbb\xd0[g_\xdd\x8b\xb0\x95:Y\x18\xa5*'
b'\x99\xfd\xf3K\x92\x92 ({\xd1\xff\xd9F\xc8\xd6K\x86e\xf9\xa8\xad\xb0z'
b'\xe3\x9dD\xf5k\x8b_<\xe7\xe7\xec\xf3"\'\xd5\xd2M\xb4\xce\x1a\xe3$'
b'\x9c\x81\xad\xf9\x11\xf6\xf5>)\xc7\xdd\x03&\xf7\x86@ks\xa6\x05\xc2'
b'\xd0\xbd\x1a7\xfc\xde\xe6\xb0\xad!\x12#\xc86Y\xea\xc5\xe3\xe2\xb3'
b'\xc9\xaf\xfa\x0c\xf2?\xbf\x93w\x18\x9e\x0b\xa2a\x10:M\x05\x89\xe2W.Q'
b'\xe8;yGT\xb1\xf2\xc6A\xd2\xc4\xbeN\xb3\xcfS\xaf\x03f\xe2\xb4)\xe7\xf6'
b'\xdbs\xd0Z}8\xa4\xd2\x1fW*\xe6\x1c"\x8b\xd0\x18w\xb9\x7f\x9e\x96\xa3'
b'\xd9v\xf7\x833\x8e\x01'
)
@skipIf(not HAS_M2, "Skip when m2crypto is not installed")
def test_m2crypto_verify_bytes(self):
message = salt.utils.stringutils.to_unicode('meh')
with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)):
with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))):
salt.crypt.verify_signature('/keydir/keyname.pub', message, self.SIGNATURE)
@skipIf(not HAS_M2, "Skip when m2crypto is not installed")
def test_m2crypto_verify_unicode(self):
message = salt.utils.stringutils.to_bytes('meh')
with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)):
with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))):
salt.crypt.verify_signature('/keydir/keyname.pub', message, self.SIGNATURE)
@skipIf(not HAS_M2, "Skip when m2crypto is not installed")
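Everywhere this file feeds PUBKEY_DATA to mock_open it now wraps it in six.b(); a plausible reading is that the key file is opened in binary mode, so the mocked read must yield bytes on Python 3 as well. A self-contained sketch of what six.b() does here (PUBKEY_DATA below is a labelled placeholder, not the real key from the test module):

# Sketch: six.b() is a no-op on Python 2 and a latin-1 encode on Python 3,
# so the patched fopen hands bytes back on either interpreter.
import six
from tests.support.mock import mock_open, patch

PUBKEY_DATA = '-----BEGIN PUBLIC KEY-----\n...\n-----END PUBLIC KEY-----\n'  # placeholder

assert isinstance(six.b(PUBKEY_DATA), bytes)

with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))):
    pass  # the verify_signature() calls above would read bytes from this mock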
View file
@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
'''
Tests for salt.utils.jinja
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.jinja
from tests.support.unit import TestCase
class JinjaTestCase(TestCase):
def test_tojson(self):
'''
Test the tojson filter for those using Jinja < 2.9. Non-ascii unicode
content should be dumped with ensure_ascii=True.
'''
data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
result = salt.utils.jinja.tojson(data)
expected = '{"Non-ascii words": ["s\\u00fc\\u00df", "\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}'
assert result == expected, result
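The expected string in this new test is exactly what the standard library produces with its default ensure_ascii=True, so the escaping can be cross-checked without Salt; the only assumption below is that salt.utils.jinja.tojson escapes non-ASCII the same way json.dumps does by default:

# -*- coding: utf-8 -*-
# Stdlib-only cross-check of the value the test pins tojson() to.
from __future__ import unicode_literals
import json

data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
expected = ('{"Non-ascii words": ["s\\u00fc\\u00df", '
            '"\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}')

# json.dumps escapes non-ASCII by default, matching the filter output above.
assert json.dumps(data) == expected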