Merge branch '2018.3' of github.com:saltstack/salt into load_beacon_fix

Commit e59ced6507. 63 changed files with 1812 additions and 880 deletions.

.ci/lint (3 changes)

@@ -28,6 +28,7 @@ pipeline {
# the -l flag increases the search limit; use awk so we do not need to repeat the search above.
gawk 'BEGIN {FS="\\t"} {if ($1 != "D") {print $NF}}' file-list-status.log > file-list-changed.log
gawk 'BEGIN {FS="\\t"} {if ($1 == "D") {print $NF}}' file-list-status.log > file-list-deleted.log
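# For context, `git diff --name-status` writes tab-separated status/path pairs, so the two
# gawk filters above split that log into changed and deleted file lists. A hedged sketch
# with hypothetical file names (not part of this change):
#   $ cat file-list-status.log
#   M	salt/minion.py
#   A	salt/modules/beacons.py
#   D	tests/unit/old_test.py
#   $ gawk 'BEGIN {FS="\t"} {if ($1 != "D") {print $NF}}' file-list-status.log
#   salt/minion.py
#   salt/modules/beacons.py
#   $ gawk 'BEGIN {FS="\t"} {if ($1 == "D") {print $NF}}' file-list-status.log
#   tests/unit/old_test.py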
(git diff --name-status -l99999 -C "origin/$CHANGE_TARGET";echo "---";git diff --name-status -l99999 -C "origin/$BRANCH_NAME";printenv|grep -E '=[0-9a-z]{40,}+$|COMMIT=|BRANCH') > file-list-experiment.log
touch pylint-report-salt.log pylint-report-tests.log
eval "$(pyenv init -)"
pyenv --version

@@ -37,7 +38,7 @@ pipeline {
python --version
pip install tox
'''
archiveArtifacts artifacts: 'file-list-status.log,file-list-changed.log,file-list-deleted.log'
archiveArtifacts artifacts: 'file-list-status.log,file-list-changed.log,file-list-deleted.log,file-list-experiment.log'
}
}
stage('linting') {
@@ -30,7 +30,7 @@ provisioner:
salt_install: bootstrap
salt_version: latest
salt_bootstrap_url: https://bootstrap.saltstack.com
salt_bootstrap_options: -X -p rsync stable <%= version %>
salt_bootstrap_options: -X -p rsync git <%= version %>
log_level: info
sudo: true
require_chef: false
Gemfile (3 changes)

@@ -2,7 +2,8 @@

source 'https://rubygems.org'

gem 'test-kitchen', '~>1.21'
# Point this back at the test-kitchen package after 1.23.3 is released
gem 'test-kitchen', :git => 'https://github.com/dwoz/test-kitchen.git', :branch => 'winrm_opts'
gem 'kitchen-salt', '~>0.2'
gem 'kitchen-sync'
gem 'git'
@@ -7,7 +7,7 @@ from docutils.parsers.rst import Directive

from docutils.statemachine import ViewList
from sphinx import addnodes
from sphinx.directives import ObjectDescription, Directive
from sphinx.directives import ObjectDescription
from sphinx.domains import Domain, ObjType
from sphinx.domains.python import PyObject
from sphinx.locale import l_, _
doc/_themes/saltstack2/layout.html (vendored, 2 changes)

@@ -256,7 +256,7 @@
<!--
<a href="https://saltstack.com/saltstack-enterprise/" target="_blank"><img class="nolightbox footer-banner center" src="{{ pathto('_static/images/enterprise_ad.jpg', 1) }}"/></a>
-->
<a href="http://saltconf.com" target="_blank"><img class="nolightbox footer-banner center" src="{{ pathto('_static/images/DOCBANNER.jpg', 1) }}"/></a>
<a href="http://saltconf.com/saltconf18-speakers/" target="_blank"><img class="nolightbox footer-banner center" src="{{ pathto('_static/images/DOCBANNER.png', 1) }}"/></a>
</div>
{% endif %}
</div>

doc/_themes/saltstack2/static/images/DOCBANNER.jpg (vendored, binary file not shown; before: 497 KiB)

doc/_themes/saltstack2/static/images/DOCBANNER.png (vendored, new binary file not shown; after: 767 KiB)

@@ -92,13 +92,13 @@ RunnerClient
------------

.. autoclass:: salt.runner.RunnerClient
:members: cmd, async, cmd_sync, cmd_async
:members: cmd, asynchronous, cmd_sync, cmd_async

WheelClient
-----------

.. autoclass:: salt.wheel.WheelClient
:members: cmd, async, cmd_sync, cmd_async
:members: cmd, asynchronous, cmd_sync, cmd_async
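The rename from ``async`` to ``asynchronous`` is needed because ``async`` became a reserved keyword in Python 3.7 and can no longer be used as a method or keyword-argument name. As a hedged usage sketch of the client these docs describe (assuming master-side code with access to the master config; the paths and runner function here are illustrative):

.. code-block:: python

    import salt.config
    import salt.runner

    opts = salt.config.master_config('/etc/salt/master')
    runner = salt.runner.RunnerClient(opts)
    jobs = runner.cmd('jobs.list_jobs')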

CloudClient
-----------

@@ -526,6 +526,19 @@ GPG key with ``git`` locally, and linking the GPG key to your GitHub account.
Once these steps are completed, the commit signing verification will look like
the example in GitHub's `GPG Signature Verification feature announcement`_.

Bootstrap Script Changes
------------------------

Salt's Bootstrap Script, known as `bootstrap-salt.sh`_ in the Salt repo, has its own
repository, contributing guidelines, and release cadence.

All changes to the Bootstrap Script should be made to the `salt-bootstrap repo`_. Any
pull requests made to the `bootstrap-salt.sh`_ file in the Salt repository will be
automatically overwritten upon the next stable release of the Bootstrap Script.

For more information on the release process or how to contribute to the Bootstrap
Script, see the Bootstrap Script's `Contributing Guidelines`_.

.. _`saltstack/salt`: https://github.com/saltstack/salt
.. _`GitHub Fork a Repo Guide`: https://help.github.com/articles/fork-a-repo
.. _`GitHub issue tracker`: https://github.com/saltstack/salt/issues

@@ -537,3 +550,6 @@ the example in GitHub's `GPG Signature Verification feature announcement`_.
.. _GPG Probot: https://probot.github.io/apps/gpg/
.. _help articles: https://help.github.com/articles/signing-commits-with-gpg/
.. _GPG Signature Verification feature announcement: https://github.com/blog/2144-gpg-signature-verification
.. _bootstrap-salt.sh: https://github.com/saltstack/salt/blob/develop/salt/cloud/deploy/bootstrap-salt.sh
.. _salt-bootstrap repo: https://github.com/saltstack/salt-bootstrap
.. _Contributing Guidelines: https://github.com/saltstack/salt-bootstrap/blob/develop/CONTRIBUTING.md

@@ -92,7 +92,7 @@ Example:
cmd.run 'echo '\''h=\"baz\"'\''' runas=macuser

Changelog for v2018.3.2..v2018.3.3
=================================================================
==================================

*Generated at: 2018-09-21 17:45:27 UTC*

@@ -507,7 +507,7 @@ Changelog for v2018.3.2..v2018.3.3

* 3d26affa10 Fix remaining file state integration tests (py3)

* **PR** `#49171`_: (`Ch3LL`_) [2018.3.3] cherry pick `#49103`_
* **PR** `#49171`_: (`Ch3LL`_) [2018.3.3] cherry pick `#49103`_
  @ *2018-08-17 20:23:32 UTC*

* **PR** `#49103`_: (`dwoz`_) Install the launcher so we can execute py files (refs: `#49171`_)

@@ -1630,7 +1630,7 @@ Changelog for v2018.3.2..v2018.3.3

* **ISSUE** `#46896`_: (`Poil`_) Proxy + file.managed => Comment: Failed to cache xxx invalid arguments to setopt (refs: `#48754`_)

* **PR** `#48754`_: (`lomeroe`_) send proxy/ca_cert parameters as strings (not unicode) to tornado httpclient
* **PR** `#48754`_: (`lomeroe`_) send proxy/ca_cert parameters as strings (not unicode) to tornado httpclient
  @ *2018-07-25 14:55:42 UTC*

* 030c921914 Merge pull request `#48754`_ from lomeroe/fix-tornado-proxy

@@ -3075,7 +3075,7 @@ Changelog for v2018.3.2..v2018.3.3

* dae65da256 Merge branch '2018.3.1' into '2018.3'

* **PR** `#48186`_: (`rallytime`_) Add autodoc module for saltcheck.py
* **PR** `#48186`_: (`rallytime`_) Add autodoc module for saltcheck.py
  @ *2018-06-19 19:03:55 UTC*

* 5b4897f050 Merge pull request `#48186`_ from rallytime/saltcheck-docs

@@ -3362,11 +3362,11 @@ Changelog for v2018.3.2..v2018.3.3
* **PR** `#48109`_: (`rallytime`_) Back-port `#47851`_ to 2018.3
  @ *2018-06-14 13:09:04 UTC*

* **PR** `#47851`_: (`rares-pop`_) Fixup! add master.py:FileserverUpdate **kwargs (refs: `#48109`_)
* **PR** `#47851`_: (`rares-pop`_) Fixup! add master.py:FileserverUpdate \*\*kwargs (refs: `#48109`_)

* 2902ee0b14 Merge pull request `#48109`_ from rallytime/bp-47851

* e9dc30bf8e Fixup! add master.py:FileserverUpdate **kwargs
* e9dc30bf8e Fixup! add master.py:FileserverUpdate \*\*kwargs

* **ISSUE** `#47925`_: (`JonGriggs`_) GitFS looking for files in the master branch only (refs: `#47943`_)

@@ -3377,7 +3377,7 @@ Changelog for v2018.3.2..v2018.3.3

* 534e1a7100 Merge branch '2018.3' into issue47925

* **PR** `#48089`_: (`rallytime`_) Update release versions for the 2018.3 branch
* **PR** `#48089`_: (`rallytime`_) Update release versions for the 2018.3 branch
  @ *2018-06-13 14:03:44 UTC*

* 9e1d0040e4 Merge pull request `#48089`_ from rallytime/update_version_doc_2018.3

@@ -36,6 +36,8 @@ Assigned codenames:
- Nitrogen: ``2017.7.0``
- Oxygen: ``2018.3.0``
- Fluorine: ``TBD``
- Neon: ``TBD``
- Sodium: ``TBD``

Example
-------
@ -4,386 +4,40 @@
|
|||
Salt Bootstrap
|
||||
==============
|
||||
|
||||
The Salt Bootstrap script allows for a user to install the Salt Minion or
|
||||
Master on a variety of system distributions and versions. This shell script
|
||||
known as ``bootstrap-salt.sh`` runs through a series of checks to determine
|
||||
the operating system type and version. It then installs the Salt binaries
|
||||
using the appropriate methods. The Salt Bootstrap script installs the
|
||||
minimum number of packages required to run Salt. This means that in the event
|
||||
you run the bootstrap to install via package, Git will not be installed.
|
||||
Installing the minimum number of packages helps ensure the script stays as
|
||||
lightweight as possible, assuming the user will install any other required
|
||||
packages after the Salt binaries are present on the system. The script source
|
||||
is available on GitHub: https://github.com/saltstack/salt-bootstrap
|
||||
The Salt Bootstrap Script allows a user to install the Salt Minion or Master
|
||||
on a variety of system distributions and versions.
|
||||
|
||||
The Salt Bootstrap Script is a shell script known as ``bootstrap-salt.sh``.
|
||||
It runs through a series of checks to determine the operating system type and
|
||||
version. It then installs the Salt binaries using the appropriate methods.
|
||||
|
||||
Supported Operating Systems
|
||||
---------------------------
|
||||
The Salt Bootstrap Script installs the minimum number of packages required to
|
||||
run Salt. This means that in the event you run the bootstrap to install via
|
||||
package, Git will not be installed. Installing the minimum number of packages
|
||||
helps ensure the script stays as lightweight as possible, assuming the user
|
||||
will install any other required packages after the Salt binaries are present
|
||||
on the system.
|
||||
|
||||
The Salt Bootstrap Script is maintained in a separate repo from Salt, complete
|
||||
with its own issues, pull requests, contributing guidelines, release protocol,
|
||||
etc.
|
||||
|
||||
To learn more, please see the Salt Bootstrap repo links:
|
||||
|
||||
- `Salt Bootstrap repo`_
|
||||
- `README`_: includes supported operating systems, example usage, and more.
|
||||
- `Contributing Guidelines`_
|
||||
- `Release Process`_
|
||||
|
||||
.. note::
|
||||
|
||||
In the event you do not see your distribution or version available please
|
||||
review the develop branch on GitHub as it may contain updates that are
|
||||
not present in the stable release:
|
||||
https://github.com/saltstack/salt-bootstrap/tree/develop
|
||||
|
||||
|
||||
Debian and derivatives
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Debian GNU/Linux 7/8
|
||||
- Linux Mint Debian Edition 1 (based on Debian 8)
|
||||
- Kali Linux 1.0 (based on Debian 7)
|
||||
|
||||
|
||||
Red Hat family
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
- Amazon Linux 2012.09/2013.03/2013.09/2014.03/2014.09
|
||||
- CentOS 5/6/7
|
||||
- Fedora 17/18/20/21/22
|
||||
- Oracle Linux 5/6/7
|
||||
- Red Hat Enterprise Linux 5/6/7
|
||||
- Scientific Linux 5/6/7
|
||||
|
||||
|
||||
SUSE family
|
||||
~~~~~~~~~~~
|
||||
|
||||
- openSUSE 12/13
|
||||
- openSUSE Leap 42
|
||||
- openSUSE Tumbleweed 2015
|
||||
- SUSE Linux Enterprise Server 11 SP1/11 SP2/11 SP3/12
|
||||
|
||||
|
||||
Ubuntu and derivatives
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Elementary OS 0.2 (based on Ubuntu 12.04)
|
||||
- Linaro 12.04
|
||||
- Linux Mint 13/14/16/17
|
||||
- Trisquel GNU/Linux 6 (based on Ubuntu 12.04)
|
||||
- Ubuntu 10.x/11.x/12.x/13.x/14.x/15.x/16.x
|
||||
|
||||
|
||||
Other Linux distro
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
- Arch Linux
|
||||
- Gentoo
|
||||
|
||||
|
||||
UNIX systems
|
||||
~~~~~~~~~~~~
|
||||
|
||||
**BSD**:
|
||||
|
||||
- OpenBSD
|
||||
- FreeBSD 9/10/11
|
||||
|
||||
**SunOS**:
|
||||
|
||||
- SmartOS
|
||||
|
||||
|
||||
Example Usage
|
||||
-------------
|
||||
|
||||
If you're looking for the *one-liner* to install Salt, please scroll to the
|
||||
bottom and use the instructions for `Installing via an Insecure One-Liner`_
|
||||
|
||||
.. note::
|
||||
|
||||
In every two-step example, you would be well served to examine the downloaded file to ensure that it does what you expect.
|
||||
|
||||
|
||||
The Salt Bootstrap script has a wide variety of options that can be passed as
|
||||
well as several ways of obtaining the bootstrap script itself.
|
||||
|
||||
.. note::
|
||||
|
||||
These examples below show how to bootstrap Salt directly from GitHub or other Git repository.
|
||||
Run the script without any parameters to get latest stable Salt packages for your system from
|
||||
`SaltStack corporate repository`_. See first example in the `Install using wget`_ section.
|
||||
|
||||
.. _`SaltStack corporate repository`: https://repo.saltstack.com/
|
||||
|
||||
|
||||
Install using curl
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Using ``curl`` to install latest development version from GitHub:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl -o bootstrap-salt.sh -L https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh git develop
|
||||
|
||||
If you want to install a specific release version (based on the Git tags):
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl -o bootstrap-salt.sh -L https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh git v2015.8.8
|
||||
|
||||
To install a specific branch from a Git fork:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl -o bootstrap-salt.sh -L https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh -g https://github.com/myuser/salt.git git mybranch
|
||||
|
||||
If all you want is to install a ``salt-master`` using latest Git:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl -o bootstrap-salt.sh -L https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh -M -N git develop
|
||||
|
||||
If your host has Internet access only via HTTP proxy:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
PROXY='http://user:password@myproxy.example.com:3128'
|
||||
curl -o bootstrap-salt.sh -L -x "$PROXY" https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh -G -H "$PROXY" git
|
||||
|
||||
|
||||
Install using wget
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Using ``wget`` to install your distribution's stable packages:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
wget -O bootstrap-salt.sh https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh
|
||||
|
||||
Downloading the script from develop branch:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
wget -O bootstrap-salt.sh https://bootstrap.saltstack.com/develop
|
||||
sudo sh bootstrap-salt.sh
|
||||
|
||||
Installing a specific version from git using ``wget``:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
wget -O bootstrap-salt.sh https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh -P git v2015.8.8
|
||||
|
||||
.. note::
|
||||
|
||||
In the above example we added `-P`, which allows pip packages to be installed if required, but it is not a necessary flag for Git-based bootstraps.
|
||||
|
||||
|
||||
Install using Python
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
If you already have Python installed (Python 2.6 or newer), then it's as easy as:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
python -m urllib "https://bootstrap.saltstack.com" > bootstrap-salt.sh
|
||||
sudo sh bootstrap-salt.sh git develop
|
||||
|
||||
Python 2 also supports the following in-line code:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
python -c 'import urllib; print urllib.urlopen("https://bootstrap.saltstack.com").read()' > bootstrap-salt.sh
|
||||
sudo sh bootstrap-salt.sh git develop
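The in-line code above uses Python 2 syntax. On hosts that only ship Python 3, a hedged equivalent (assuming ``python3`` is on the ``PATH``) would be:

.. code-block:: bash

    python3 -c 'import urllib.request; print(urllib.request.urlopen("https://bootstrap.saltstack.com").read().decode())' > bootstrap-salt.sh
    sudo sh bootstrap-salt.sh git develop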
|
||||
|
||||
|
||||
Install using fetch
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
On a FreeBSD base system you usually don't have either of the above binaries available. You **do**
|
||||
have ``fetch`` available though:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
fetch -o bootstrap-salt.sh https://bootstrap.saltstack.com
|
||||
sudo sh bootstrap-salt.sh
|
||||
|
||||
If you have any SSL issues install ``ca_root_nssp``:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
pkg install ca_root_nssp
|
||||
|
||||
And either copy the certificates to the place where fetch can find them:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cp /usr/local/share/certs/ca-root-nss.crt /etc/ssl/cert.pem
|
||||
|
||||
Or link them to the right place:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
ln -s /usr/local/share/certs/ca-root-nss.crt /etc/ssl/cert.pem
|
||||
|
||||
|
||||
Installing via an Insecure One-Liner
|
||||
------------------------------------
|
||||
|
||||
The following examples illustrate how to install Salt via a one-liner.
|
||||
|
||||
.. note::
|
||||
|
||||
Warning! These methods do not involve a verification step and assume that
|
||||
the delivered file is trustworthy.
|
||||
|
||||
|
||||
Any of the examples above which use two lines can be made to run as a single line with minor modifications.
|
||||
|
||||
For example, using ``curl`` to install your distribution's stable packages:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl -L https://bootstrap.saltstack.com | sudo sh
|
||||
|
||||
|
||||
Using ``wget`` to install your distribution's stable packages:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
wget -O - https://bootstrap.saltstack.com | sudo sh
|
||||
|
||||
|
||||
Installing the latest develop branch of Salt:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
curl -L https://bootstrap.saltstack.com | sudo sh -s -- git develop
|
||||
|
||||
|
||||
Command Line Options
|
||||
--------------------
|
||||
|
||||
Here's a summary of the command line options:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
$ sh bootstrap-salt.sh -h
|
||||
|
||||
Installation types:
|
||||
- stable Install latest stable release. This is the default
|
||||
install type
|
||||
- stable [branch] Install latest version on a branch. Only supported
|
||||
for packages available at repo.saltstack.com
|
||||
- stable [version] Install a specific version. Only supported for
|
||||
packages available at repo.saltstack.com
|
||||
- daily Ubuntu specific: configure SaltStack Daily PPA
|
||||
- testing RHEL-family specific: configure EPEL testing repo
|
||||
- git Install from the head of the develop branch
|
||||
- git [ref] Install from any git ref (such as a branch, tag, or
|
||||
commit)
|
||||
|
||||
Examples:
|
||||
- bootstrap-salt.sh
|
||||
- bootstrap-salt.sh stable
|
||||
- bootstrap-salt.sh stable 2017.7
|
||||
- bootstrap-salt.sh stable 2017.7.2
|
||||
- bootstrap-salt.sh daily
|
||||
- bootstrap-salt.sh testing
|
||||
- bootstrap-salt.sh git
|
||||
- bootstrap-salt.sh git 2017.7
|
||||
- bootstrap-salt.sh git v2017.7.2
|
||||
- bootstrap-salt.sh git 06f249901a2e2f1ed310d58ea3921a129f214358
|
||||
|
||||
Options:
|
||||
-h Display this message
|
||||
-v Display script version
|
||||
-n No colours
|
||||
-D Show debug output
|
||||
-c Temporary configuration directory
|
||||
-g Salt Git repository URL. Default: https://github.com/saltstack/salt.git
|
||||
-w Install packages from downstream package repository rather than
|
||||
upstream, saltstack package repository. This is currently only
|
||||
implemented for SUSE.
|
||||
-k Temporary directory holding the minion keys which will pre-seed
|
||||
the master.
|
||||
-s Sleep time used when waiting for daemons to start, restart and when
|
||||
checking for the services running. Default: 3
|
||||
-L Also install salt-cloud and required python-libcloud package
|
||||
-M Also install salt-master
|
||||
-S Also install salt-syndic
|
||||
-N Do not install salt-minion
|
||||
-X Do not start daemons after installation
|
||||
-d Disables checking if Salt services are enabled to start on system boot.
|
||||
You can also do this by touching /tmp/disable_salt_checks on the target
|
||||
host. Default: ${BS_FALSE}
|
||||
-P Allow pip based installations. On some distributions the required salt
|
||||
packages or its dependencies are not available as a package for that
|
||||
distribution. Using this flag allows the script to use pip as a last
|
||||
resort method. NOTE: This only works for functions which actually
|
||||
implement pip based installations.
|
||||
-U If set, fully upgrade the system prior to bootstrapping Salt
|
||||
-I If set, allow insecure connections while downloading any files. For
|
||||
example, pass '--no-check-certificate' to 'wget' or '--insecure' to
|
||||
'curl'. On Debian and Ubuntu, using this option with -U allows one to obtain
|
||||
GnuPG archive keys insecurely if distro has changed release signatures.
|
||||
-F Allow copied files to overwrite existing (config, init.d, etc)
|
||||
-K If set, keep the temporary files in the temporary directories specified
|
||||
with -c and -k
|
||||
-C Only run the configuration function. Implies -F (forced overwrite).
|
||||
To overwrite Master or Syndic configs, -M or -S, respectively, must
|
||||
also be specified. Salt installation will be omitted, but some of the
|
||||
dependencies could be installed to write configuration with -j or -J.
|
||||
-A Pass the salt-master DNS name or IP. This will be stored under
|
||||
${BS_SALT_ETC_DIR}/minion.d/99-master-address.conf
|
||||
-i Pass the salt-minion id. This will be stored under
|
||||
${BS_SALT_ETC_DIR}/minion_id
|
||||
-p Extra-package to install while installing Salt dependencies. One package
|
||||
per -p flag. You're responsible for providing the proper package name.
|
||||
-H Use the specified HTTP proxy for all download URLs (including https://).
|
||||
For example: http://myproxy.example.com:3128
|
||||
-Z Enable additional package repository for newer ZeroMQ
|
||||
(only available for RHEL/CentOS/Fedora/Ubuntu based distributions)
|
||||
-b Assume that dependencies are already installed and software sources are
|
||||
set up. If git is selected, git tree is still checked out as dependency
|
||||
step.
|
||||
-f Force shallow cloning for git installations.
|
||||
This may result in an "n/a" in the version number.
|
||||
-l Disable ssl checks. When passed, switches "https" calls to "http" where
|
||||
possible.
|
||||
-V Install Salt into virtualenv
|
||||
(only available for Ubuntu based distributions)
|
||||
-a Pip install all Python pkg dependencies for Salt. Requires -V to install
|
||||
all pip pkgs into the virtualenv.
|
||||
(Only available for Ubuntu based distributions)
|
||||
-r Disable all repository configuration performed by this script. This
|
||||
option assumes all necessary repository configuration is already present
|
||||
on the system.
|
||||
-R Specify a custom repository URL. Assumes the custom repository URL
|
||||
points to a repository that mirrors Salt packages located at
|
||||
repo.saltstack.com. The option passed with -R replaces the
|
||||
"repo.saltstack.com". If -R is passed, -r is also set. Currently only
|
||||
works on CentOS/RHEL and Debian based distributions.
|
||||
-J Replace the Master config file with data passed in as a JSON string. If
|
||||
a Master config file is found, a reasonable effort will be made to save
|
||||
the file with a ".bak" extension. If used in conjunction with -C or -F,
|
||||
no ".bak" file will be created as either of those options will force
|
||||
a complete overwrite of the file.
|
||||
-j Replace the Minion config file with data passed in as a JSON string. If
|
||||
a Minion config file is found, a reasonable effort will be made to save
|
||||
the file with a ".bak" extension. If used in conjunction with -C or -F,
|
||||
no ".bak" file will be created as either of those options will force
|
||||
a complete overwrite of the file.
|
||||
-q Quiet salt installation from git (setup.py install -q)
|
||||
-x Changes the python version used to install a git version of salt. Currently
|
||||
this is considered experimental and has only been tested on Centos 6. This
|
||||
only works for git installations.
|
||||
-y Installs a different python version on host. Currently this has only been
|
||||
tested with Centos 6 and is considered experimental. This will install the
|
||||
ius repo on the box if disable repo is false. This must be used in conjunction
|
||||
with -x <pythonversion>. For example:
|
||||
sh bootstrap.sh -P -y -x python2.7 git v2016.11.3
|
||||
The above will install python27 and install the git version of salt using the
|
||||
python2.7 executable. This only works for git and pip installations.
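As a hedged example of combining a few of these flags (the master address and minion id below are hypothetical), the following installs the latest 2017.7 stable packages, points the minion at a master, and pre-seeds its id:

.. code-block:: bash

    sudo sh bootstrap-salt.sh -A salt.example.com -i web01 stable 2017.7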
|
||||
The Salt Bootstrap script can be found in the Salt repo under the
|
||||
``salt/cloud/deploy/bootstrap-salt.sh`` path. Any changes to this file
|
||||
will be overwritten! Bug fixes and feature additions must be submitted
|
||||
via the `Salt Bootstrap repo`_. Please see the Salt Bootstrap Script's
|
||||
`Release Process`_ for more information.
|
||||
|
||||
.. _Salt Bootstrap repo: https://github.com/saltstack/salt-bootstrap
|
||||
.. _README: https://github.com/saltstack/salt-bootstrap#bootstrapping-salt
|
||||
.. _Contributing Guidelines: https://github.com/saltstack/salt-bootstrap/blob/develop/CONTRIBUTING.md
|
||||
.. _Release Process: https://github.com/saltstack/salt-bootstrap/blob/develop/CONTRIBUTING.md#release-information
|
||||
|

@@ -428,3 +428,9 @@ class Beacon(object):
                           tag='/salt/minion/minion_beacon_disabled_complete')

        return True

    def reset(self):
        '''
        Reset the beacons to defaults
        '''
        self.opts['beacons'] = {}

@@ -93,6 +93,7 @@ def _gather_buffer_space():
    # Return the higher number between 5% of the system memory and 10MiB
    return max([total_mem * 0.05, 10 << 20])


# For the time being this will be a fixed calculation
# TODO: Allow user configuration
_DFLT_IPC_WBUFFER = _gather_buffer_space() * .5
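# A quick worked example of the default (assuming an 8 GiB host, purely illustrative):
# 5% of 8 GiB (~410 MiB) is larger than the 10 MiB floor, so the IPC write buffer
# defaults to roughly half of that, about 205 MiB.
#   total_mem = 8 * 1024 ** 3
#   buffer_space = max([total_mem * 0.05, 10 << 20])   # 429496729.6 bytes (~410 MiB)
#   ipc_wbuffer = buffer_space * .5                    # ~214748365 bytes (~205 MiB)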

@@ -3488,7 +3489,7 @@ def check_driver_dependencies(driver, dependencies):
        if value is False:
            log.warning(
                "Missing dependency: '%s'. The %s driver requires "
                "'%s' to be installed.", key, key, driver
                "'%s' to be installed.", key, driver, key
            )
            ret = False
@@ -1365,6 +1365,7 @@ _OS_FAMILY_MAP = {
    'GCEL': 'Debian',
    'Linaro': 'Debian',
    'elementary OS': 'Debian',
    'elementary': 'Debian',
    'Univention': 'Debian',
    'ScientificLinux': 'RedHat',
    'Raspbian': 'Debian',
@@ -660,9 +660,10 @@ class Master(SMaster):
        self.process_manager = salt.utils.process.ProcessManager(wait_for_kill=5)
        pub_channels = []
        log.info('Creating master publisher process')
        log_queue = salt.log.setup.get_multiprocessing_logging_queue()
        for transport, opts in iter_transport_opts(self.opts):
            chan = salt.transport.server.PubServerChannel.factory(opts)
            chan.pre_fork(self.process_manager)
            chan.pre_fork(self.process_manager, kwargs={'log_queue': log_queue})
            pub_channels.append(chan)

        log.info('Creating master event publisher process')

@@ -719,7 +720,7 @@ class Master(SMaster):
        log.info('Creating master request server process')
        kwargs = {}
        if salt.utils.platform.is_windows():
            kwargs['log_queue'] = salt.log.setup.get_multiprocessing_logging_queue()
            kwargs['log_queue'] = log_queue
            kwargs['secrets'] = SMaster.secrets

        self.process_manager.add_process(
@@ -2214,6 +2214,8 @@ class Minion(MinionBase):
            self.beacons.list_available_beacons()
        elif func == 'validate_beacon':
            self.beacons.validate_beacon(name, beacon_data)
        elif func == 'reset':
            self.beacons.reset()

    def environ_setenv(self, tag, data):
        '''
@@ -134,7 +134,7 @@ def add(name, beacon_data, **kwargs):

    .. code-block:: bash

        salt '*' beacons.add ps "[{'salt-master': 'stopped', 'apache2': 'stopped'}]"
        salt '*' beacons.add ps "[{'salt-master': 'stopped'}, {'apache2': 'stopped'}]"

    '''
    ret = {'comment': 'Failed to add beacon {0}.'.format(name),

@@ -207,7 +207,7 @@ def modify(name, beacon_data, **kwargs):

    .. code-block:: bash

        salt '*' beacons.modify ps "[{'salt-master': 'stopped', 'apache2': 'stopped'}]"
        salt '*' beacons.modify ps "[{'salt-master': 'stopped'}, {'apache2': 'stopped'}]"
    '''

    ret = {'comment': '',

@@ -571,3 +571,38 @@ def disable_beacon(name, **kwargs):
        # Effectively a no-op, since we can't really return without an event system
        ret['comment'] = 'Event module not available. Beacon disable job failed.'
        return ret


def reset(**kwargs):
    '''
    Reset beacon configuration on the minion

    CLI Example:

    .. code-block:: bash

        salt '*' beacons.reset
    '''

    ret = {'comment': [],
           'result': True}

    if 'test' in kwargs and kwargs['test']:
        ret['comment'] = 'Beacons would be reset.'
    else:
        try:
            eventer = salt.utils.event.get_event('minion', opts=__opts__)
            res = __salt__['event.fire']({'func': 'reset'}, 'manage_beacons')
            if res:
                event_ret = eventer.get_event(tag='/salt/minion/minion_beacon_reset_complete', wait=30)
                if event_ret and event_ret['complete']:
                    ret['result'] = True
                    ret['comment'] = 'Beacon configuration reset.'
                else:
                    ret['result'] = False
                    ret['comment'] = event_ret['comment']
                return ret
        except KeyError:
            # Effectively a no-op, since we can't really return without an event system
            ret['comment'] = 'Event module not available. Beacon reset job failed.'
    return ret
|
@ -939,6 +939,9 @@ def compare_containers(first, second, ignore=None):
|
|||
if item == 'Ulimits':
|
||||
val1 = _ulimit_sort(val1)
|
||||
val2 = _ulimit_sort(val2)
|
||||
if item == 'Env':
|
||||
val1 = sorted(val1)
|
||||
val2 = sorted(val2)
|
||||
if val1 != val2:
|
||||
ret.setdefault(conf_dict, {})[item] = {'old': val1, 'new': val2}
|
||||
# Check for optionally-present items that were in the second container
|
||||
|
@ -965,6 +968,9 @@ def compare_containers(first, second, ignore=None):
|
|||
if item == 'Ulimits':
|
||||
val1 = _ulimit_sort(val1)
|
||||
val2 = _ulimit_sort(val2)
|
||||
if item == 'Env':
|
||||
val1 = sorted(val1)
|
||||
val2 = sorted(val2)
|
||||
if val1 != val2:
|
||||
ret.setdefault(conf_dict, {})[item] = {'old': val1, 'new': val2}
|
||||
return ret
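# The added 'Env' branch mirrors the 'Ulimits' handling above: both lists are
# normalized before comparison so that two containers whose environment variables
# merely appear in a different order are not reported as changed. A minimal
# illustration (hypothetical values, not Salt's API):
#   val1 = ['PATH=/usr/bin', 'LANG=C.UTF-8']
#   val2 = ['LANG=C.UTF-8', 'PATH=/usr/bin']
#   val1 != val2                    # True: a naive comparison flags a change
#   sorted(val1) == sorted(val2)    # True: the order-insensitive comparison does not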
|
||||
|
@ -5617,6 +5623,7 @@ def pause(name):
|
|||
.format(name))}
|
||||
return _change_state(name, 'pause', 'paused')
|
||||
|
||||
|
||||
freeze = salt.utils.functools.alias_function(pause, 'freeze')
|
||||
|
||||
|
||||
|
@ -5819,6 +5826,7 @@ def unpause(name):
|
|||
.format(name))}
|
||||
return _change_state(name, 'unpause', 'running')
|
||||
|
||||
|
||||
unfreeze = salt.utils.functools.alias_function(unpause, 'unfreeze')
|
||||
|
||||
|
||||
|
|
|
@ -31,6 +31,10 @@ import salt.utils.versions
|
|||
from salt.exceptions import CommandExecutionError, MinionError
|
||||
from salt.ext import six
|
||||
|
||||
# Workaround for 'reload' builtin of py2.7
|
||||
if six.PY3:
|
||||
from importlib import reload # pylint: disable=no-name-in-module
|
||||
|
||||
# Import third party libs
|
||||
HAS_PORTAGE = False
|
||||
try:
|
||||
|
@ -274,6 +278,7 @@ def latest_version(*names, **kwargs):
|
|||
return ret[names[0]]
|
||||
return ret
|
||||
|
||||
|
||||
# available_version is being deprecated
|
||||
available_version = salt.utils.functools.alias_function(latest_version, 'available_version')
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ Module for viewing and modifying sysctl parameters
|
|||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
import logging
|
||||
import os
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.files
|
||||
|
@ -66,6 +67,10 @@ def show(config_file=False):
|
|||
comps = ['']
|
||||
|
||||
if config_file:
|
||||
# If the file doesn't exist, return an empty list
|
||||
if not os.path.exists(config_file):
|
||||
return []
|
||||
|
||||
try:
|
||||
with salt.utils.files.fopen(config_file, 'r') as f:
|
||||
for line in f.readlines():
|
||||
|
|
|
@ -71,6 +71,10 @@ def show(config_file=False):
|
|||
'''
|
||||
ret = {}
|
||||
if config_file:
|
||||
# If the file doesn't exist, return an empty list
|
||||
if not os.path.exists(config_file):
|
||||
return []
|
||||
|
||||
try:
|
||||
with salt.utils.files.fopen(config_file) as fp_:
|
||||
for line in fp_:
|
||||
|
|
|
@ -8,7 +8,6 @@ System module for sleeping, restarting, and shutting down the system on Mac OS X
|
|||
Using this module will enable ``atrun`` on the system if it is disabled.
|
||||
'''
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
import os
|
||||
|
||||
# Import python libs
|
||||
try: # python 3
|
||||
|
@ -19,8 +18,6 @@ except ImportError: # python 2
|
|||
import getpass
|
||||
|
||||
# Import salt libs
|
||||
from salt.ext import six
|
||||
import salt.utils.mac_utils
|
||||
import salt.utils.platform
|
||||
from salt.exceptions import SaltInvocationError, CommandExecutionError
|
||||
|
||||
|
@ -47,35 +44,10 @@ def __virtual__():
|
|||
|
||||
def _atrun_enabled():
|
||||
'''
|
||||
Check to see if atrun is enabled on the system
|
||||
Check to see if atrun is running and enabled on the system
|
||||
'''
|
||||
name = 'com.apple.atrun'
|
||||
services = __utils__['mac_utils.available_services']()
|
||||
label = None
|
||||
|
||||
if name in services:
|
||||
label = services[name]['plist']['Label']
|
||||
else:
|
||||
for service in six.itervalues(services):
|
||||
if service['file_path'].lower() == name:
|
||||
# Match on full path
|
||||
label = service['plist']['Label']
|
||||
break
|
||||
basename, ext = os.path.splitext(service['file_name'])
|
||||
if basename.lower() == name:
|
||||
# Match on basename
|
||||
label = service['plist']['Label']
|
||||
break
|
||||
|
||||
if not label:
|
||||
return False
|
||||
|
||||
try:
|
||||
# Collect information on service: will raise an error if it fails
|
||||
salt.utils.mac_utils.launchctl('list',
|
||||
label,
|
||||
return_stdout=True)
|
||||
return True
|
||||
return __salt__['service.list']('com.apple.atrun')
|
||||
except CommandExecutionError:
|
||||
return False
|
||||
|
||||
|
@ -85,33 +57,11 @@ def _enable_atrun():
|
|||
Enable and start the atrun daemon
|
||||
'''
|
||||
name = 'com.apple.atrun'
|
||||
services = __utils__['mac_utils.available_services']()
|
||||
label = None
|
||||
path = None
|
||||
|
||||
if name in services:
|
||||
label = services[name]['plist']['Label']
|
||||
path = services[name]['file_path']
|
||||
else:
|
||||
for service in six.itervalues(services):
|
||||
if service['file_path'].lower() == name:
|
||||
# Match on full path
|
||||
label = service['plist']['Label']
|
||||
path = service['file_path']
|
||||
break
|
||||
basename, ext = os.path.splitext(service['file_name'])
|
||||
if basename.lower() == name:
|
||||
# Match on basename
|
||||
label = service['plist']['Label']
|
||||
path = service['file_path']
|
||||
break
|
||||
|
||||
if not label:
|
||||
try:
|
||||
__salt__['service.enable'](name)
|
||||
__salt__['service.start'](name)
|
||||
except CommandExecutionError:
|
||||
return False
|
||||
|
||||
salt.utils.mac_utils.launchctl('enable',
|
||||
'system/{0}'.format(label))
|
||||
salt.utils.mac_utils.launchctl('load', path)
|
||||
return _atrun_enabled()
|
||||
|
||||
|
||||
|
@ -259,11 +209,11 @@ def get_remote_login():
|
|||
|
||||
salt '*' system.get_remote_login
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getremotelogin')
|
||||
|
||||
enabled = salt.utils.mac_utils.validate_enabled(
|
||||
salt.utils.mac_utils.parse_return(ret))
|
||||
enabled = __utils__['mac_utils.validate_enabled'](
|
||||
__utils__['mac_utils.parse_return'](ret))
|
||||
|
||||
return enabled == 'on'
|
||||
|
||||
|
@ -285,16 +235,14 @@ def set_remote_login(enable):
|
|||
|
||||
salt '*' system.set_remote_login True
|
||||
'''
|
||||
state = salt.utils.mac_utils.validate_enabled(enable)
|
||||
state = __utils__['mac_utils.validate_enabled'](enable)
|
||||
|
||||
cmd = 'systemsetup -f -setremotelogin {0}'.format(state)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
state,
|
||||
get_remote_login,
|
||||
normalize_ret=True,
|
||||
)
|
||||
return __utils__['mac_utils.confirm_updated'](state,
|
||||
get_remote_login,
|
||||
normalize_ret=True)
|
||||
|
||||
|
||||
def get_remote_events():
|
||||
|
@ -310,11 +258,11 @@ def get_remote_events():
|
|||
|
||||
salt '*' system.get_remote_events
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getremoteappleevents')
|
||||
|
||||
enabled = salt.utils.mac_utils.validate_enabled(
|
||||
salt.utils.mac_utils.parse_return(ret))
|
||||
enabled = __utils__['mac_utils.validate_enabled'](
|
||||
__utils__['mac_utils.parse_return'](ret))
|
||||
|
||||
return enabled == 'on'
|
||||
|
||||
|
@ -337,12 +285,12 @@ def set_remote_events(enable):
|
|||
|
||||
salt '*' system.set_remote_events On
|
||||
'''
|
||||
state = salt.utils.mac_utils.validate_enabled(enable)
|
||||
state = __utils__['mac_utils.validate_enabled'](enable)
|
||||
|
||||
cmd = 'systemsetup -setremoteappleevents {0}'.format(state)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
state,
|
||||
get_remote_events,
|
||||
normalize_ret=True,
|
||||
|
@ -362,10 +310,10 @@ def get_computer_name():
|
|||
|
||||
salt '*' system.get_computer_name
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getcomputername')
|
||||
|
||||
return salt.utils.mac_utils.parse_return(ret)
|
||||
return __utils__['mac_utils.parse_return'](ret)
|
||||
|
||||
|
||||
def set_computer_name(name):
|
||||
|
@ -384,9 +332,9 @@ def set_computer_name(name):
|
|||
salt '*' system.set_computer_name "Mike's Mac"
|
||||
'''
|
||||
cmd = 'systemsetup -setcomputername "{0}"'.format(name)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
name,
|
||||
get_computer_name,
|
||||
)
|
||||
|
@ -405,10 +353,10 @@ def get_subnet_name():
|
|||
|
||||
salt '*' system.get_subnet_name
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getlocalsubnetname')
|
||||
|
||||
return salt.utils.mac_utils.parse_return(ret)
|
||||
return __utils__['mac_utils.parse_return'](ret)
|
||||
|
||||
|
||||
def set_subnet_name(name):
|
||||
|
@ -431,9 +379,9 @@ def set_subnet_name(name):
|
|||
salt '*' system.set_subnet_name "Mike's Mac"
|
||||
'''
|
||||
cmd = 'systemsetup -setlocalsubnetname "{0}"'.format(name)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
name,
|
||||
get_subnet_name,
|
||||
)
|
||||
|
@ -452,10 +400,10 @@ def get_startup_disk():
|
|||
|
||||
salt '*' system.get_startup_disk
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getstartupdisk')
|
||||
|
||||
return salt.utils.mac_utils.parse_return(ret)
|
||||
return __utils__['mac_utils.parse_return'](ret)
|
||||
|
||||
|
||||
def list_startup_disks():
|
||||
|
@ -471,7 +419,7 @@ def list_startup_disks():
|
|||
|
||||
salt '*' system.list_startup_disks
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -liststartupdisks')
|
||||
|
||||
return ret.splitlines()
|
||||
|
@ -501,9 +449,9 @@ def set_startup_disk(path):
|
|||
raise SaltInvocationError(msg)
|
||||
|
||||
cmd = 'systemsetup -setstartupdisk {0}'.format(path)
|
||||
salt.utils.mac_utils.execute_return_result(cmd)
|
||||
__utils__['mac_utils.execute_return_result'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
path,
|
||||
get_startup_disk,
|
||||
)
|
||||
|
@ -524,10 +472,10 @@ def get_restart_delay():
|
|||
|
||||
salt '*' system.get_restart_delay
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getwaitforstartupafterpowerfailure')
|
||||
|
||||
return salt.utils.mac_utils.parse_return(ret)
|
||||
return __utils__['mac_utils.parse_return'](ret)
|
||||
|
||||
|
||||
def set_restart_delay(seconds):
|
||||
|
@ -565,9 +513,9 @@ def set_restart_delay(seconds):
|
|||
raise SaltInvocationError(msg)
|
||||
|
||||
cmd = 'systemsetup -setwaitforstartupafterpowerfailure {0}'.format(seconds)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
seconds,
|
||||
get_restart_delay,
|
||||
)
|
||||
|
@ -587,11 +535,11 @@ def get_disable_keyboard_on_lock():
|
|||
|
||||
salt '*' system.get_disable_keyboard_on_lock
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getdisablekeyboardwhenenclosurelockisengaged')
|
||||
|
||||
enabled = salt.utils.mac_utils.validate_enabled(
|
||||
salt.utils.mac_utils.parse_return(ret))
|
||||
enabled = __utils__['mac_utils.validate_enabled'](
|
||||
__utils__['mac_utils.parse_return'](ret))
|
||||
|
||||
return enabled == 'on'
|
||||
|
||||
|
@ -614,13 +562,13 @@ def set_disable_keyboard_on_lock(enable):
|
|||
|
||||
salt '*' system.set_disable_keyboard_on_lock False
|
||||
'''
|
||||
state = salt.utils.mac_utils.validate_enabled(enable)
|
||||
state = __utils__['mac_utils.validate_enabled'](enable)
|
||||
|
||||
cmd = 'systemsetup -setdisablekeyboardwhenenclosurelockisengaged ' \
|
||||
'{0}'.format(state)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
state,
|
||||
get_disable_keyboard_on_lock,
|
||||
normalize_ret=True,
|
||||
|
@ -640,10 +588,10 @@ def get_boot_arch():
|
|||
|
||||
salt '*' system.get_boot_arch
|
||||
'''
|
||||
ret = salt.utils.mac_utils.execute_return_result(
|
||||
ret = __utils__['mac_utils.execute_return_result'](
|
||||
'systemsetup -getkernelbootarchitecturesetting')
|
||||
|
||||
arch = salt.utils.mac_utils.parse_return(ret)
|
||||
arch = __utils__['mac_utils.parse_return'](ret)
|
||||
|
||||
if 'default' in arch:
|
||||
return 'default'
|
||||
|
@ -688,9 +636,9 @@ def set_boot_arch(arch='default'):
|
|||
raise SaltInvocationError(msg)
|
||||
|
||||
cmd = 'systemsetup -setkernelbootarchitecture {0}'.format(arch)
|
||||
salt.utils.mac_utils.execute_return_success(cmd)
|
||||
__utils__['mac_utils.execute_return_success'](cmd)
|
||||
|
||||
return salt.utils.mac_utils.confirm_updated(
|
||||
return __utils__['mac_utils.confirm_updated'](
|
||||
arch,
|
||||
get_boot_arch,
|
||||
)
|
||||
|
|
|
@ -429,6 +429,103 @@ class _policy_info(object):
|
|||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined',
|
||||
}
|
||||
self.force_key_protection = {
|
||||
0: 'User input is not required when new keys are stored and used',
|
||||
1: 'User is prompted when the key is first used',
|
||||
2: 'User must enter a password each time they use a key',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined'
|
||||
}
|
||||
self.krb_encryption_types = {
|
||||
0: 'No minimum',
|
||||
1: 'DES_CBC_CRC',
|
||||
2: 'DES_CBC_MD5',
|
||||
4: 'RC4_HMAC_MD5',
|
||||
8: 'AES128_HMAC_SHA1',
|
||||
16: 'AES256_HMAC_SHA1',
|
||||
2147483616: 'Future Encryption Types',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined',
|
||||
}
|
||||
self.lm_compat_levels = {
|
||||
0: 'Send LM & NTLM response',
|
||||
1: 'Send LM & NTLM - use NTLMv2 session security if negotiated',
|
||||
2: 'Send NTLM response only',
|
||||
3: 'Send NTLMv2 response only',
|
||||
4: 'Send NTLMv2 response only. Refuse LM',
|
||||
5: 'Send NTLMv2 response only. Refuse LM & NTLM',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined',
|
||||
}
|
||||
self.ldap_signing_reqs = {
|
||||
0: 'None',
|
||||
1: 'Negotiate signing',
|
||||
2: 'Require signing',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined',
|
||||
}
|
||||
self.ntlm_session_security_levels = {
|
||||
0: 'No minimum',
|
||||
524288: 'Require NTLMv2 session security',
|
||||
536870912: 'Require 128-bit encryption',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined',
|
||||
}
|
||||
self.ntlm_audit_settings = {
|
||||
0: 'Disable',
|
||||
1: 'Enable auditing for domain accounts',
|
||||
2: 'Enable auditing for all accounts',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined'
|
||||
}
|
||||
self.ntlm_domain_audit_settings = {
|
||||
0: 'Disable',
|
||||
1: 'Enable for domain accounts to domain servers',
|
||||
3: 'Enable for domain accounts',
|
||||
5: 'Enable for domain servers',
|
||||
7: 'Enable all',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined'
|
||||
}
|
||||
self.incoming_ntlm_settings = {
|
||||
0: 'Allow all',
|
||||
1: 'Deny all domain accounts',
|
||||
2: 'Deny all accounts',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined'
|
||||
}
|
||||
self.ntlm_domain_auth_settings = {
|
||||
0: 'Disable',
|
||||
1: 'Deny for domain accounts to domain servers',
|
||||
3: 'Deny for domain accounts',
|
||||
5: 'Deny for domain servers',
|
||||
7: 'Deny all',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined'
|
||||
}
|
||||
self.outgoing_ntlm_settings = {
|
||||
0: 'Allow all',
|
||||
1: 'Audit all',
|
||||
2: 'Deny all',
|
||||
None: 'Not Defined',
|
||||
'(value not set)': 'Not Defined'
|
||||
}
|
||||
self.enabled_one_disabled_zero_no_not_defined = {
|
||||
0: 'Disabled',
|
||||
1: 'Enabled',
|
||||
}
|
||||
self.enabled_one_disabled_zero_no_not_defined_transform = {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.enabled_one_disabled_zero_no_not_defined,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.enabled_one_disabled_zero_no_not_defined,
|
||||
'value_lookup': True,
|
||||
},
|
||||
}
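# Hedged sketch (not Salt's actual helper) of what this lookup transform amounts to:
# 'Get' maps the stored raw value to its display text, while 'Put' (value_lookup=True)
# maps the display text back to the raw value. For example, with
# lookup = {0: 'Disabled', 1: 'Enabled'}:
#   dict_lookup(1, lookup)                                  -> 'Enabled'
#   dict_lookup('Disabled', lookup, value_lookup=True)      -> 0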
|
||||
self.policies = {
|
||||
'Machine': {
|
||||
'lgpo_section': 'Computer Configuration',
|
||||
|
@ -547,12 +644,12 @@ class _policy_info(object):
|
|||
'Policy': 'Network access: Allow anonymous SID/Name '
|
||||
'translation',
|
||||
'lgpo_section': self.password_policy_gpedit_path,
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'Settings': self.enabled_one_disabled_zero_no_not_defined.keys(),
|
||||
'Secedit': {
|
||||
'Option': 'LSAAnonymousNameLookup',
|
||||
'Section': 'System Access',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
'Transform': self.enabled_one_disabled_zero_no_not_defined_transform,
|
||||
},
|
||||
'RestrictAnonymousSam': {
|
||||
'Policy': 'Network access: Do not allow anonymous '
|
||||
|
@ -567,6 +664,20 @@ class _policy_info(object):
|
|||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'RestrictRemoteSAM': {
|
||||
'Policy': 'Network access: Restrict clients allowed to '
|
||||
'make remote calls to SAM',
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\Lsa',
|
||||
'Value': 'RestrictRemoteSAM',
|
||||
'Type': 'REG_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_string_put_transform'
|
||||
}
|
||||
},
|
||||
'RestrictAnonymous': {
|
||||
'Policy': 'Network access: Do not allow anonymous '
|
||||
'enumeration of SAM accounts and shares',
|
||||
|
@ -618,6 +729,9 @@ class _policy_info(object):
|
|||
'Value': 'NullSessionPipes',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'RemoteRegistryExactPaths': {
|
||||
'Policy': 'Network access: Remotely accessible '
|
||||
|
@ -631,6 +745,9 @@ class _policy_info(object):
|
|||
'Value': 'Machine',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'RemoteRegistryPaths': {
|
||||
'Policy': 'Network access: Remotely accessible '
|
||||
|
@ -643,6 +760,9 @@ class _policy_info(object):
|
|||
'Value': 'Machine',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'RestrictNullSessAccess': {
|
||||
'Policy': 'Network access: Restrict anonymous access '
|
||||
|
@ -669,6 +789,9 @@ class _policy_info(object):
|
|||
'Value': 'NullSessionShares',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'ForceGuest': {
|
||||
'Policy': 'Network access: Sharing and security model '
|
||||
|
@ -757,32 +880,32 @@ class _policy_info(object):
|
|||
'PasswordComplexity': {
|
||||
'Policy': 'Password must meet complexity requirements',
|
||||
'lgpo_section': self.password_policy_gpedit_path,
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'Settings': self.enabled_one_disabled_zero_no_not_defined.keys(),
|
||||
'Secedit': {
|
||||
'Option': 'PasswordComplexity',
|
||||
'Section': 'System Access',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
'Transform': self.enabled_one_disabled_zero_no_not_defined_transform,
|
||||
},
|
||||
'ClearTextPasswords': {
|
||||
'Policy': 'Store passwords using reversible encryption',
|
||||
'lgpo_section': self.password_policy_gpedit_path,
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'Settings': self.enabled_one_disabled_zero_no_not_defined.keys(),
|
||||
'Secedit': {
|
||||
'Option': 'ClearTextPassword',
|
||||
'Section': 'System Access',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
'Transform': self.enabled_one_disabled_zero_no_not_defined_transform,
|
||||
},
|
||||
'AdminAccountStatus': {
|
||||
'Policy': 'Accounts: Administrator account status',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'Settings': self.enabled_one_disabled_zero_no_not_defined.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Secedit': {
|
||||
'Option': 'EnableAdminAccount',
|
||||
'Section': 'System Access',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
'Transform': self.enabled_one_disabled_zero_no_not_defined_transform,
|
||||
},
|
||||
'NoConnectedUser': {
|
||||
'Policy': 'Accounts: Block Microsoft accounts',
|
||||
|
@ -810,13 +933,13 @@ class _policy_info(object):
|
|||
},
|
||||
'GuestAccountStatus': {
|
||||
'Policy': 'Accounts: Guest account status',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'Settings': self.enabled_one_disabled_zero_no_not_defined.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Secedit': {
|
||||
'Option': 'EnableGuestAccount',
|
||||
'Section': 'System Access',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
'Transform': self.enabled_one_disabled_zero_no_not_defined_transform,
|
||||
},
|
||||
'LimitBlankPasswordUse': {
|
||||
'Policy': 'Accounts: Limit local account use of blank '
|
||||
|
@ -1193,6 +1316,9 @@ class _policy_info(object):
|
|||
'Value': 'legalnoticetext',
|
||||
'Type': 'REG_SZ',
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_string_put_transform'
|
||||
}
|
||||
},
|
||||
'legalnoticecaption': {
|
||||
'Policy': 'Interactive logon: Message title for users '
|
||||
|
@ -1205,6 +1331,9 @@ class _policy_info(object):
|
|||
'Value': 'legalnoticecaption',
|
||||
'Type': 'REG_SZ',
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_string_put_transform'
|
||||
}
|
||||
},
|
||||
'DontDisplayLockedUserId': {
|
||||
'Policy': 'Interactive logon: Display user information '
|
||||
|
@ -2306,7 +2435,7 @@ class _policy_info(object):
|
|||
},
|
||||
},
|
||||
'SeTakeOwnershipPrivilege': {
|
||||
'Policy': 'Take ownership of files and other objects',
|
||||
'Policy': 'Take ownership of files or other objects',
|
||||
'lgpo_section': self.user_rights_assignment_gpedit_path,
|
||||
'Settings': None,
|
||||
'LsaRights': {
|
||||
|
@ -2345,6 +2474,481 @@ class _policy_info(object):
|
|||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'ForceKeyProtection': {
|
||||
'Policy': 'System Cryptography: Force strong key protection for '
|
||||
'user keys stored on the computer',
|
||||
'Settings': self.force_key_protection.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'Software\\Policies\\Microsoft\\Cryptography',
|
||||
'Value': 'ForceKeyProtection',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.force_key_protection,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.force_key_protection,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'FIPSAlgorithmPolicy': {
|
||||
'Policy': 'System Cryptography: Use FIPS compliant algorithms '
|
||||
'for encryption, hashing, and signing',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\Lsa\\FIPSAlgorithmPolicy',
|
||||
'Value': 'Enabled',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'MachineAccessRestriction': {
|
||||
'Policy': 'DCOM: Machine Access Restrictions in Security Descriptor '
|
||||
'Definition Language (SDDL) syntax',
|
||||
'Settings': None,
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'Software\\Policies\\Microsoft\\Windows NT\\DCOM',
|
||||
'Value': 'MachineAccessRestriction',
|
||||
'Type': 'REG_SZ',
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_string_put_transform'
|
||||
}
|
||||
},
|
||||
'MachineLaunchRestriction': {
|
||||
'Policy': 'DCOM: Machine Launch Restrictions in Security Descriptor '
|
||||
'Definition Language (SDDL) syntax',
|
||||
'Settings': None,
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'Software\\Policies\\Microsoft\\Windows NT\\DCOM',
|
||||
'Value': 'MachineLaunchRestriction',
|
||||
'Type': 'REG_SZ',
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_string_put_transform'
|
||||
}
|
||||
},
|
||||
'UseMachineId': {
|
||||
'Policy': 'Network security: Allow Local System to use computer '
|
||||
'identity for NTLM',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\Lsa',
|
||||
'Value': 'UseMachineId',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'allownullsessionfallback': {
|
||||
'Policy': 'Network security: Allow LocalSystem NULL session fallback',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\Lsa\\MSV1_0',
|
||||
'Value': 'allownullsessionfallback',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'AllowOnlineID': {
|
||||
'Policy': 'Network security: Allow PKU2U authentication requests '
|
||||
'to this computer to use online identities.',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\Lsa\\pku2u',
|
||||
'Value': 'AllowOnlineID',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'KrbSupportedEncryptionTypes': {
|
||||
'Policy': 'Network security: Configure encryption types allowed '
|
||||
'for Kerberos',
|
||||
'Settings': None,
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\policies'
|
||||
'\\system\\Kerberos\\Parameters',
|
||||
'Value': 'SupportedEncryptionTypes',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup_bitwise_add',
|
||||
'Put': '_dict_lookup_bitwise_add',
|
||||
'GetArgs': {
|
||||
'lookup': self.krb_encryption_types,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.krb_encryption_types,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'NoLMHash': {
|
||||
'Policy': 'Network security: Do not store LAN Manager hash value '
|
||||
'on next password change',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\Lsa',
|
||||
'Value': 'NoLMHash',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'ForceLogoffWhenHourExpire': {
|
||||
'Policy': 'Network security: Force logoff when logon hours expire',
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Settings': self.enabled_one_disabled_zero_no_not_defined.keys(),
|
||||
'Secedit': {
|
||||
'Option': 'ForceLogoffWhenHourExpire',
|
||||
'Section': 'System Access',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_no_not_defined_transform,
|
||||
},
|
||||
'LmCompatibilityLevel': {
|
||||
'Policy': 'Network security: LAN Manager authentication level',
|
||||
'Settings': self.lm_compat_levels.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\Lsa',
|
||||
'Value': 'LmCompatibilityLevel',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.lm_compat_levels,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.lm_compat_levels,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'LDAPClientIntegrity': {
|
||||
'Policy': 'Network security: LDAP client signing requirements',
|
||||
'Settings': self.ldap_signing_reqs.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Services\\ldap',
|
||||
'Value': 'LDAPClientIntegrity',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.ldap_signing_reqs,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.ldap_signing_reqs,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'NTLMMinClientSec': {
|
||||
'Policy': 'Network security: Minimum session security for NTLM SSP based '
|
||||
'(including secure RPC) clients',
|
||||
'Settings': None,
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\Lsa\\MSV1_0',
|
||||
'Value': 'NTLMMinClientSec',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup_bitwise_add',
|
||||
'Put': '_dict_lookup_bitwise_add',
|
||||
'GetArgs': {
|
||||
'lookup': self.ntlm_session_security_levels,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.ntlm_session_security_levels,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'NTLMMinServerSec': {
|
||||
'Policy': 'Network security: Minimum session security for NTLM SSP based '
|
||||
'(including secure RPC) servers',
|
||||
'Settings': None,
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\Lsa\\MSV1_0',
|
||||
'Value': 'NTLMMinServerSec',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup_bitwise_add',
|
||||
'Put': '_dict_lookup_bitwise_add',
|
||||
'GetArgs': {
|
||||
'lookup': self.ntlm_session_security_levels,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.ntlm_session_security_levels,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'ClientAllowedNTLMServers': {
|
||||
'Policy': 'Network security: Restrict NTLM: Add remote server'
|
||||
' exceptions for NTLM authentication',
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\Lsa\\MSV1_0',
|
||||
'Value': 'ClientAllowedNTLMServers',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'DCAllowedNTLMServers': {
|
||||
'Policy': 'Network security: Restrict NTLM: Add server exceptions'
|
||||
' in this domain',
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Services\\Netlogon\\Parameters',
|
||||
'Value': 'DCAllowedNTLMServers',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'AuditReceivingNTLMTraffic': {
|
||||
'Policy': 'Network security: Restrict NTLM: Audit Incoming NTLM Traffic',
|
||||
'Settings': self.ntlm_audit_settings.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\LSA\\MSV1_0',
|
||||
'Value': 'AuditReceivingNTLMTraffic',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.ntlm_audit_settings,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.ntlm_audit_settings,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'AuditNTLMInDomain': {
|
||||
'Policy': 'Network security: Restrict NTLM: Audit NTLM '
|
||||
'authentication in this domain',
|
||||
'Settings': self.ntlm_domain_audit_settings.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Services\\Netlogon\\Parameters',
|
||||
'Value': 'AuditNTLMInDomain',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.ntlm_domain_audit_settings,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.ntlm_domain_audit_settings,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'RestrictReceivingNTLMTraffic': {
|
||||
'Policy': 'Network security: Restrict NTLM: Incoming'
|
||||
' NTLM traffic',
|
||||
'Settings': self.incoming_ntlm_settings.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\LSA\\MSV1_0',
|
||||
'Value': 'RestrictReceivingNTLMTraffic',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.incoming_ntlm_settings,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.incoming_ntlm_settings,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'RestrictNTLMInDomain': {
|
||||
'Policy': 'Network security: Restrict NTLM: NTLM '
|
||||
'authentication in this domain',
|
||||
'Settings': self.ntlm_domain_auth_settings.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Services\\Netlogon\\Parameters',
|
||||
'Value': 'RestrictNTLMInDomain',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.ntlm_domain_auth_settings,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.ntlm_domain_auth_settings,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'RestrictSendingNTLMTraffic': {
|
||||
'Policy': 'Network security: Restrict NTLM: Outgoing NTLM'
|
||||
' traffic to remote servers',
|
||||
'Settings': self.outgoing_ntlm_settings.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SYSTEM\\CurrentControlSet\\Control\\Lsa\\MSV1_0',
|
||||
'Value': 'RestrictSendingNTLMTraffic',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.outgoing_ntlm_settings,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.outgoing_ntlm_settings,
|
||||
'value_lookup': True,
|
||||
},
|
||||
},
|
||||
},
|
||||
'ShutdownWithoutLogon': {
|
||||
'Policy': 'Shutdown: Allow system to be shut down '
|
||||
'without having to log on',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\policies\\system',
|
||||
'Value': 'ShutdownWithoutLogon',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'ClearPageFileAtShutdown': {
|
||||
'Policy': 'Shutdown: Clear virtual memory pagefile',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\'
|
||||
'SESSION MANAGER\\MEMORY MANAGEMENT',
|
||||
'Value': 'ClearPageFileAtShutdown',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'ObCaseInsensitive': {
|
||||
'Policy': 'System objects: Require case insensitivity for '
|
||||
'non-Windows subsystems',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\'
|
||||
'SESSION MANAGER\\Kernel',
|
||||
'Value': 'ObCaseInsensitive',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'ProtectionMode': {
|
||||
'Policy': 'System objects: Strengthen default permissions of '
|
||||
'internal system objects (e.g. Symbolic Links)',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\'
|
||||
'SESSION MANAGER',
|
||||
'Value': 'ProtectionMode',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
'OptionalSubsystems': {
|
||||
'Policy': 'System settings: Optional subsystems',
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'System\\CurrentControlSet\\Control\\'
|
||||
'SESSION MANAGER\\SubSystems',
|
||||
'Value': 'optional',
|
||||
'Type': 'REG_MULTI_SZ'
|
||||
},
|
||||
'Transform': {
|
||||
'Put': '_multi_string_put_transform'
|
||||
}
|
||||
},
|
||||
'AuthenticodeEnabled': {
|
||||
'Policy': 'System settings: Use Certificate Rules on Windows'
|
||||
' Executables for Software Restriction Policies',
|
||||
'Settings': self.enabled_one_disabled_zero.keys(),
|
||||
'lgpo_section': self.security_options_gpedit_path,
|
||||
'Registry': {
|
||||
'Hive': 'HKEY_LOCAL_MACHINE',
|
||||
'Path': 'SOFTWARE\\Policies\\Microsoft\\Windows\\safer\\codeidentifiers',
|
||||
'Value': 'AuthenticodeEnabled',
|
||||
'Type': 'REG_DWORD',
|
||||
},
|
||||
'Transform': self.enabled_one_disabled_zero_transform,
|
||||
},
|
||||
}
|
||||
},
|
||||
'User': {
|
||||
|
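All of the registry-backed entries above share one transform pattern: a lookup dict maps the raw REG_DWORD value to the display string shown in the Group Policy editor, and `_dict_lookup` is used once for Get (value to name) and once for Put (name back to value). The following is a minimal standalone sketch of that round trip; `force_key_protection` here is an illustrative stand-in for the module's own lookup table, and `dict_lookup` is a simplified stand-in for the `_dict_lookup` classmethod, not the real implementation.

force_key_protection = {
    0: 'User input is not required when new keys are stored and used',
    1: 'User is prompted when the key is first used',
    2: 'User must enter a password each time they use a key',
}

def dict_lookup(item, lookup, value_lookup=False):
    # Get direction: translate a registry value into its display name.
    # Put direction (value_lookup=True): translate the name back to a value.
    for key, value in lookup.items():
        if value_lookup:
            if str(item).lower() == str(value).lower():
                return key
        elif key == item:
            return value
    return 'Invalid Value'

print(dict_lookup(2, force_key_protection))
# -> 'User must enter a password each time they use a key'
print(dict_lookup('User is prompted when the key is first used',
                  force_key_protection, value_lookup=True))
# -> 1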
@@ -2686,6 +3290,76 @@ class _policy_info(object):
|
|||
return v
|
||||
return 'Invalid Value'
|
||||
|
||||
@classmethod
|
||||
def _dict_lookup_bitwise_add(cls, item, **kwargs):
'''
The kwarg ``value_lookup`` is a bool which controls whether ``item`` is
compared against the keys or the values of ``lookup``.

The kwarg ``test_zero`` controls whether a key of 0 is tested when
``value_lookup`` is False.

``lookup`` should be a dict whose keys are integers.

If ``value_lookup`` is True, ``item`` is expected to be a list; the
function returns the sum of the keys whose values are in that list.
If ``value_lookup`` is False, ``item`` is expected to be an integer; the
function returns the values of the keys that bitwise-AND with ``item``.
'''
|
||||
value_lookup = kwargs.get('value_lookup', False)
|
||||
test_zero = kwargs.get('test_zero', False)
|
||||
ret_val = None
|
||||
if str(item).lower() == 'not defined':
|
||||
return None
|
||||
if value_lookup:
|
||||
if not isinstance(item, list):
|
||||
return 'Invalid Value'
|
||||
ret_val = 0
|
||||
else:
|
||||
if not isinstance(item, six.integer_types):
|
||||
return 'Invalid Value'
|
||||
ret_val = []
|
||||
if 'lookup' in kwargs:
|
||||
for k, v in six.iteritems(kwargs['lookup']):
|
||||
if value_lookup:
|
||||
if six.text_type(v).lower() in [z.lower() for z in item]:
|
||||
ret_val = ret_val + k
|
||||
else:
|
||||
do_test = True
|
||||
if not test_zero:
|
||||
if k == 0:
|
||||
do_test = False
|
||||
if do_test and isinstance(k, int) and item & k == k:
|
||||
ret_val.append(v)
|
||||
else:
|
||||
return 'Invalid Value'
|
||||
return ret_val
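A hedged usage sketch of `_dict_lookup_bitwise_add` as defined above: decomposing a combined REG_DWORD into setting names, then folding the names back into a value. The `krb_encryption_types` mapping shown here is only an illustrative subset, not the module's full table.

krb_encryption_types = {0: 'No minimum', 1: 'DES_CBC_CRC', 2: 'DES_CBC_MD5',
                        4: 'RC4_HMAC_MD5', 8: 'AES128_HMAC_SHA1',
                        16: 'AES256_HMAC_SHA1'}

# Get: 24 == 8 | 16, so both AES types are reported.
names = _policy_info._dict_lookup_bitwise_add(
    24, lookup=krb_encryption_types, value_lookup=False)
# names == ['AES128_HMAC_SHA1', 'AES256_HMAC_SHA1']

# Put: the same names fold back into the registry value 24.
value = _policy_info._dict_lookup_bitwise_add(
    ['AES128_HMAC_SHA1', 'AES256_HMAC_SHA1'],
    lookup=krb_encryption_types, value_lookup=True)
# value == 24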
|
||||
|
||||
@classmethod
def _multi_string_put_transform(cls, item, **kwargs):
'''
transform for a REG_MULTI_SZ to properly handle "Not Defined"
'''
if isinstance(item, list):
return item
elif isinstance(item, six.string_types):
if item.lower() == 'not defined':
return None
else:
return item.split(',')
else:
return 'Invalid Value'
|
||||
|
||||
@classmethod
|
||||
def _string_put_transform(cls, item, **kwargs):
|
||||
'''
|
||||
transform for a REG_SZ to properly handle "Not Defined"
|
||||
'''
|
||||
if isinstance(item, six.string_types):
|
||||
if item.lower() == 'not defined':
|
||||
return None
|
||||
else:
|
||||
return item
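A short sketch of how the two Put transforms above normalize state input before it is written to the registry; the argument values are arbitrary examples.

# REG_MULTI_SZ: a comma-separated string becomes a list, and the literal
# string "Not Defined" becomes None (i.e. remove the value).
_policy_info._multi_string_put_transform('server1,server2')  # ['server1', 'server2']
_policy_info._multi_string_put_transform(['server1'])        # passed through unchanged
_policy_info._multi_string_put_transform('Not Defined')      # None

# REG_SZ: strings pass through unchanged, except "Not Defined".
_policy_info._string_put_transform('O:BAG:BAD:(A;;CCDCLC;;;WD)')  # unchanged
_policy_info._string_put_transform('Not Defined')                 # None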
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
|
@@ -5664,10 +6338,15 @@ def set_(computer_policy=None, user_policy=None,
|
|||
_regedits[regedit]['value'],
|
||||
_regedits[regedit]['policy']['Registry']['Type'])
|
||||
else:
|
||||
_ret = __salt__['reg.delete_value'](
|
||||
_ret = __salt__['reg.read_value'](
|
||||
_regedits[regedit]['policy']['Registry']['Hive'],
|
||||
_regedits[regedit]['policy']['Registry']['Path'],
|
||||
_regedits[regedit]['policy']['Registry']['Value'])
|
||||
if _ret['success'] and _ret['vdata'] != '(value not set)':
|
||||
_ret = __salt__['reg.delete_value'](
|
||||
_regedits[regedit]['policy']['Registry']['Hive'],
|
||||
_regedits[regedit]['policy']['Registry']['Path'],
|
||||
_regedits[regedit]['policy']['Registry']['Value'])
|
||||
if not _ret:
|
||||
msg = ('Error while attempting to set policy {0} via the registry.'
|
||||
' Some changes may not be applied as expected')
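The hunk above changes `set_` so that, instead of unconditionally calling `reg.delete_value`, it first reads the value and only deletes it when it actually exists. A hedged sketch of that guard as it would run inside a loaded execution or state module (where `__salt__` is available); the hive, path, and value names are placeholders, not a real policy.

hive = 'HKEY_LOCAL_MACHINE'
path = 'Software\\Policies\\Example'          # placeholder path
vname = 'ExampleValue'                        # placeholder value name

current = __salt__['reg.read_value'](hive, path, vname)
if current['success'] and current['vdata'] != '(value not set)':
    # Only delete something that is really there.
    result = __salt__['reg.delete_value'](hive, path, vname)
else:
    # Nothing to remove; the policy is effectively already "Not Defined".
    result = True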
|
||||
|
|
|
@@ -29,19 +29,20 @@ def __virtual__():
|
|||
|
||||
def ext_pillar(minion_id, pillar, *args, **kwargs):
|
||||
'''
|
||||
Node definitions path will be retrieved from args - or set to default -
|
||||
then added to 'salt_data' dict that is passed to the 'get_pillars' function.
|
||||
'salt_data' dict is a convenient way to pass all the required datas to the function
|
||||
It contains:
|
||||
- __opts__
|
||||
- __salt__
|
||||
- __grains__
|
||||
- __pillar__
|
||||
- minion_id
|
||||
- path
|
||||
|
||||
If successfull the function will return a pillar dict for minion_id
|
||||
Compile pillar data
|
||||
'''
|
||||
# Node definitions path will be retrieved from args (or set to default),
|
||||
# then added to 'salt_data' dict that is passed to the 'get_pillars'
|
||||
# function. The dictionary contains:
|
||||
# - __opts__
|
||||
# - __salt__
|
||||
# - __grains__
|
||||
# - __pillar__
|
||||
# - minion_id
|
||||
# - path
|
||||
#
|
||||
# If successful, the function will return a pillar dict for minion_id.
|
||||
|
||||
# If path has not been set, make a default
|
||||
for i in args:
|
||||
if 'path' not in i:
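A sketch of the salt_data dict described above, as it is assembled before `get_pillars` is called. This is a simplified assumption of the flow, and the default node-definitions path shown is only an example, not necessarily the module's real default.

path = '/srv/saltclass'                # assumed default, for illustration only
for arg in args:
    if 'path' in arg:
        path = arg['path']

salt_data = {
    '__opts__': __opts__,
    '__salt__': __salt__,
    '__grains__': __grains__,
    '__pillar__': pillar,
    'minion_id': minion_id,
    'path': path,
}
# get_pillars(minion_id, salt_data) then returns the compiled pillar dict.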
|
||||
|
|
|
@ -16,8 +16,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
r'''
|
||||
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
|
||||
|
||||
Execution of Ansible modules from within states
|
||||
===============================================
|
||||
|
||||
|
|
|
@@ -34,6 +34,12 @@ Management of the Salt beacons
|
|||
'''
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
from salt.ext import six
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def present(name,
|
||||
save=False,
|
||||
|
@ -54,7 +60,7 @@ def present(name,
|
|||
'comment': []}
|
||||
|
||||
current_beacons = __salt__['beacons.list'](return_yaml=False)
|
||||
beacon_data = [kwargs]
|
||||
beacon_data = [{k: v} for k, v in six.iteritems(kwargs)]
|
||||
|
||||
if name in current_beacons:
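The change above reshapes the state's kwargs into a list of single-key dicts, which is the shape `beacons.list` returns and therefore what the comparison expects. A small sketch of the difference, with example values:

kwargs = {'interval': 5, '/': '38%'}

old_shape = [kwargs]
# [{'interval': 5, '/': '38%'}]  -- one dict, never matches beacons.list output

new_shape = [{k: v} for k, v in kwargs.items()]
# [{'interval': 5}, {'/': '38%'}] -- matches the stored beacon format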
|
||||
|
||||
|
|
|
@@ -281,8 +281,8 @@ def latest(name,
|
|||
identity=None,
|
||||
https_user=None,
|
||||
https_pass=None,
|
||||
onlyif=False,
|
||||
unless=False,
|
||||
onlyif=None,
|
||||
unless=None,
|
||||
refspec_branch='*',
|
||||
refspec_tag='*',
|
||||
output_encoding=None,
|
||||
|
@ -2197,8 +2197,8 @@ def detached(name,
|
|||
identity=None,
|
||||
https_user=None,
|
||||
https_pass=None,
|
||||
onlyif=False,
|
||||
unless=False,
|
||||
onlyif=None,
|
||||
unless=None,
|
||||
output_encoding=None,
|
||||
**kwargs):
|
||||
'''
|
||||
|
@@ -3409,18 +3409,65 @@ def mod_run_check(cmd_kwargs, onlyif, unless):
|
|||
Otherwise, returns ``True``
|
||||
'''
|
||||
cmd_kwargs = copy.deepcopy(cmd_kwargs)
|
||||
cmd_kwargs['python_shell'] = True
|
||||
if onlyif:
|
||||
if __salt__['cmd.retcode'](onlyif, **cmd_kwargs) != 0:
|
||||
cmd_kwargs.update({
|
||||
'use_vt': False,
|
||||
'bg': False,
|
||||
'ignore_retcode': True,
|
||||
'python_shell': True,
|
||||
})
|
||||
|
||||
if onlyif is not None:
|
||||
if not isinstance(onlyif, list):
|
||||
onlyif = [onlyif]
|
||||
|
||||
for command in onlyif:
|
||||
if not isinstance(command, six.string_types) and command:
|
||||
# Boolean or some other non-string which resolves to True
|
||||
continue
|
||||
try:
|
||||
if __salt__['cmd.retcode'](command, **cmd_kwargs) == 0:
|
||||
# Command exited with a zero retcode
|
||||
continue
|
||||
except Exception as exc:
|
||||
log.exception(
|
||||
'The following onlyif command raised an error: %s',
|
||||
command
|
||||
)
|
||||
return {
|
||||
'comment': 'onlyif raised error ({0}), see log for '
|
||||
'more details'.format(exc),
|
||||
'result': False
|
||||
}
|
||||
|
||||
return {'comment': 'onlyif condition is false',
|
||||
'skip_watch': True,
|
||||
'result': True}
|
||||
|
||||
if unless:
|
||||
if __salt__['cmd.retcode'](unless, **cmd_kwargs) == 0:
|
||||
if unless is not None:
|
||||
if not isinstance(unless, list):
|
||||
unless = [unless]
|
||||
|
||||
for command in unless:
|
||||
if not isinstance(command, six.string_types) and not command:
|
||||
# Boolean or some other non-string which resolves to False
|
||||
break
|
||||
try:
|
||||
if __salt__['cmd.retcode'](command, **cmd_kwargs) != 0:
|
||||
# Command exited with a non-zero retcode
|
||||
break
|
||||
except Exception as exc:
|
||||
log.exception(
|
||||
'The following unless command raised an error: %s',
|
||||
command
|
||||
)
|
||||
return {
|
||||
'comment': 'unless raised error ({0}), see log for '
|
||||
'more details'.format(exc),
|
||||
'result': False
|
||||
}
|
||||
else:
|
||||
return {'comment': 'unless condition is true',
|
||||
'skip_watch': True,
|
||||
'result': True}
|
||||
|
||||
# No reason to stop, return True
|
||||
return True
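The rewritten check accepts either a single command or a list for both `onlyif` and `unless`. Roughly: the state is skipped if any `onlyif` command fails, or if every `unless` command succeeds. A standalone sketch of that behaviour, using plain `subprocess` as a stand-in for `cmd.retcode`; it is illustrative, not the module's error handling in full.

import subprocess

def retcode(cmd):
    # Stand-in for __salt__['cmd.retcode'] in this sketch.
    return subprocess.call(cmd, shell=True)

def run_check(onlyif=None, unless=None):
    if onlyif is not None:
        commands = onlyif if isinstance(onlyif, list) else [onlyif]
        if any(retcode(c) != 0 for c in commands if isinstance(c, str)):
            return 'onlyif condition is false'   # skip the state
    if unless is not None:
        commands = unless if isinstance(unless, list) else [unless]
        if commands and all(retcode(c) == 0 for c in commands if isinstance(c, str)):
            return 'unless condition is true'    # skip the state
    return True                                  # run the state

print(run_check(onlyif=['true', 'true'], unless=['false']))  # True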
|
||||
|
|
|
@@ -515,16 +515,16 @@ def mounted(name,
|
|||
if re.match(regex, _device):
|
||||
_device_mismatch_is_ignored = _device
|
||||
break
|
||||
if __opts__['test']:
|
||||
ret['result'] = None
|
||||
ret['comment'] = "An umount would have been forced " \
|
||||
+ "because devices do not match. Watched: " \
|
||||
+ device
|
||||
elif _device_mismatch_is_ignored:
|
||||
if _device_mismatch_is_ignored:
|
||||
ret['result'] = True
|
||||
ret['comment'] = "An umount will not be forced " \
|
||||
+ "because device matched device_name_regex: " \
|
||||
+ _device_mismatch_is_ignored
|
||||
elif __opts__['test']:
|
||||
ret['result'] = None
|
||||
ret['comment'] = "An umount would have been forced " \
|
||||
+ "because devices do not match. Watched: " \
|
||||
+ device
|
||||
else:
|
||||
ret['changes']['umount'] = "Forced unmount because devices " \
|
||||
+ "don't match. Wanted: " + device
|
||||
|
@ -764,7 +764,7 @@ def swap(name, persist=True, config='/etc/fstab'):
|
|||
else:
|
||||
fstab_data = __salt__['mount.fstab'](config)
|
||||
if __opts__['test']:
|
||||
if name not in fstab_data:
|
||||
if name not in fstab_data and name not in [fstab_data[item]['device'] for item in fstab_data]:
|
||||
ret['result'] = None
|
||||
if name in on_:
|
||||
ret['comment'] = ('Swap {0} is set to be added to the '
|
||||
|
|
|
@ -1720,7 +1720,7 @@ def installed(
|
|||
try:
|
||||
action = 'pkg.hold' if kwargs['hold'] else 'pkg.unhold'
|
||||
hold_ret = __salt__[action](
|
||||
name=name, pkgs=desired, sources=sources
|
||||
name=name, pkgs=desired
|
||||
)
|
||||
except (CommandExecutionError, SaltInvocationError) as exc:
|
||||
comment.append(six.text_type(exc))
|
||||
|
|
|
@ -62,7 +62,7 @@ def present(name, value, config=None):
|
|||
if __opts__['test']:
|
||||
current = __salt__['sysctl.show']()
|
||||
configured = __salt__['sysctl.show'](config_file=config)
|
||||
if not configured:
|
||||
if configured is None:
|
||||
ret['result'] = None
|
||||
ret['comment'] = (
|
||||
'Sysctl option {0} might be changed, we failed to check '
|
||||
|
|
|
@ -108,7 +108,7 @@ Saltclass Examples
|
|||
|
||||
``<saltclass_path>/nodes/lausanne/qls.node1.yml``
|
||||
|
||||
.. code-block:: yaml
|
||||
.. code-block:: jinja
|
||||
|
||||
environment: base
|
||||
|
||||
|
@ -228,19 +228,20 @@ def __virtual__():
|
|||
|
||||
def top(**kwargs):
|
||||
'''
|
||||
Node definitions path will be retrieved from __opts__ - or set to default -
|
||||
then added to 'salt_data' dict that is passed to the 'get_tops' function.
|
||||
'salt_data' dict is a convenient way to pass all the required datas to the function
|
||||
It contains:
|
||||
- __opts__
|
||||
- empty __salt__
|
||||
- __grains__
|
||||
- empty __pillar__
|
||||
- minion_id
|
||||
- path
|
||||
|
||||
If successfull the function will return a top dict for minion_id
|
||||
Compile tops
|
||||
'''
|
||||
# Node definitions path will be retrieved from args (or set to default),
|
||||
# then added to 'salt_data' dict that is passed to the 'get_pillars'
|
||||
# function. The dictionary contains:
|
||||
# - __opts__
|
||||
# - __salt__
|
||||
# - __grains__
|
||||
# - __pillar__
|
||||
# - minion_id
|
||||
# - path
|
||||
#
|
||||
# If successful, the function will return a pillar dict for minion_id.
|
||||
|
||||
# If path has not been set, make a default
|
||||
_opts = __opts__['master_tops']['saltclass']
|
||||
if 'path' not in _opts:
|
||||
|
|
|
@ -92,7 +92,7 @@ class PubServerChannel(object):
|
|||
raise Exception('Channels are only defined for ZeroMQ and raet')
|
||||
# return NewKindOfChannel(opts, **kwargs)
|
||||
|
||||
def pre_fork(self, process_manager):
|
||||
def pre_fork(self, process_manager, kwargs=None):
|
||||
'''
|
||||
Do anything necessary pre-fork. Since this is on the master side this will
|
||||
primarily be used to create IPC channels and create our daemon process to
|
||||
|
|
|
@ -1409,18 +1409,12 @@ class TCPPubServerChannel(salt.transport.server.PubServerChannel):
|
|||
except (KeyboardInterrupt, SystemExit):
|
||||
salt.log.setup.shutdown_multiprocessing_logging()
|
||||
|
||||
def pre_fork(self, process_manager):
|
||||
def pre_fork(self, process_manager, kwargs=None):
|
||||
'''
|
||||
Do anything necessary pre-fork. Since this is on the master side this will
|
||||
primarily be used to create IPC channels and create our daemon process to
|
||||
do the actual publishing
|
||||
'''
|
||||
kwargs = {}
|
||||
if salt.utils.platform.is_windows():
|
||||
kwargs['log_queue'] = (
|
||||
salt.log.setup.get_multiprocessing_logging_queue()
|
||||
)
|
||||
|
||||
process_manager.add_process(self._publish_daemon, kwargs=kwargs)
|
||||
|
||||
def publish(self, load):
|
||||
|
|
|
@ -18,6 +18,7 @@ from random import randint
|
|||
# Import Salt Libs
|
||||
import salt.auth
|
||||
import salt.crypt
|
||||
import salt.log.setup
|
||||
import salt.utils.event
|
||||
import salt.utils.files
|
||||
import salt.utils.minions
|
||||
|
@@ -767,11 +768,15 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel):
|
|||
def connect(self):
|
||||
return tornado.gen.sleep(5)
|
||||
|
||||
def _publish_daemon(self):
|
||||
def _publish_daemon(self, log_queue=None):
|
||||
'''
|
||||
Bind to the interface specified in the configuration file
|
||||
'''
|
||||
salt.utils.process.appendproctitle(self.__class__.__name__)
|
||||
if log_queue:
|
||||
salt.log.setup.set_multiprocessing_logging_queue(log_queue)
|
||||
salt.log.setup.setup_multiprocessing_logging(log_queue)
|
||||
|
||||
# Set up the context
|
||||
context = zmq.Context(1)
|
||||
# Prepare minion publish socket
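The new `log_queue` argument above exists so the forked publish daemon can feed its log records back to the parent's logging setup (important on platforms such as Windows where the child does not inherit handlers). Below is a standard-library analogue of that plumbing, not the `salt.log.setup` API itself.

import logging
import logging.handlers
import multiprocessing

def publish_daemon(log_queue=None):
    if log_queue is not None:
        # Child: push records onto the queue instead of owning handlers.
        logging.getLogger().addHandler(logging.handlers.QueueHandler(log_queue))
    logging.getLogger(__name__).warning('publish daemon started')

if __name__ == '__main__':
    queue = multiprocessing.Queue()
    # Parent: drain the queue into a real handler.
    listener = logging.handlers.QueueListener(queue, logging.StreamHandler())
    listener.start()
    proc = multiprocessing.Process(target=publish_daemon, kwargs={'log_queue': queue})
    proc.start()
    proc.join()
    listener.stop()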
|
||||
|
@ -818,8 +823,10 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel):
|
|||
# Catch and handle EINTR from when this process is sent
|
||||
# SIGUSR1 gracefully so we don't choke and die horribly
|
||||
try:
|
||||
log.trace('Getting data from puller %s', pull_uri)
|
||||
log.debug('Publish daemon getting data from puller %s', pull_uri)
|
||||
package = pull_sock.recv()
|
||||
log.debug('Publish daemon received payload. size=%d', len(package))
|
||||
|
||||
unpacked_package = salt.payload.unpackage(package)
|
||||
if six.PY3:
|
||||
unpacked_package = salt.transport.frame.decode_embedded_strs(unpacked_package)
|
||||
|
@ -863,7 +870,7 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel):
|
|||
if context.closed is False:
|
||||
context.term()
|
||||
|
||||
def pre_fork(self, process_manager):
|
||||
def pre_fork(self, process_manager, kwargs=None):
|
||||
'''
|
||||
Do anything necessary pre-fork. Since this is on the master side this will
|
||||
primarily be used to create IPC channels and create our daemon process to
|
||||
|
@ -871,7 +878,7 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel):
|
|||
|
||||
:param func process_manager: A ProcessManager, from salt.utils.process.ProcessManager
|
||||
'''
|
||||
process_manager.add_process(self._publish_daemon)
|
||||
process_manager.add_process(self._publish_daemon, kwargs=kwargs)
|
||||
|
||||
def publish(self, load):
|
||||
'''
|
||||
|
@ -916,8 +923,14 @@ class ZeroMQPubServerChannel(salt.transport.server.PubServerChannel):
|
|||
log.debug("Publish Side Match: %s", match_ids)
|
||||
# Send list of minions through so zmq can target them
|
||||
int_payload['topic_lst'] = match_ids
|
||||
payload = self.serial.dumps(int_payload)
|
||||
log.debug(
|
||||
'Sending payload to publish daemon. jid=%s size=%d',
|
||||
load.get('jid', None), len(payload),
|
||||
)
|
||||
pub_sock.send(payload)
|
||||
log.debug('Sent payload to publish daemon.')
|
||||
|
||||
pub_sock.send(self.serial.dumps(int_payload))
|
||||
pub_sock.close()
|
||||
context.term()
|
||||
|
||||
|
|
|
@@ -828,6 +828,9 @@ def wait_for_winrm(host, port, username, password, timeout=900, use_ssl=True, ve
|
|||
'''
|
||||
Wait until WinRM connection can be established.
|
||||
'''
|
||||
# Ensure the winrm service is listening before attempting to connect
|
||||
wait_for_port(host=host, port=port, timeout=timeout)
|
||||
|
||||
start = time.time()
|
||||
log.debug(
|
||||
'Attempting WinRM connection to host %s on port %s',
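The added `wait_for_port` call above avoids hammering WinRM before anything is listening on the port. A hedged sketch of what such a port wait does, using only the standard library rather than Salt's own helper:

import socket
import time

def wait_for_port(host, port, timeout=900):
    # Poll until a TCP connection succeeds or the timeout expires.
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with socket.create_connection((host, port), timeout=5):
                return True
        except OSError:
            time.sleep(1)
    return False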
|
||||
|
|
|
@@ -6,6 +6,22 @@ import sys
|
|||
import salt.payload
|
||||
|
||||
|
||||
def _trim_dict_in_dict(data, max_val_size, replace_with):
|
||||
'''
|
||||
Takes a dictionary, max_val_size and replace_with
|
||||
and recursively loops through and replaces any values
|
||||
that are greater than max_val_size.
|
||||
'''
|
||||
for key in data:
|
||||
if isinstance(data[key], dict):
|
||||
_trim_dict_in_dict(data[key],
|
||||
max_val_size,
|
||||
replace_with)
|
||||
else:
|
||||
if sys.getsizeof(data[key]) > max_val_size:
|
||||
data[key] = replace_with
|
||||
|
||||
|
||||
def trim_dict(
|
||||
data,
|
||||
max_dict_bytes,
|
||||
|
@@ -63,8 +79,13 @@ def trim_dict(
|
|||
max_val_size = float(max_dict_bytes * (percent / 100))
|
||||
try:
|
||||
for key in data:
|
||||
if sys.getsizeof(data[key]) > max_val_size:
|
||||
data[key] = replace_with
|
||||
if isinstance(data[key], dict):
|
||||
_trim_dict_in_dict(data[key],
|
||||
max_val_size,
|
||||
replace_with)
|
||||
else:
|
||||
if sys.getsizeof(data[key]) > max_val_size:
|
||||
data[key] = replace_with
|
||||
percent = percent - stepper_size
|
||||
max_val_size = float(max_dict_bytes * (percent / 100))
|
||||
if use_bin_type:
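The new `_trim_dict_in_dict` helper recurses into nested dictionaries so that only oversized leaf values are replaced, instead of throwing away an entire nested dict. A runnable illustration with made-up data:

import sys

def _trim_dict_in_dict(data, max_val_size, replace_with):
    # Recursively replace any leaf value larger than max_val_size bytes.
    for key in data:
        if isinstance(data[key], dict):
            _trim_dict_in_dict(data[key], max_val_size, replace_with)
        elif sys.getsizeof(data[key]) > max_val_size:
            data[key] = replace_with

payload = {'meta': {'id': 'abc', 'blob': 'x' * 10000}, 'ok': True}
_trim_dict_in_dict(payload, 1024, 'VALUE_TRIMMED')
print(payload)
# {'meta': {'id': 'abc', 'blob': 'VALUE_TRIMMED'}, 'ok': True}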
|
||||
|
|
|
@ -15,8 +15,7 @@ except ImportError:
|
|||
# Import 3rd-party libs
|
||||
import copy
|
||||
import logging
|
||||
from salt.ext import six
|
||||
from salt.serializers.yamlex import merge_recursive as _yamlex_merge_recursive
|
||||
import salt.ext.six as six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -94,6 +93,7 @@ def merge_recurse(obj_a, obj_b, merge_lists=False):
|
|||
|
||||
|
||||
def merge_aggregate(obj_a, obj_b):
|
||||
from salt.serializers.yamlex import merge_recursive as _yamlex_merge_recursive
|
||||
return _yamlex_merge_recursive(obj_a, obj_b, level=1)
|
||||
|
||||
|
||||
|
|
|
@ -2980,7 +2980,7 @@ class GitPillar(GitBase):
|
|||
if repo.env:
|
||||
env = repo.env
|
||||
else:
|
||||
env = 'base' if repo.branch == repo.base else repo.branch
|
||||
env = 'base' if repo.branch == repo.base else repo.get_checkout_target()
|
||||
if repo._mountpoint:
|
||||
if self.link_mountpoint(repo):
|
||||
self.pillar_dirs[repo.linkdir] = env
|
||||
|
|
|
@ -103,10 +103,12 @@ def store_job(opts, load, event=None, mminion=None):
|
|||
log.error(emsg)
|
||||
raise KeyError(emsg)
|
||||
|
||||
try:
|
||||
mminion.returners[savefstr](load['jid'], load)
|
||||
except KeyError as e:
|
||||
log.error("Load does not contain 'jid': %s", e)
|
||||
if job_cache != 'local_cache':
|
||||
try:
|
||||
mminion.returners[savefstr](load['jid'], load)
|
||||
except KeyError as e:
|
||||
log.error("Load does not contain 'jid': %s", e)
|
||||
|
||||
mminion.returners[fstr](load)
|
||||
|
||||
if (opts.get('job_cache_store_endtime')
|
||||
|
|
|
@ -11,6 +11,7 @@ import subprocess
|
|||
import os
|
||||
import plistlib
|
||||
import time
|
||||
import xml.parsers.expat
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.modules.cmdmod
|
||||
|
@ -40,6 +41,11 @@ __salt__ = {
|
|||
'cmd.run': salt.modules.cmdmod._run_quiet,
|
||||
}
|
||||
|
||||
if six.PY2:
|
||||
class InvalidFileException(Exception):
|
||||
pass
|
||||
plistlib.InvalidFileException = InvalidFileException
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
|
@ -301,6 +307,12 @@ def launchctl(sub_cmd, *args, **kwargs):
|
|||
def _available_services(refresh=False):
|
||||
'''
|
||||
This is a helper function for getting the available macOS services.
|
||||
|
||||
The strategy is to look through the known system locations for
|
||||
launchd plist files, parse them, and use their information for
|
||||
populating the list of services. Services can run without a plist
|
||||
file present, but normally services which have an automated startup
|
||||
will have a plist file, so this is a minor compromise.
|
||||
'''
|
||||
try:
|
||||
if __context__['available_services'] and not refresh:
|
||||
|
@ -316,6 +328,15 @@ def _available_services(refresh=False):
|
|||
'/System/Library/LaunchAgents',
|
||||
'/System/Library/LaunchDaemons',
|
||||
]
|
||||
|
||||
try:
|
||||
for user in os.listdir('/Users/'):
|
||||
agent_path = '/Users/{}/Library/LaunchAgents'.format(user)
|
||||
if os.path.isdir(agent_path):
|
||||
launchd_paths.append(agent_path)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
_available_services = dict()
|
||||
for launch_dir in launchd_paths:
|
||||
for root, dirs, files in salt.utils.path.os_walk(launch_dir):
|
||||
|
@@ -328,39 +349,59 @@ def _available_services(refresh=False):
|
|||
# Follow symbolic links of files in _launchd_paths
|
||||
file_path = os.path.join(root, file_name)
|
||||
true_path = os.path.realpath(file_path)
|
||||
|
||||
log.trace('Gathering service info for %s', true_path)
|
||||
# ignore broken symlinks
|
||||
if not os.path.exists(true_path):
|
||||
continue
|
||||
|
||||
try:
|
||||
# This assumes most of the plist files
|
||||
# will be already in XML format
|
||||
plist = plistlib.readPlist(true_path)
|
||||
if six.PY2:
|
||||
# py2 plistlib can't read binary plists, and
|
||||
# uses a different API than py3.
|
||||
plist = plistlib.readPlist(true_path)
|
||||
else:
|
||||
with salt.utils.files.fopen(true_path, 'rb') as handle:
|
||||
plist = plistlib.load(handle)
|
||||
|
||||
except Exception:
|
||||
# If plistlib is unable to read the file we'll need to use
|
||||
# the system provided plutil program to do the conversion
|
||||
except plistlib.InvalidFileException:
|
||||
# Raised in python3 if the file is not XML.
|
||||
# There's nothing we can do; move on to the next one.
|
||||
msg = 'Unable to parse "%s" as it is invalid XML: InvalidFileException.'
|
||||
logging.warning(msg, true_path)
|
||||
continue
|
||||
|
||||
except xml.parsers.expat.ExpatError:
|
||||
# Raised by py2 for all errors.
|
||||
# Raised by py3 if the file is XML, but with errors.
|
||||
if six.PY3:
|
||||
# There's an error in the XML, so move on.
|
||||
msg = 'Unable to parse "%s" as it is invalid XML: xml.parsers.expat.ExpatError.'
|
||||
logging.warning(msg, true_path)
|
||||
continue
|
||||
|
||||
# Use the system provided plutil program to attempt
|
||||
# conversion from binary.
|
||||
cmd = '/usr/bin/plutil -convert xml1 -o - -- "{0}"'.format(
|
||||
true_path)
|
||||
plist_xml = __salt__['cmd.run'](cmd)
|
||||
if six.PY2:
|
||||
try:
|
||||
plist_xml = __salt__['cmd.run'](cmd)
|
||||
plist = plistlib.readPlistFromString(plist_xml)
|
||||
else:
|
||||
plist = plistlib.loads(
|
||||
salt.utils.stringutils.to_bytes(plist_xml))
|
||||
except xml.parsers.expat.ExpatError:
|
||||
# There's still an error in the XML, so move on.
|
||||
msg = 'Unable to parse "%s" as it is invalid XML: xml.parsers.expat.ExpatError.'
|
||||
logging.warning(msg, true_path)
|
||||
continue
|
||||
|
||||
try:
|
||||
_available_services[plist.Label.lower()] = {
|
||||
'file_name': file_name,
|
||||
'file_path': true_path,
|
||||
'plist': plist}
|
||||
except AttributeError:
|
||||
# Handle malformed plist files
|
||||
_available_services[os.path.basename(file_name).lower()] = {
|
||||
# not all launchd plists contain a Label key
|
||||
_available_services[plist['Label'].lower()] = {
|
||||
'file_name': file_name,
|
||||
'file_path': true_path,
|
||||
'plist': plist}
|
||||
except KeyError:
|
||||
log.debug('Service %s does not contain a'
|
||||
' Label key. Skipping.', true_path)
|
||||
continue
|
||||
|
||||
# put this in __context__ as this is a time consuming function.
|
||||
# a fix for this issue. https://github.com/saltstack/salt/issues/48414
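The parsing logic above splits by Python major version: py2 keeps `plistlib.readPlist`/`readPlistFromString` and py3 uses `plistlib.load`, with the system `plutil` tool as the fallback for plists that cannot be read directly. A condensed, hedged sketch of that strategy (error handling simplified):

import plistlib
import subprocess
import sys

def read_plist(path):
    # Sketch only: parse a launchd plist on either Python major version.
    if sys.version_info[0] == 2:
        try:
            return plistlib.readPlist(path)          # XML plists only on py2
        except Exception:
            xml = subprocess.check_output(
                ['/usr/bin/plutil', '-convert', 'xml1', '-o', '-', '--', path])
            return plistlib.readPlistFromString(xml)
    with open(path, 'rb') as handle:
        return plistlib.load(handle)                 # XML or binary on py3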
|
||||
|
|
|
@ -40,8 +40,8 @@ class TimedProc(object):
|
|||
|
||||
if self.timeout and not isinstance(self.timeout, (int, float)):
|
||||
raise salt.exceptions.TimedProcTimeoutError('Error: timeout {0} must be a number'.format(self.timeout))
|
||||
if six.PY2 and kwargs.get('shell', False):
|
||||
args = salt.utils.stringutils.to_bytes(args)
|
||||
if kwargs.get('shell', False):
|
||||
args = salt.utils.data.decode(args, to_str=True)
|
||||
|
||||
try:
|
||||
self.process = subprocess.Popen(args, **kwargs)
|
||||
|
|
|
@ -16,6 +16,11 @@ import time
|
|||
# Import salt libs
|
||||
import salt.utils.data
|
||||
from salt.utils.timeout import wait_for
|
||||
import salt.ext.six as six
|
||||
|
||||
# Workaround for 'reload' builtin of py2.7
|
||||
if six.PY3:
|
||||
from importlib import reload # pylint: disable=no-name-in-module
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
@ -135,13 +140,6 @@ def vb_get_manager():
|
|||
'''
|
||||
global _virtualboxManager
|
||||
if _virtualboxManager is None and HAS_LIBS:
|
||||
try:
|
||||
from importlib import reload
|
||||
except ImportError:
|
||||
# If we get here, we are in py2 and reload is a built-in.
|
||||
pass
|
||||
|
||||
# Reloading the API extends sys.paths for subprocesses of multiprocessing, since they seem to share contexts
|
||||
reload(vboxapi)
|
||||
_virtualboxManager = vboxapi.VirtualBoxManager(None, None)
|
||||
|
||||
|
|
|
@ -53,6 +53,7 @@ except ImportError:
|
|||
|
||||
# Import salt libs
|
||||
import salt.utils.crypt
|
||||
import salt.utils.data
|
||||
import salt.utils.stringutils
|
||||
from salt.ext.six import string_types
|
||||
from salt.log.setup import LOG_LEVELS
|
||||
|
@ -685,7 +686,7 @@ class Terminal(object):
|
|||
stdout = None
|
||||
else:
|
||||
if self.stream_stdout:
|
||||
self.stream_stdout.write(stdout)
|
||||
self.stream_stdout.write(salt.utils.data.encode(stdout))
|
||||
self.stream_stdout.flush()
|
||||
|
||||
if self.stdout_logger:
|
||||
|
|
|
@ -271,7 +271,7 @@ def make_inheritable(token):
|
|||
win32con.DUPLICATE_SAME_ACCESS)
|
||||
|
||||
|
||||
def runas_system(cmd, username, password):
|
||||
def runas_system(cmd, username, password, cwd=None):
|
||||
# This only works as system, when salt is running as a service for example
|
||||
|
||||
# Check for a domain
|
||||
|
@ -310,8 +310,8 @@ def runas_system(cmd, username, password):
|
|||
|
||||
except win32security.error as exc:
|
||||
# User doesn't have admin, use existing token
|
||||
if exc[0] == winerror.ERROR_NO_SUCH_LOGON_SESSION \
|
||||
or exc[0] == winerror.ERROR_PRIVILEGE_NOT_HELD:
|
||||
if exc.winerror == winerror.ERROR_NO_SUCH_LOGON_SESSION \
|
||||
or exc.winerror == winerror.ERROR_PRIVILEGE_NOT_HELD:
|
||||
elevated_token = token
|
||||
else:
|
||||
raise
|
||||
|
@ -352,7 +352,7 @@ def runas_system(cmd, username, password):
|
|||
1,
|
||||
0,
|
||||
user_environment,
|
||||
None,
|
||||
cwd,
|
||||
startup_info)
|
||||
|
||||
hProcess, hThread, PId, TId = \
|
||||
|
@ -397,7 +397,7 @@ def runas(cmd, username, password, cwd=None):
|
|||
# This only works when not running under the system account
|
||||
# Debug mode for example
|
||||
if salt.utils.win_functions.get_current_user() == 'SYSTEM':
|
||||
return runas_system(cmd, username, password)
|
||||
return runas_system(cmd, username, password, cwd)
|
||||
|
||||
# Create a pipe to set as stdout in the child. The write handle needs to be
|
||||
# inheritable.
|
||||
|
|
|
@ -8,18 +8,28 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||
# Import Salt Testing Libs
|
||||
from tests.support.case import ShellCase
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.platform
|
||||
|
||||
|
||||
class BatchTest(ShellCase):
|
||||
'''
|
||||
Integration tests for the salt.cli.batch module
|
||||
'''
|
||||
if salt.utils.platform.is_windows():
|
||||
run_timeout = 90
|
||||
else:
|
||||
run_timeout = 30
|
||||
|
||||
def test_batch_run(self):
|
||||
'''
|
||||
Tests executing a simple batch command to help catch regressions
|
||||
'''
|
||||
ret = 'Executing run on [{0}]'.format(repr('sub_minion'))
|
||||
cmd = self.run_salt('"*minion" test.echo "batch testing" -b 50%')
|
||||
cmd = self.run_salt(
|
||||
'"*minion" test.echo "batch testing" -b 50%',
|
||||
timeout=self.run_timeout,
|
||||
)
|
||||
self.assertIn(ret, cmd)
|
||||
|
||||
def test_batch_run_number(self):
|
||||
|
@ -28,7 +38,10 @@ class BatchTest(ShellCase):
|
|||
a percentage with full batch CLI call.
|
||||
'''
|
||||
ret = "Executing run on [{0}, {1}]".format(repr('minion'), repr('sub_minion'))
|
||||
cmd = self.run_salt('"*minion" test.ping --batch-size 2')
|
||||
cmd = self.run_salt(
|
||||
'"*minion" test.ping --batch-size 2',
|
||||
timeout=self.run_timeout,
|
||||
)
|
||||
self.assertIn(ret, cmd)
|
||||
|
||||
def test_batch_run_grains_targeting(self):
|
||||
|
@ -45,7 +58,10 @@ class BatchTest(ShellCase):
|
|||
os_grain = item
|
||||
|
||||
os_grain = os_grain.strip()
|
||||
cmd = self.run_salt('-C "G@os:{0} and not localhost" -b 25% test.ping'.format(os_grain))
|
||||
cmd = self.run_salt(
|
||||
'-C "G@os:{0} and not localhost" -b 25% test.ping'.format(os_grain),
|
||||
timeout=self.run_timeout,
|
||||
)
|
||||
self.assertIn(sub_min_ret, cmd)
|
||||
self.assertIn(min_ret, cmd)
|
||||
|
||||
|
@ -53,5 +69,9 @@ class BatchTest(ShellCase):
|
|||
'''
|
||||
Test that a failed state returns a non-zero exit code in batch mode
|
||||
'''
|
||||
cmd = self.run_salt(' "*minion" state.single test.fail_without_changes name=test_me -b 33%', with_retcode=True)
|
||||
cmd = self.run_salt(
|
||||
' "*" state.single test.fail_without_changes name=test_me -b 25%',
|
||||
with_retcode=True,
|
||||
timeout=self.run_timeout,
|
||||
)
|
||||
self.assertEqual(cmd[-1], 2)
|
||||
|
|
|
@ -15,6 +15,7 @@ from tests.support.helpers import expensiveTest, generate_random_name
|
|||
# Import Salt Libs
|
||||
from salt.config import cloud_providers_config
|
||||
|
||||
|
||||
# Create the cloud instance name to be used throughout the tests
|
||||
INSTANCE_NAME = generate_random_name('CLOUD-TEST-')
|
||||
PROVIDER_NAME = 'digitalocean'
|
||||
|
@ -80,7 +81,7 @@ class DigitalOceanTest(ShellCase):
|
|||
'''
|
||||
_list_locations = self.run_cloud('--list-locations {0}'.format(PROVIDER_NAME))
|
||||
self.assertIn(
|
||||
'San Francisco 1',
|
||||
'San Francisco 2',
|
||||
[i.strip() for i in _list_locations]
|
||||
)
|
||||
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
ec2-test:
|
||||
provider: ec2-config
|
||||
image: ami-98aa1cf0
|
||||
size: m1.large
|
||||
sh_username: ec2-user
|
||||
image: ami-3ecc8f46
|
||||
size: c5.large
|
||||
sh_username: centos
|
||||
script_args: '-P'
|
||||
ec2-win2012r2-test:
|
||||
provider: ec2-config
|
||||
size: m1.large
|
||||
image: ami-eb1ecd96
|
||||
size: c5.large
|
||||
image: ami-02e27664434db6def
|
||||
smb_port: 445
|
||||
win_installer: ''
|
||||
win_username: Administrator
|
||||
|
@ -19,8 +19,8 @@ ec2-win2012r2-test:
|
|||
deploy: True
|
||||
ec2-win2016-test:
|
||||
provider: ec2-config
|
||||
size: m1.large
|
||||
image: ami-ed14c790
|
||||
size: c5.large
|
||||
image: ami-017bf00eb0d4c7182
|
||||
smb_port: 445
|
||||
win_installer: ''
|
||||
win_username: Administrator
|
||||
|
|
|
@ -5,6 +5,10 @@ Tests for various minion timeouts
|
|||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
import sys
|
||||
|
||||
import salt.utils.platform
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.case import ShellCase
|
||||
|
@ -21,8 +25,18 @@ class MinionTimeoutTestCase(ShellCase):
|
|||
'''
|
||||
# Launch the command
|
||||
sleep_length = 30
|
||||
ret = self.run_salt('minion test.sleep {0}'.format(sleep_length), timeout=45)
|
||||
self.assertTrue(isinstance(ret, list), 'Return is not a list. Minion'
|
||||
if salt.utils.platform.is_windows():
|
||||
popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
|
||||
else:
|
||||
popen_kwargs = None
|
||||
ret = self.run_salt(
|
||||
'minion test.sleep {0}'.format(sleep_length),
|
||||
timeout=45,
|
||||
catch_stderr=True,
|
||||
popen_kwargs=popen_kwargs,
|
||||
)
|
||||
self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
|
||||
' may have returned error: {0}'.format(ret))
|
||||
self.assertTrue('True' in ret[1], 'Minion did not return True after '
|
||||
'{0} seconds.'.format(sleep_length))
|
||||
self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
|
||||
self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
|
||||
'{0} seconds. ret={1}'.format(sleep_length, ret))
|
||||
|
|
|
@ -31,6 +31,9 @@ class BeaconsAddDeleteTest(ModuleCase):
|
|||
if os.path.isfile(self.beacons_config_file_path):
|
||||
os.unlink(self.beacons_config_file_path)
|
||||
|
||||
# Reset beacons
|
||||
self.run_function('beacons.reset')
|
||||
|
||||
def test_add_and_delete(self):
|
||||
'''
|
||||
Test adding and deleting a beacon
|
||||
|
@ -81,6 +84,9 @@ class BeaconsTest(ModuleCase):
|
|||
self.run_function('beacons.delete', ['ps'])
|
||||
self.run_function('beacons.save')
|
||||
|
||||
# Reset beacons
|
||||
self.run_function('beacons.reset')
|
||||
|
||||
def test_disable(self):
|
||||
'''
|
||||
Test disabling beacons
|
||||
|
|
|
@ -367,6 +367,12 @@ class WinSystemModuleTest(ModuleCase):
|
|||
'''
|
||||
Validate the date/time functions in the win_system module
|
||||
'''
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
if subprocess.call('w32tm /resync', shell=True) != 0:
|
||||
log.error("Re-syncing time failed")
|
||||
|
||||
def test_get_computer_name(self):
|
||||
'''
|
||||
Test getting the computer name
|
||||
|
@ -400,6 +406,7 @@ class WinSystemModuleTest(ModuleCase):
|
|||
|
||||
@flaky
|
||||
@destructiveTest
|
||||
@flaky
|
||||
def test_set_system_time(self):
|
||||
'''
|
||||
Test setting the system time
|
||||
|
|
|
@ -358,6 +358,38 @@ class GitPythonMixin(object):
|
|||
"available on the salt master"]}
|
||||
)
|
||||
|
||||
def test_includes_enabled_solves___env___with_mountpoint(self):
|
||||
'''
|
||||
Test with git_pillar_includes enabled and using "__env__" as the branch
|
||||
name for the configured repositories.
|
||||
The "gitinfo" repository contains top.sls file with a local reference
|
||||
and also referencing external "nowhere.foo" which is provided by "webinfo"
|
||||
repository mounted as "nowhere".
|
||||
'''
|
||||
ret = self.get_pillar('''\
|
||||
file_ignore_regex: []
|
||||
file_ignore_glob: []
|
||||
git_pillar_provider: gitpython
|
||||
cachedir: {cachedir}
|
||||
extension_modules: {extmods}
|
||||
ext_pillar:
|
||||
- git:
|
||||
- __env__ {url_extra_repo}:
|
||||
- name: gitinfo
|
||||
- __env__ {url}:
|
||||
- name: webinfo
|
||||
- mountpoint: nowhere
|
||||
''')
|
||||
self.assertEqual(
|
||||
ret,
|
||||
{'branch': 'master',
|
||||
'motd': 'The force will be with you. Always.',
|
||||
'mylist': ['master'],
|
||||
'mydict': {'master': True,
|
||||
'nested_list': ['master'],
|
||||
'nested_dict': {'master': True}}}
|
||||
)
|
||||
|
||||
|
||||
@destructiveTest
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
|
@ -413,7 +445,12 @@ class TestGitPythonAuthenticatedHTTP(TestGitPythonHTTP, GitPythonMixin):
|
|||
username=cls.username,
|
||||
password=cls.password,
|
||||
port=cls.nginx_port)
|
||||
cls.url_extra_repo = 'http://{username}:{password}@127.0.0.1:{port}/extra_repo.git'.format(
|
||||
username=cls.username,
|
||||
password=cls.password,
|
||||
port=cls.nginx_port)
|
||||
cls.ext_opts['url'] = cls.url
|
||||
cls.ext_opts['url_extra_repo'] = cls.url_extra_repo
|
||||
cls.ext_opts['username'] = cls.username
|
||||
cls.ext_opts['password'] = cls.password
|
||||
|
||||
|
@ -1192,6 +1229,40 @@ class TestPygit2SSH(GitPillarSSHTestBase):
|
|||
''')
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_includes_enabled_solves___env___with_mountpoint(self):
|
||||
'''
|
||||
Test with git_pillar_includes enabled and using "__env__" as the branch
|
||||
name for the configured repositories.
|
||||
The "gitinfo" repository contains top.sls file with a local reference
|
||||
and also referencing external "nowhere.foo" which is provided by "webinfo"
|
||||
repository mounted as "nowhere".
|
||||
'''
|
||||
ret = self.get_pillar('''\
|
||||
file_ignore_regex: []
|
||||
file_ignore_glob: []
|
||||
git_pillar_provider: pygit2
|
||||
git_pillar_pubkey: {pubkey_nopass}
|
||||
git_pillar_privkey: {privkey_nopass}
|
||||
cachedir: {cachedir}
|
||||
extension_modules: {extmods}
|
||||
ext_pillar:
|
||||
- git:
|
||||
- __env__ {url_extra_repo}:
|
||||
- name: gitinfo
|
||||
- __env__ {url}:
|
||||
- name: webinfo
|
||||
- mountpoint: nowhere
|
||||
''')
|
||||
self.assertEqual(
|
||||
ret,
|
||||
{'branch': 'master',
|
||||
'motd': 'The force will be with you. Always.',
|
||||
'mylist': ['master'],
|
||||
'mydict': {'master': True,
|
||||
'nested_list': ['master'],
|
||||
'nested_dict': {'master': True}}}
|
||||
)
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
@skipIf(_windows_or_mac(), 'minion is windows or mac')
|
||||
|
@ -1439,6 +1510,38 @@ class TestPygit2HTTP(GitPillarHTTPTestBase):
|
|||
''')
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_includes_enabled_solves___env___with_mountpoint(self):
|
||||
'''
|
||||
Test with git_pillar_includes enabled and using "__env__" as the branch
|
||||
name for the configured repositories.
|
||||
The "gitinfo" repository contains top.sls file with a local reference
|
||||
and also referencing external "nowhere.foo" which is provided by "webinfo"
|
||||
repository mounted as "nowhere".
|
||||
'''
|
||||
ret = self.get_pillar('''\
|
||||
file_ignore_regex: []
|
||||
file_ignore_glob: []
|
||||
git_pillar_provider: pygit2
|
||||
cachedir: {cachedir}
|
||||
extension_modules: {extmods}
|
||||
ext_pillar:
|
||||
- git:
|
||||
- __env__ {url_extra_repo}:
|
||||
- name: gitinfo
|
||||
- __env__ {url}:
|
||||
- name: webinfo
|
||||
- mountpoint: nowhere
|
||||
''')
|
||||
self.assertEqual(
|
||||
ret,
|
||||
{'branch': 'master',
|
||||
'motd': 'The force will be with you. Always.',
|
||||
'mylist': ['master'],
|
||||
'mydict': {'master': True,
|
||||
'nested_list': ['master'],
|
||||
'nested_dict': {'master': True}}}
|
||||
)
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
@skipIf(_windows_or_mac(), 'minion is windows or mac')
|
||||
|
@ -1887,3 +1990,44 @@ class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
|
|||
- env: base
|
||||
''')
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_includes_enabled_solves___env___with_mountpoint(self):
|
||||
'''
|
||||
Test with git_pillar_includes enabled and using "__env__" as the branch
|
||||
name for the configured repositories.
|
||||
The "gitinfo" repository contains top.sls file with a local reference
|
||||
and also referencing external "nowhere.foo" which is provided by "webinfo"
|
||||
repository mounted as "nowhere".
|
||||
'''
|
||||
ret = self.get_pillar('''\
|
||||
file_ignore_regex: []
|
||||
file_ignore_glob: []
|
||||
git_pillar_provider: pygit2
|
||||
git_pillar_user: {user}
|
||||
git_pillar_password: {password}
|
||||
git_pillar_insecure_auth: True
|
||||
cachedir: {cachedir}
|
||||
extension_modules: {extmods}
|
||||
ext_pillar:
|
||||
- git:
|
||||
- __env__ {url_extra_repo}:
|
||||
- name: gitinfo
|
||||
- user: {user}
|
||||
- password: {password}
|
||||
- insecure_auth: True
|
||||
- __env__ {url}:
|
||||
- name: webinfo
|
||||
- mountpoint: nowhere
|
||||
- user: {user}
|
||||
- password: {password}
|
||||
- insecure_auth: True
|
||||
''')
|
||||
self.assertEqual(
|
||||
ret,
|
||||
{'branch': 'master',
|
||||
'motd': 'The force will be with you. Always.',
|
||||
'mylist': ['master'],
|
||||
'mydict': {'master': True,
|
||||
'nested_list': ['master'],
|
||||
'nested_dict': {'master': True}}}
|
||||
)
|
||||
|
|
|
@@ -37,3 +37,40 @@ class ManageTest(ShellCase):
|
|||
'''
|
||||
ret = self.run_run_plus('jobs.list_jobs')
|
||||
self.assertIsInstance(ret['return'], dict)
|
||||
|
||||
|
||||
class LocalCacheTargetTest(ShellCase):
|
||||
'''
|
||||
Test that a job stored in the local_cache has target information
|
||||
'''
|
||||
|
||||
def test_target_info(self):
'''
This is a test case for issue #48734

PR #43454 fixed an issue where "jobs.lookup_jid" was not working
correctly with external job caches. However, this fix for external
job caches broke some inner workings of job storage when using the
local_cache.

We need to preserve the previous behavior for the local_cache, but
keep the new behavior for other external job caches.

If "savefstr" is called in the local cache, the target data does not
get written to the local_cache, and the target-type gets listed as a
"list" type instead of "glob".

This is a regression test for fixing the local_cache behavior.
'''
|
||||
self.run_salt('minion test.echo target_info_test')
|
||||
ret = self.run_run_plus('jobs.list_jobs')
|
||||
for item in ret['return'].values():
|
||||
if item['Function'] == 'test.echo' and \
|
||||
item['Arguments'][0] == 'target_info_test':
|
||||
job_ret = item
|
||||
tgt = job_ret['Target']
|
||||
tgt_type = job_ret['Target-type']
|
||||
|
||||
assert tgt != 'unknown-target'
|
||||
assert tgt in ['minion', 'sub_minion']
|
||||
assert tgt_type == 'glob'
|
||||
|
|
|
@ -327,29 +327,6 @@ class MatchTest(ShellCase, ShellCaseCommonTestsMixin):
|
|||
data = self.run_salt('-d "*" user')
|
||||
self.assertIn('user.add:', data)
|
||||
|
||||
@flaky
|
||||
def test_salt_documentation_arguments_not_assumed(self):
|
||||
'''
|
||||
Test to see if we're not auto-adding '*' and 'sys.doc' to the call
|
||||
'''
|
||||
os_family = self.run_call('--local grains.get os_family')[1].strip()
|
||||
if os_family == 'Arch':
|
||||
self.skipTest('This test is failing in Arch due to a bug in salt-testing. '
|
||||
'Skipping until salt-testing can be upgraded. For more information, '
|
||||
'see https://github.com/saltstack/salt-jenkins/issues/324.')
|
||||
data = self.run_salt('-d -t 20')
|
||||
if data:
|
||||
assert 'user.add:' in data
|
||||
data = self.run_salt('"*" -d -t 20')
|
||||
if data:
|
||||
assert 'user.add:' in data
|
||||
data = self.run_salt('"*" -d user -t 20')
|
||||
assert 'user.add:' in data
|
||||
data = self.run_salt('"*" sys.doc -d user -t 20')
|
||||
assert 'user.add:' in data
|
||||
data = self.run_salt('"*" sys.doc user -t 20')
|
||||
assert 'user.add:' in data
|
||||
|
||||
def test_salt_documentation_too_many_arguments(self):
|
||||
'''
|
||||
Test to see if passing additional arguments shows an error
|
||||
|
|
52
tests/integration/states/test_beacon.py
Normal file
|
@@ -0,0 +1,52 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Integration tests for the beacon states
|
||||
'''
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.helpers import destructiveTest
|
||||
from tests.support.mixins import SaltReturnAssertsMixin
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@destructiveTest
|
||||
class BeaconStateTestCase(ModuleCase, SaltReturnAssertsMixin):
|
||||
'''
|
||||
Test beacon states
|
||||
'''
|
||||
def setUp(self):
|
||||
'''
|
||||
'''
|
||||
self.run_function('beacons.reset')
|
||||
|
||||
def tearDown(self):
|
||||
self.run_function('beacons.reset')
|
||||
|
||||
def test_present_absent(self):
|
||||
kwargs = {'/': '38%', 'interval': 5}
|
||||
ret = self.run_state(
|
||||
'beacon.present',
|
||||
name='diskusage',
|
||||
**kwargs
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
ret = self.run_function('beacons.list', return_yaml=False)
|
||||
self.assertTrue('diskusage' in ret)
|
||||
self.assertTrue({'interval': 5} in ret['diskusage'])
|
||||
self.assertTrue({'/': '38%'} in ret['diskusage'])
|
||||
|
||||
ret = self.run_state(
|
||||
'beacon.absent',
|
||||
name='diskusage',
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
ret = self.run_function('beacons.list', return_yaml=False)
|
||||
self.assertEqual(ret, {'beacons': {}})
|
|
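For reference, the assertions in test_present_absent imply roughly the following shape for the beacons.list return once beacon.present has added the diskusage beacon. The ordering of the two items is not asserted by the test, so treat this as an illustration rather than a guaranteed layout.

# Approximate shape implied by the assertions above (item order not guaranteed).
expected_after_present = {
    'diskusage': [
        {'interval': 5},
        {'/': '38%'},
    ],
}
expected_after_absent = {'beacons': {}}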
@@ -251,6 +251,7 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
|
|||
# FIXME A timeout of zero or disabling timeouts may not return results!
|
||||
timeout=15,
|
||||
raw=False,
|
||||
popen_kwargs=None,
|
||||
log_output=None):
|
||||
'''
|
||||
Execute a script with the given argument string
|
||||
|
@@ -285,11 +286,12 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
|
|||
|
||||
tmp_file = tempfile.SpooledTemporaryFile()
|
||||
|
||||
popen_kwargs = {
|
||||
popen_kwargs = popen_kwargs or {}
|
||||
popen_kwargs = dict({
|
||||
'shell': True,
|
||||
'stdout': tmp_file,
|
||||
'universal_newlines': True,
|
||||
}
|
||||
}, **popen_kwargs)
|
||||
|
||||
if catch_stderr is True:
|
||||
popen_kwargs['stderr'] = subprocess.PIPE
|
||||
|
@@ -488,7 +490,7 @@ class ShellCase(ShellTestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixi
|
|||
except OSError:
|
||||
os.chdir(INTEGRATION_TEST_DIR)
|
||||
|
||||
def run_salt(self, arg_str, with_retcode=False, catch_stderr=False, timeout=60): # pylint: disable=W0221
|
||||
def run_salt(self, arg_str, with_retcode=False, catch_stderr=False, timeout=60, popen_kwargs=None): # pylint: disable=W0221
|
||||
'''
|
||||
Execute salt
|
||||
'''
|
||||
|
@@ -497,7 +499,8 @@ class ShellCase(ShellTestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixi
|
|||
arg_str,
|
||||
with_retcode=with_retcode,
|
||||
catch_stderr=catch_stderr,
|
||||
timeout=timeout)
|
||||
timeout=timeout,
|
||||
popen_kwargs=popen_kwargs)
|
||||
log.debug('Result of run_salt for command \'%s\': %s', arg_str, ret)
|
||||
return ret
|
||||
|
||||
|
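The two hunks above follow the same pattern: build the default Popen options, then let caller-supplied popen_kwargs override or extend them. Below is a minimal standalone sketch of that merge; the 'cwd' key in the usage lines is hypothetical and not taken from the change.

# Hedged sketch of the dict-merge used above: caller-supplied kwargs win over
# the hard-coded defaults when keys collide.
def merge_popen_kwargs(popen_kwargs=None):
    popen_kwargs = popen_kwargs or {}
    defaults = {'shell': True, 'universal_newlines': True}
    return dict(defaults, **popen_kwargs)

print(merge_popen_kwargs())                   # defaults only
print(merge_popen_kwargs({'cwd': '/tmp'}))    # hypothetical extra key is added
print(merge_popen_kwargs({'shell': False}))   # caller override wins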
@@ -928,6 +931,7 @@ class ClientCase(AdaptedConfigurationTestCaseMixin, TestCase):
|
|||
else:
|
||||
raise
|
||||
|
||||
|
||||
# ----- Backwards Compatible Imports -------------------------------------------------------------------------------->
|
||||
from tests.support.mixins import ShellCaseCommonTestsMixin # pylint: disable=unused-import
|
||||
# <---- Backwards Compatible Imports ---------------------------------------------------------------------------------
|
||||
|
|
|
@@ -133,9 +133,13 @@ class SSHDMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin):
|
|||
cls.url = 'ssh://{username}@127.0.0.1:{port}/~/repo.git'.format(
|
||||
username=cls.username,
|
||||
port=cls.sshd_port)
|
||||
cls.url_extra_repo = 'ssh://{username}@127.0.0.1:{port}/~/extra_repo.git'.format(
|
||||
username=cls.username,
|
||||
port=cls.sshd_port)
|
||||
home = '/root/.ssh'
|
||||
cls.ext_opts = {
|
||||
'url': cls.url,
|
||||
'url_extra_repo': cls.url_extra_repo,
|
||||
'privkey_nopass': os.path.join(home, cls.id_rsa_nopass),
|
||||
'pubkey_nopass': os.path.join(home, cls.id_rsa_nopass + '.pub'),
|
||||
'privkey_withpass': os.path.join(home, cls.id_rsa_withpass),
|
||||
|
@@ -193,7 +197,8 @@ class WebserverMixin(ModuleCase, ProcessManager, SaltReturnAssertsMixin):
|
|||
# get_unused_localhost_port() return identical port numbers.
|
||||
cls.uwsgi_port = get_unused_localhost_port()
|
||||
cls.url = 'http://127.0.0.1:{port}/repo.git'.format(port=cls.nginx_port)
|
||||
cls.ext_opts = {'url': cls.url}
|
||||
cls.url_extra_repo = 'http://127.0.0.1:{port}/extra_repo.git'.format(port=cls.nginx_port)
|
||||
cls.ext_opts = {'url': cls.url, 'url_extra_repo': cls.url_extra_repo}
|
||||
# Add auth params if present (if so this will trigger the spawned
|
||||
# server to turn on HTTP basic auth).
|
||||
for credential_param in ('user', 'password'):
|
||||
|
@@ -250,7 +255,7 @@ class GitTestBase(ModuleCase):
|
|||
Base class for all gitfs/git_pillar tests. Must be subclassed and paired
|
||||
with either SSHDMixin or WebserverMixin to provide the server.
|
||||
'''
|
||||
case = port = bare_repo = admin_repo = None
|
||||
case = port = bare_repo = base_extra_repo = admin_repo = admin_extra_repo = None
|
||||
maxDiff = None
|
||||
git_opts = '-c user.name="Foo Bar" -c user.email=foo@bar.com'
|
||||
ext_opts = {}
|
||||
|
@@ -468,6 +473,61 @@ class GitPillarTestBase(GitTestBase, LoaderModuleMockMixin):
|
|||
'''))
|
||||
_push('top_only', 'add top_only branch')
|
||||
|
||||
def make_extra_repo(self, root_dir, user='root'):
|
||||
self.bare_extra_repo = os.path.join(root_dir, 'extra_repo.git')
|
||||
self.admin_extra_repo = os.path.join(root_dir, 'admin_extra')
|
||||
|
||||
for dirname in (self.bare_extra_repo, self.admin_extra_repo):
|
||||
shutil.rmtree(dirname, ignore_errors=True)
|
||||
|
||||
# Create bare extra repo
|
||||
self.run_function(
|
||||
'git.init',
|
||||
[self.bare_extra_repo],
|
||||
user=user,
|
||||
bare=True)
|
||||
|
||||
# Clone bare repo
|
||||
self.run_function(
|
||||
'git.clone',
|
||||
[self.admin_extra_repo],
|
||||
url=self.bare_extra_repo,
|
||||
user=user)
|
||||
|
||||
def _push(branch, message):
|
||||
self.run_function(
|
||||
'git.add',
|
||||
[self.admin_extra_repo, '.'],
|
||||
user=user)
|
||||
self.run_function(
|
||||
'git.commit',
|
||||
[self.admin_extra_repo, message],
|
||||
user=user,
|
||||
git_opts=self.git_opts,
|
||||
)
|
||||
self.run_function(
|
||||
'git.push',
|
||||
[self.admin_extra_repo],
|
||||
remote='origin',
|
||||
ref=branch,
|
||||
user=user,
|
||||
)
|
||||
|
||||
with salt.utils.files.fopen(
|
||||
os.path.join(self.admin_extra_repo, 'top.sls'), 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
"{{saltenv}}":
|
||||
'*':
|
||||
- motd
|
||||
- nowhere.foo
|
||||
'''))
|
||||
with salt.utils.files.fopen(
|
||||
os.path.join(self.admin_extra_repo, 'motd.sls'), 'w') as fp_:
|
||||
fp_.write(textwrap.dedent('''\
|
||||
motd: The force will be with you. Always.
|
||||
'''))
|
||||
_push('master', 'initial commit')
|
||||
|
||||
|
||||
class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin):
|
||||
'''
|
||||
|
@@ -536,6 +596,7 @@ class GitPillarSSHTestBase(GitPillarTestBase, SSHDMixin):
|
|||
)
|
||||
)
|
||||
self.make_repo(root_dir, user=self.username)
|
||||
self.make_extra_repo(root_dir, user=self.username)
|
||||
|
||||
def get_pillar(self, ext_pillar_conf):
|
||||
'''
|
||||
|
@@ -582,3 +643,4 @@ class GitPillarHTTPTestBase(GitPillarTestBase, WebserverMixin):
|
|||
self.spawn_server() # pylint: disable=E1120
|
||||
|
||||
self.make_repo(self.repo_dir)
|
||||
self.make_extra_repo(self.repo_dir)
|
||||
|
|
|
@@ -821,15 +821,44 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'container1': {'Config': {},
|
||||
'HostConfig': {
|
||||
'Ulimits': [
|
||||
{u'Hard': -1, u'Soft': -1, u'Name': u'core'},
|
||||
{u'Hard': 65536, u'Soft': 65536, u'Name': u'nofile'}
|
||||
{'Hard': -1, 'Soft': -1, 'Name': 'core'},
|
||||
{'Hard': 65536, 'Soft': 65536, 'Name': 'nofile'}
|
||||
]
|
||||
}},
|
||||
'container2': {'Config': {},
|
||||
'HostConfig': {
|
||||
'Ulimits': [
|
||||
{u'Hard': 65536, u'Soft': 65536, u'Name': u'nofile'},
|
||||
{u'Hard': -1, u'Soft': -1, u'Name': u'core'}
|
||||
{'Hard': 65536, 'Soft': 65536, 'Name': 'nofile'},
|
||||
{'Hard': -1, 'Soft': -1, 'Name': 'core'}
|
||||
]
|
||||
}},
|
||||
}[id_]
|
||||
|
||||
inspect_container_mock = MagicMock(side_effect=_inspect_container_effect)
|
||||
|
||||
with patch.object(docker_mod, 'inspect_container', inspect_container_mock):
|
||||
ret = docker_mod.compare_container('container1', 'container2')
|
||||
self.assertEqual(ret, {})
|
||||
|
||||
def test_compare_container_env_order(self):
|
||||
'''
|
||||
Test comparing two containers when the order of the Env HostConfig
|
||||
values is different, but the values are the same.
|
||||
'''
|
||||
def _inspect_container_effect(id_):
|
||||
return {
|
||||
'container1': {'Config': {},
|
||||
'HostConfig': {
|
||||
'Env': [
|
||||
'FOO=bar',
|
||||
'HELLO=world',
|
||||
]
|
||||
}},
|
||||
'container2': {'Config': {},
|
||||
'HostConfig': {
|
||||
'Env': [
|
||||
'HELLO=world',
|
||||
'FOO=bar',
|
||||
]
|
||||
}},
|
||||
}[id_]
|
||||
|
|
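Both compare_container tests above exercise the same idea: list-valued entries such as Ulimits and Env should compare equal when they only differ in ordering. The helper below is a small, hypothetical illustration of that kind of order-insensitive comparison, not Salt's actual compare_container logic.

# Hedged sketch: order-insensitive comparison of list-valued config entries.
def lists_equal_unordered(first, second):
    def sort_key(item):
        # Dicts are keyed by their sorted items so they sort deterministically.
        return sorted(item.items()) if isinstance(item, dict) else item
    return sorted(first, key=sort_key) == sorted(second, key=sort_key)

ulimits_a = [{'Hard': -1, 'Soft': -1, 'Name': 'core'},
             {'Hard': 65536, 'Soft': 65536, 'Name': 'nofile'}]
print(lists_equal_unordered(ulimits_a, list(reversed(ulimits_a))))   # True
print(lists_equal_unordered(['FOO=bar', 'HELLO=world'],
                            ['HELLO=world', 'FOO=bar']))             # True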
|
@@ -236,17 +236,64 @@ class MountTestCase(TestCase, LoaderModuleMockMixin):
|
|||
mock_swp = MagicMock(return_value=[name])
|
||||
mock_fs = MagicMock(return_value={'none': {'device': name,
|
||||
'fstype': 'xfs'}})
|
||||
mock_fs_diff = MagicMock(return_value={'none': {'device': 'something_else',
|
||||
'fstype': 'xfs'}})
|
||||
mock_aixfs = MagicMock(return_value={name: {'dev': name,
|
||||
'fstype': 'jfs2'}})
|
||||
mock_emt = MagicMock(return_value={})
|
||||
with patch.dict(mount.__grains__, {'os': 'test'}):
|
||||
with patch.dict(mount.__salt__, {'mount.swaps': mock_swp,
|
||||
'mount.fstab': mock_fs_diff,
|
||||
'file.is_link': mock_f}):
|
||||
with patch.dict(mount.__opts__, {'test': True}):
|
||||
comt = ('Swap {0} is set to be added to the '
|
||||
'fstab and to be activated'.format(name))
|
||||
ret.update({'comment': comt})
|
||||
self.assertDictEqual(mount.swap(name), ret)
|
||||
|
||||
with patch.dict(mount.__opts__, {'test': False}):
|
||||
comt = ('Swap {0} already active'.format(name))
|
||||
ret.update({'comment': comt, 'result': True})
|
||||
self.assertDictEqual(mount.swap(name, persist=False), ret)
|
||||
|
||||
with patch.dict(mount.__salt__, {'mount.fstab': mock_emt,
|
||||
'mount.set_fstab': mock}):
|
||||
comt = ('Swap {0} already active'.format(name))
|
||||
ret.update({'comment': comt, 'result': True})
|
||||
self.assertDictEqual(mount.swap(name), ret)
|
||||
|
||||
comt = ('Swap /mnt/sdb already active. '
|
||||
'Added new entry to the fstab.')
|
||||
ret.update({'comment': comt, 'result': True,
|
||||
'changes': {'persist': 'new'}})
|
||||
self.assertDictEqual(mount.swap(name), ret)
|
||||
|
||||
comt = ('Swap /mnt/sdb already active. '
|
||||
'Updated the entry in the fstab.')
|
||||
ret.update({'comment': comt, 'result': True,
|
||||
'changes': {'persist': 'update'}})
|
||||
self.assertDictEqual(mount.swap(name), ret)
|
||||
|
||||
comt = ('Swap /mnt/sdb already active. '
|
||||
'However, the fstab was not found.')
|
||||
ret.update({'comment': comt, 'result': False,
|
||||
'changes': {}})
|
||||
self.assertDictEqual(mount.swap(name), ret)
|
||||
|
||||
ret = {'name': name,
|
||||
'result': None,
|
||||
'comment': '',
|
||||
'changes': {}}
|
||||
|
||||
mock = MagicMock(side_effect=['present', 'new', 'change', 'bad config'])
|
||||
mock_emt = MagicMock(return_value={})
|
||||
with patch.dict(mount.__grains__, {'os': 'test'}):
|
||||
with patch.dict(mount.__salt__, {'mount.swaps': mock_swp,
|
||||
'mount.fstab': mock_fs,
|
||||
'file.is_link': mock_f}):
|
||||
with patch.dict(mount.__opts__, {'test': True}):
|
||||
comt = ('Swap {0} is set to be added to the '
|
||||
'fstab and to be activated'.format(name))
|
||||
ret.update({'comment': comt})
|
||||
comt = ('Swap {0} already active'.format(name))
|
||||
ret.update({'comment': comt, 'result': True})
|
||||
self.assertDictEqual(mount.swap(name), ret)
|
||||
|
||||
with patch.dict(mount.__opts__, {'test': False}):
|
||||
|
|
|
@@ -46,6 +46,9 @@ class SysctlTestCase(TestCase, LoaderModuleMockMixin):
|
|||
|
||||
ret = {'name': name, 'result': None, 'changes': {}, 'comment': comment}
|
||||
|
||||
comment_empty = ('Sysctl option {0} would be changed to {1}'
|
||||
''.format(name, value))
|
||||
|
||||
comment1 = ('Sysctl option {0} set to be changed to {1}'
|
||||
.format(name, value))
|
||||
|
||||
|
@@ -91,10 +94,15 @@ class SysctlTestCase(TestCase, LoaderModuleMockMixin):
|
|||
return [name]
|
||||
|
||||
with patch.dict(sysctl.__opts__, {'test': True}):
|
||||
mock = MagicMock(return_value=False)
|
||||
mock = MagicMock(return_value=None)
|
||||
with patch.dict(sysctl.__salt__, {'sysctl.show': mock}):
|
||||
self.assertDictEqual(sysctl.present(name, value), ret)
|
||||
|
||||
mock = MagicMock(return_value=[])
|
||||
with patch.dict(sysctl.__salt__, {'sysctl.show': mock}):
|
||||
ret.update({'comment': comment_empty})
|
||||
self.assertDictEqual(sysctl.present(name, value), ret)
|
||||
|
||||
with patch.dict(sysctl.__salt__, {'sysctl.show': mock_current}):
|
||||
ret.update({'comment': comment1})
|
||||
self.assertDictEqual(sysctl.present(name, value), ret)
|
||||
|
|
35
tests/unit/utils/test_dicttrim.py
Normal file
|
@@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.dicttrim as dicttrimmer
|
||||
|
||||
import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DictTrimTestCase(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.old_dict = {'a': 'b', 'c': 'x' * 10000}
|
||||
self.new_dict = {'a': 'b', 'c': 'VALUE_TRIMMED'}
|
||||
|
||||
def test_trim_dict(self):
|
||||
ret = dicttrimmer.trim_dict(self.old_dict, 1000)
|
||||
self.assertEqual(ret, self.new_dict)
|
||||
|
||||
|
||||
class RecursiveDictTrimTestCase(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.old_dict = {'a': {'b': 1, 'c': 2, 'e': 'x' * 10000, 'f': '3'}}
|
||||
self.new_dict = {'a': {'b': 1, 'c': 2, 'e': 'VALUE_TRIMMED', 'f': '3'}}
|
||||
|
||||
def test_trim_dict(self):
|
||||
ret = dicttrimmer.trim_dict(self.old_dict, 1000)
|
||||
self.assertEqual(ret, self.new_dict)
|
|
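Taken together, the two test cases pin down the observable behaviour of trim_dict: values that blow past the size limit are replaced with the placeholder 'VALUE_TRIMMED', and nested dicts are handled recursively. The sketch below only restates what these tests assert; the per-value length check is purely illustrative and it is not the actual salt.utils.dicttrim implementation.

# Hedged restatement of the behaviour asserted above (illustrative only).
def trim_dict_sketch(data, max_val_size, placeholder='VALUE_TRIMMED'):
    trimmed = {}
    for key, value in data.items():
        if isinstance(value, dict):
            trimmed[key] = trim_dict_sketch(value, max_val_size, placeholder)
        elif len(str(value)) > max_val_size:
            trimmed[key] = placeholder
        else:
            trimmed[key] = value
    return trimmed

assert trim_dict_sketch({'a': 'b', 'c': 'x' * 10000}, 1000) == \
    {'a': 'b', 'c': 'VALUE_TRIMMED'}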
@@ -5,11 +5,19 @@ mac_utils tests
|
|||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import os
|
||||
import plistlib
|
||||
import xml.parsers.expat
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON, call
|
||||
from tests.support.mock import (
|
||||
call,
|
||||
MagicMock,
|
||||
mock_open,
|
||||
NO_MOCK,
|
||||
NO_MOCK_REASON,
|
||||
patch
|
||||
)
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
|
||||
# Import Salt libs
|
||||
|
@@ -215,259 +223,223 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
|
||||
@patch('salt.utils.path.os_walk')
|
||||
@patch('os.path.exists')
|
||||
@patch('plistlib.readPlist')
|
||||
def test_available_services(self, mock_read_plist, mock_exists, mock_os_walk):
|
||||
def test_available_services_result(self, mock_exists, mock_os_walk):
|
||||
'''
|
||||
test available_services
|
||||
test available_services results are properly formed dicts.
|
||||
'''
|
||||
mock_os_walk.side_effect = [
|
||||
[('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])],
|
||||
[('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])],
|
||||
[('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])],
|
||||
[('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])],
|
||||
]
|
||||
|
||||
mock_read_plist.side_effect = [
|
||||
MagicMock(Label='com.apple.lla1'),
|
||||
MagicMock(Label='com.apple.lla2'),
|
||||
MagicMock(Label='com.apple.lld1'),
|
||||
MagicMock(Label='com.apple.lld2'),
|
||||
MagicMock(Label='com.apple.slla1'),
|
||||
MagicMock(Label='com.apple.slla2'),
|
||||
MagicMock(Label='com.apple.slld1'),
|
||||
MagicMock(Label='com.apple.slld2'),
|
||||
]
|
||||
|
||||
results = {'/Library/LaunchAgents': ['com.apple.lla1.plist']}
|
||||
mock_os_walk.side_effect = _get_walk_side_effects(results)
|
||||
mock_exists.return_value = True
|
||||
ret = mac_utils._available_services()
|
||||
|
||||
# Make sure it's a dict with 8 items
|
||||
self.assertTrue(isinstance(ret, dict))
|
||||
self.assertEqual(len(ret), 8)
|
||||
plists = [{'Label': 'com.apple.lla1'}]
|
||||
ret = _run_available_services(plists)
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1']['file_name'],
|
||||
'com.apple.lla1.plist')
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/Library/LaunchAgents', 'com.apple.lla1.plist')))
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2']['file_name'],
|
||||
'com.apple.slld2.plist')
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/System/Library/LaunchDaemons', 'com.apple.slld2.plist')))
|
||||
expected = {
|
||||
'com.apple.lla1': {
|
||||
'file_name': 'com.apple.lla1.plist',
|
||||
'file_path': '/Library/LaunchAgents/com.apple.lla1.plist',
|
||||
'plist': plists[0]}}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
@patch('salt.utils.path.os_walk')
|
||||
@patch('os.path.exists')
|
||||
@patch('plistlib.readPlist')
|
||||
@patch('os.listdir')
|
||||
@patch('os.path.isdir')
|
||||
def test_available_services_dirs(self,
|
||||
mock_isdir,
|
||||
mock_listdir,
|
||||
mock_exists,
|
||||
mock_os_walk):
|
||||
'''
|
||||
test available_services checks all of the expected dirs.
|
||||
'''
|
||||
results = {
|
||||
'/Library/LaunchAgents': ['com.apple.lla1.plist'],
|
||||
'/Library/LaunchDaemons': ['com.apple.lld1.plist'],
|
||||
'/System/Library/LaunchAgents': ['com.apple.slla1.plist'],
|
||||
'/System/Library/LaunchDaemons': ['com.apple.slld1.plist'],
|
||||
'/Users/saltymcsaltface/Library/LaunchAgents': [
|
||||
'com.apple.uslla1.plist']}
|
||||
|
||||
mock_os_walk.side_effect = _get_walk_side_effects(results)
|
||||
mock_listdir.return_value = ['saltymcsaltface']
|
||||
mock_isdir.return_value = True
|
||||
mock_exists.return_value = True
|
||||
|
||||
plists = [
|
||||
{'Label': 'com.apple.lla1'},
|
||||
{'Label': 'com.apple.lld1'},
|
||||
{'Label': 'com.apple.slla1'},
|
||||
{'Label': 'com.apple.slld1'},
|
||||
{'Label': 'com.apple.uslla1'}]
|
||||
ret = _run_available_services(plists)
|
||||
|
||||
self.assertEqual(len(ret), 5)
|
||||
|
||||
@patch('salt.utils.path.os_walk')
|
||||
@patch('os.path.exists')
|
||||
@patch('plistlib.readPlist' if six.PY2 else 'plistlib.load')
|
||||
def test_available_services_broken_symlink(self, mock_read_plist, mock_exists, mock_os_walk):
|
||||
'''
|
||||
test available_services
|
||||
test available_services when it encounters a broken symlink.
|
||||
'''
|
||||
mock_os_walk.side_effect = [
|
||||
[('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])],
|
||||
[('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])],
|
||||
[('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])],
|
||||
[('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])],
|
||||
]
|
||||
results = {'/Library/LaunchAgents': ['com.apple.lla1.plist', 'com.apple.lla2.plist']}
|
||||
mock_os_walk.side_effect = _get_walk_side_effects(results)
|
||||
mock_exists.side_effect = [True, False]
|
||||
|
||||
mock_read_plist.side_effect = [
|
||||
MagicMock(Label='com.apple.lla1'),
|
||||
MagicMock(Label='com.apple.lla2'),
|
||||
MagicMock(Label='com.apple.lld1'),
|
||||
MagicMock(Label='com.apple.lld2'),
|
||||
MagicMock(Label='com.apple.slld1'),
|
||||
MagicMock(Label='com.apple.slld2'),
|
||||
]
|
||||
plists = [{'Label': 'com.apple.lla1'}]
|
||||
ret = _run_available_services(plists)
|
||||
|
||||
mock_exists.side_effect = [True, True, True, True, False, False, True, True]
|
||||
ret = mac_utils._available_services()
|
||||
|
||||
# Make sure it's a dict with 6 items
|
||||
self.assertTrue(isinstance(ret, dict))
|
||||
self.assertEqual(len(ret), 6)
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1']['file_name'],
|
||||
'com.apple.lla1.plist')
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/Library/LaunchAgents', 'com.apple.lla1.plist')))
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2']['file_name'],
|
||||
'com.apple.slld2.plist')
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/System/Library/LaunchDaemons', 'com.apple.slld2.plist')))
|
||||
expected = {
|
||||
'com.apple.lla1': {
|
||||
'file_name': 'com.apple.lla1.plist',
|
||||
'file_path': '/Library/LaunchAgents/com.apple.lla1.plist',
|
||||
'plist': plists[0]}}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
@patch('salt.utils.path.os_walk')
|
||||
@patch('os.path.exists')
|
||||
@patch('plistlib.readPlist')
|
||||
@patch('salt.utils.mac_utils.__salt__')
|
||||
@patch('plistlib.readPlistFromString' if six.PY2 else 'plistlib.loads')
|
||||
def test_available_services_non_xml(self,
|
||||
mock_read_plist_from_string,
|
||||
mock_run,
|
||||
mock_read_plist,
|
||||
mock_exists,
|
||||
mock_os_walk):
|
||||
@patch('plistlib.readPlistFromString', create=True)
|
||||
def test_available_services_binary_plist(self,
|
||||
mock_read_plist_from_string,
|
||||
mock_run,
|
||||
mock_read_plist,
|
||||
mock_exists,
|
||||
mock_os_walk):
|
||||
'''
|
||||
test available_services
|
||||
test available_services handles binary plist files.
|
||||
'''
|
||||
mock_os_walk.side_effect = [
|
||||
[('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])],
|
||||
[('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])],
|
||||
[('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])],
|
||||
[('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])],
|
||||
]
|
||||
attrs = {'cmd.run': MagicMock(return_value='<some xml>')}
|
||||
|
||||
def getitem(name):
|
||||
return attrs[name]
|
||||
|
||||
mock_run.__getitem__.side_effect = getitem
|
||||
mock_run.configure_mock(**attrs)
|
||||
results = {'/Library/LaunchAgents': ['com.apple.lla1.plist']}
|
||||
mock_os_walk.side_effect = _get_walk_side_effects(results)
|
||||
mock_exists.return_value = True
|
||||
mock_read_plist.side_effect = Exception()
|
||||
mock_read_plist_from_string.side_effect = [
|
||||
MagicMock(Label='com.apple.lla1'),
|
||||
MagicMock(Label='com.apple.lla2'),
|
||||
MagicMock(Label='com.apple.lld1'),
|
||||
MagicMock(Label='com.apple.lld2'),
|
||||
MagicMock(Label='com.apple.slla1'),
|
||||
MagicMock(Label='com.apple.slla2'),
|
||||
MagicMock(Label='com.apple.slld1'),
|
||||
MagicMock(Label='com.apple.slld2'),
|
||||
]
|
||||
|
||||
ret = mac_utils._available_services()
|
||||
plists = [{'Label': 'com.apple.lla1'}]
|
||||
|
||||
cmd = '/usr/bin/plutil -convert xml1 -o - -- "{0}"'
|
||||
calls = [
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchAgents', 'com.apple.lla1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchAgents', 'com.apple.lla2.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchDaemons', 'com.apple.lld1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchDaemons', 'com.apple.lld2.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchAgents', 'com.apple.slla1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchAgents', 'com.apple.slla2.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchDaemons', 'com.apple.slld1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchDaemons', 'com.apple.slld2.plist'))),),
|
||||
]
|
||||
mock_run.assert_has_calls(calls, any_order=True)
|
||||
if six.PY2:
|
||||
attrs = {'cmd.run': MagicMock()}
|
||||
|
||||
# Make sure it's a dict with 8 items
|
||||
self.assertTrue(isinstance(ret, dict))
|
||||
self.assertEqual(len(ret), 8)
|
||||
def getitem(name):
|
||||
return attrs[name]
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1']['file_name'],
|
||||
'com.apple.lla1.plist')
|
||||
mock_run.__getitem__.side_effect = getitem
|
||||
mock_run.configure_mock(**attrs)
|
||||
cmd = '/usr/bin/plutil -convert xml1 -o - -- "{}"'.format(
|
||||
'/Library/LaunchAgents/com.apple.lla1.plist')
|
||||
calls = [call.cmd.run(cmd)]
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/Library/LaunchAgents', 'com.apple.lla1.plist')))
|
||||
mock_read_plist.side_effect = xml.parsers.expat.ExpatError
|
||||
mock_read_plist_from_string.side_effect = plists
|
||||
ret = mac_utils._available_services()
|
||||
else:
|
||||
# Py3 plistlib knows how to handle binary plists without
|
||||
# any extra work, so this test doesn't really do anything
|
||||
# new.
|
||||
ret = _run_available_services(plists)
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2']['file_name'],
|
||||
'com.apple.slld2.plist')
|
||||
expected = {
|
||||
'com.apple.lla1': {
|
||||
'file_name': 'com.apple.lla1.plist',
|
||||
'file_path': '/Library/LaunchAgents/com.apple.lla1.plist',
|
||||
'plist': plists[0]}}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/System/Library/LaunchDaemons', 'com.apple.slld2.plist')))
|
||||
if six.PY2:
|
||||
mock_run.assert_has_calls(calls, any_order=True)
|
||||
|
||||
@patch('salt.utils.path.os_walk')
|
||||
@patch('os.path.exists')
|
||||
@patch('plistlib.readPlist')
|
||||
def test_available_services_invalid_file(self, mock_exists, mock_os_walk):
|
||||
'''
|
||||
test available_services excludes invalid files.
|
||||
|
||||
The py3 plistlib raises an InvalidFileException when a plist
|
||||
file cannot be parsed. This test only asserts things for py3.
|
||||
'''
|
||||
if six.PY3:
|
||||
results = {'/Library/LaunchAgents': ['com.apple.lla1.plist']}
|
||||
mock_os_walk.side_effect = _get_walk_side_effects(results)
|
||||
mock_exists.return_value = True
|
||||
|
||||
plists = [{'Label': 'com.apple.lla1'}]
|
||||
|
||||
mock_load = MagicMock()
|
||||
mock_load.side_effect = plistlib.InvalidFileException
|
||||
with patch('salt.utils.files.fopen', mock_open()):
|
||||
with patch('plistlib.load', mock_load):
|
||||
ret = mac_utils._available_services()
|
||||
|
||||
self.assertEqual(len(ret), 0)
|
||||
|
||||
@patch('salt.utils.mac_utils.__salt__')
|
||||
@patch('plistlib.readPlistFromString' if six.PY2 else 'plistlib.loads')
|
||||
def test_available_services_non_xml_malformed_plist(self,
|
||||
mock_read_plist_from_string,
|
||||
mock_run,
|
||||
mock_read_plist,
|
||||
mock_exists,
|
||||
mock_os_walk):
|
||||
@patch('plistlib.readPlist')
|
||||
@patch('salt.utils.path.os_walk')
|
||||
@patch('os.path.exists')
|
||||
def test_available_services_expat_error(self,
|
||||
mock_exists,
|
||||
mock_os_walk,
|
||||
mock_read_plist,
|
||||
mock_run):
|
||||
'''
|
||||
test available_services
|
||||
test available_services excludes files with expat errors.
|
||||
|
||||
Poorly formed XML will raise an ExpatError on py2. It will
|
||||
also be raised by some almost-correct XML on py3.
|
||||
'''
|
||||
mock_os_walk.side_effect = [
|
||||
[('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])],
|
||||
[('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])],
|
||||
[('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])],
|
||||
[('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])],
|
||||
]
|
||||
attrs = {'cmd.run': MagicMock(return_value='<some xml>')}
|
||||
|
||||
def getitem(name):
|
||||
return attrs[name]
|
||||
|
||||
mock_run.__getitem__.side_effect = getitem
|
||||
mock_run.configure_mock(**attrs)
|
||||
results = {'/Library/LaunchAgents': ['com.apple.lla1.plist']}
|
||||
mock_os_walk.side_effect = _get_walk_side_effects(results)
|
||||
mock_exists.return_value = True
|
||||
mock_read_plist.side_effect = Exception()
|
||||
mock_read_plist_from_string.return_value = 'malformedness'
|
||||
|
||||
ret = mac_utils._available_services()
|
||||
if six.PY3:
|
||||
mock_load = MagicMock()
|
||||
mock_load.side_effect = xml.parsers.expat.ExpatError
|
||||
with patch('salt.utils.files.fopen', mock_open()):
|
||||
with patch('plistlib.load', mock_load):
|
||||
ret = mac_utils._available_services()
|
||||
else:
|
||||
attrs = {'cmd.run': MagicMock()}
|
||||
|
||||
cmd = '/usr/bin/plutil -convert xml1 -o - -- "{0}"'
|
||||
calls = [
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchAgents', 'com.apple.lla1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchAgents', 'com.apple.lla2.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchDaemons', 'com.apple.lld1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/Library/LaunchDaemons', 'com.apple.lld2.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchAgents', 'com.apple.slla1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchAgents', 'com.apple.slla2.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchDaemons', 'com.apple.slld1.plist'))),),
|
||||
call.cmd.run(cmd.format(os.path.realpath(os.path.join(
|
||||
'/System/Library/LaunchDaemons', 'com.apple.slld2.plist'))),),
|
||||
]
|
||||
mock_run.assert_has_calls(calls, any_order=True)
|
||||
def getitem(name):
|
||||
return attrs[name]
|
||||
|
||||
# Make sure it's a dict with 8 items
|
||||
self.assertTrue(isinstance(ret, dict))
|
||||
self.assertEqual(len(ret), 8)
|
||||
mock_run.__getitem__.side_effect = getitem
|
||||
mock_run.configure_mock(**attrs)
|
||||
cmd = '/usr/bin/plutil -convert xml1 -o - -- "{}"'.format(
|
||||
'/Library/LaunchAgents/com.apple.lla1.plist')
|
||||
calls = [call.cmd.run(cmd)]
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1.plist']['file_name'],
|
||||
'com.apple.lla1.plist')
|
||||
mock_raise_expat_error = MagicMock(
|
||||
side_effect=xml.parsers.expat.ExpatError)
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.lla1.plist']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/Library/LaunchAgents', 'com.apple.lla1.plist')))
|
||||
with patch('plistlib.readPlist', mock_raise_expat_error):
|
||||
with patch('plistlib.readPlistFromString', mock_raise_expat_error):
|
||||
ret = mac_utils._available_services()
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2.plist']['file_name'],
|
||||
'com.apple.slld2.plist')
|
||||
mock_run.assert_has_calls(calls, any_order=True)
|
||||
|
||||
self.assertEqual(
|
||||
ret['com.apple.slld2.plist']['file_path'],
|
||||
os.path.realpath(
|
||||
os.path.join('/System/Library/LaunchDaemons', 'com.apple.slld2.plist')))
|
||||
self.assertEqual(len(ret), 0)
|
||||
|
||||
|
||||
def _get_walk_side_effects(results):
|
||||
'''
|
||||
Data generation helper function for service tests.
|
||||
'''
|
||||
def walk_side_effect(*args, **kwargs):
|
||||
return [(args[0], [], results.get(args[0], []))]
|
||||
return walk_side_effect
|
||||
|
||||
|
||||
def _run_available_services(plists):
|
||||
if six.PY2:
|
||||
mock_read_plist = MagicMock()
|
||||
mock_read_plist.side_effect = plists
|
||||
with patch('plistlib.readPlist', mock_read_plist):
|
||||
ret = mac_utils._available_services()
|
||||
else:
|
||||
mock_load = MagicMock()
|
||||
mock_load.side_effect = plists
|
||||
with patch('salt.utils.files.fopen', mock_open()):
|
||||
with patch('plistlib.load', mock_load):
|
||||
ret = mac_utils._available_services()
|
||||
return ret
|
||||
|
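Stepping back, the refactored tests above all revolve around how _available_services reads a launchd plist: plistlib.readPlist on Python 2 with a plutil conversion fallback for binary or malformed files, and plistlib.load on Python 3 with unparsable files skipped. The function below is a rough, hypothetical sketch of such a loading strategy written for this document; it is not the actual salt.utils.mac_utils code.

# Hedged sketch: parse a launchd plist on Python 2 or 3 and return None when
# the file cannot be parsed, so the caller can simply skip it.
import plistlib
import subprocess
import xml.parsers.expat

import six  # inside Salt this would come from salt.ext


def load_plist_sketch(path):
    try:
        if six.PY2:
            return plistlib.readPlist(path)
        with open(path, 'rb') as handle:
            return plistlib.load(handle)
    except xml.parsers.expat.ExpatError:
        if six.PY2:
            # Stand-in for the cmd.run('plutil ...') call the tests above
            # mock: convert a binary plist to XML and parse it again.
            xml_data = subprocess.check_output(
                ['/usr/bin/plutil', '-convert', 'xml1', '-o', '-', '--', path])
            try:
                return plistlib.readPlistFromString(xml_data)
            except xml.parsers.expat.ExpatError:
                return None
        return None
    except Exception:  # e.g. plistlib.InvalidFileException on Python 3
        return None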
|
22
tests/unit/utils/test_timed_subprocess.py
Normal file
|
@@ -0,0 +1,22 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.timed_subprocess as timed_subprocess
|
||||
|
||||
|
||||
class TestTimedSubprocess(TestCase):
|
||||
|
||||
def test_timedproc_with_shell_true_and_list_args(self):
|
||||
'''
|
||||
This test confirms the fix for the regression introduced in 1f7d50d.
|
||||
The TimedProc dunder init would result in a traceback if the args were
|
||||
passed as a list and shell=True was set.
|
||||
'''
|
||||
p = timed_subprocess.TimedProc(['echo', 'foo'], shell=True)
|
||||
del p # Don't need this anymore
|
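The regression guarded here stems from subprocess semantics: with shell=True, list args are not interpreted the way most callers expect, and per the docstring the change in 1f7d50d caused TimedProc's __init__ to traceback on that combination. Below is a hedged sketch of one common way such input can be normalised; it is illustrative only and not the actual salt.utils.timed_subprocess logic.

# Hedged sketch: one common way to accept list args alongside shell=True is
# to join them into a single command string first (illustrative only).
import subprocess

def run_with_shell(args, **kwargs):
    if kwargs.get('shell') and isinstance(args, (list, tuple)):
        args = ' '.join(args)
    return subprocess.Popen(args, stdout=subprocess.PIPE, **kwargs)

proc = run_with_shell(['echo', 'foo'], shell=True)
print(proc.communicate()[0])  # b'foo\n' on POSIX systems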