Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

Commit f0bcf5f3e1: Merge branch 'master' into its
96 changed files with 6948 additions and 1955 deletions
@@ -10,7 +10,7 @@ runTestSuite(
     nox_env_name: 'runtests-zeromq',
     nox_passthrough_opts: '--unit',
     python_version: 'py3',
-    testrun_timeout: 9,
+    testrun_timeout: 10,
     use_spot_instances: false)

 // vim: ft=groovy
.codecov.yml (29 lines changed)
@@ -5,8 +5,14 @@ codecov:
   branch: master

-  notify:
-    require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
+  # notify:
+  #   require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
+  #   after_n_builds: 46 # Only notify after N builds
+  #                      # This value is the output of:
+  #                      #   sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
+
+  # Disable Notifications
+  notify: off

 ignore:
   - ^*.py$ # python files at the repo root, ie, setup.py

@@ -66,14 +72,17 @@ flags:
     paths:
       - tests/

-comment:
-  layout: "reach, diff, flags, files"
+#comment:
+#  layout: "reach, diff, flags, files"
+#  after_n_builds: 46 # Only comment on PRs after N builds
+#                     # This value is the output of:
+#                     #   sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'

-  behavior: new # Comment posting behaviour
-  # default: update, if exists. Otherwise post new.
-  # once: update, if exists. Otherwise post new. Skip if deleted.
-  # new: delete old and post new.
-  # spammy: post new (do not delete old comments).
-  #
+#  behavior: new # Comment posting behaviour
+#  # default: update, if exists. Otherwise post new.
+#  # once: update, if exists. Otherwise post new. Skip if deleted.
+#  # new: delete old and post new.
+#  # spammy: post new (do not delete old comments).
+#
+
+# Disable Comments
+comment: off
CHANGELOG.md (39 lines changed)
@@ -6,7 +6,44 @@ This changelog follows [keepachangelog](https://keepachangelog.com/en/1.0.0/) fo
This project versioning is _similar_ to [Semantic Versioning](https://semver.org), and is documented in [SEP 14](https://github.com/saltstack/salt-enhancement-proposals/pull/20/files).
Versions are `MAJOR.PATCH`.

## Unreleased (Neon)

### 3000.1

### Removed

### Deprecated

### Changed

### Fixed
- [#56082](https://github.com/saltstack/salt/pull/56082) - Fix saltversioninfo grain for new version
- [#56143](https://github.com/saltstack/salt/pull/56143) - Use encoding when caching pillar data
- [#56172](https://github.com/saltstack/salt/pull/56172) - Only change mine data if using new allow_tgt feature
- [#56094](https://github.com/saltstack/salt/pull/56094) - Fix type error in TornadoImporter
- [#56174](https://github.com/saltstack/salt/pull/56174) - MySQL module fixes
- [#56149](https://github.com/saltstack/salt/pull/56149) - Fix to scheduler for use of when and splay
- [#56197](https://github.com/saltstack/salt/pull/56197) - Allows use of inline powershell for cmd.script args
- [#55894](https://github.com/saltstack/salt/pull/55894) - pdbedit module should check for version 4.8.x or newer
- [#55906](https://github.com/saltstack/salt/pull/55906) - smartos.vm_present could not handle nics with vrrp_vrid property
- [#56218](https://github.com/saltstack/salt/pull/56218) - Changed StrictVersion checking of setuptools to LooseVersion
- [#56099](https://github.com/saltstack/salt/pull/56099) - Fix Windows and macOS requirements handling in setup.py
- [#56068](https://github.com/saltstack/salt/pull/56068) - Update the bootstrap script to latest version, v2020.02.24
- [#56185](https://github.com/saltstack/salt/pull/56185) - Fix regression in service states with reload argument
- [#56341](https://github.com/saltstack/salt/pull/56341) - Revert "Don't remove one directory level from slspath"
- [#56290](https://github.com/saltstack/salt/pull/56290) - Ensures popping lgpo.secedit_data does not throw KeyError
- [#56339](https://github.com/saltstack/salt/pull/56339) - Fix win_dns_client when used with scheduler
- [#56215](https://github.com/saltstack/salt/pull/56215) - Fix for unless requisite when pip is not installed
- [#56060](https://github.com/saltstack/salt/pull/56060) - Fix regex string for Del and DelVals
- [#56337](https://github.com/saltstack/salt/pull/56337) - Handle Adapter Type 53 and Undefined Types
- [#56160](https://github.com/saltstack/salt/pull/56160) - Fix issue with existing reg_dword entries
- [#56358](https://github.com/saltstack/salt/pull/56358) - Fix version instantiation when minor is an empty string
- [#56272](https://github.com/saltstack/salt/pull/56272) - Properly resolve the policy name
- [#56310](https://github.com/saltstack/salt/pull/56310) - Only process ADMX files when loading policies
- [#56327](https://github.com/saltstack/salt/pull/56327) - keep cache_copied_files variable a list
- [#56360](https://github.com/saltstack/salt/pull/56360) - dont require virtualenv.virtualenv_version call, removed in 20.0.10

### Added

## 3000 - Neon [2020-02-10]

### Removed
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-API" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-API" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-api \- salt-api Command
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-CALL" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-CALL" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-call \- salt-call Documentation
 .

@@ -198,17 +198,6 @@ Some outputters are formatted only for data returned from specific functions.
 If an outputter is used that does not support the data passed into it, then
 Salt will fall back on the \fBpprint\fP outputter and display the return data
 using the Python \fBpprint\fP standard library module.
-.sp
-\fBNOTE:\fP
-.INDENT 7.0
-.INDENT 3.5
-If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
-Without the static option, you will get a separate JSON string per minion
-which makes JSON output invalid as a whole.
-This is due to using an iterative outputter. So if you want to feed it
-to a JSON parser, use \fB\-\-static\fP as well.
-.UNINDENT
-.UNINDENT
 .UNINDENT
 .INDENT 0.0
 .TP
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-CLOUD" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-CLOUD" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-cloud \- Salt Cloud Command
 .

@@ -247,17 +247,6 @@ Some outputters are formatted only for data returned from specific functions.
 If an outputter is used that does not support the data passed into it, then
 Salt will fall back on the \fBpprint\fP outputter and display the return data
 using the Python \fBpprint\fP standard library module.
-.sp
-\fBNOTE:\fP
-.INDENT 7.0
-.INDENT 3.5
-If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
-Without the static option, you will get a separate JSON string per minion
-which makes JSON output invalid as a whole.
-This is due to using an iterative outputter. So if you want to feed it
-to a JSON parser, use \fB\-\-static\fP as well.
-.UNINDENT
-.UNINDENT
 .UNINDENT
 .INDENT 0.0
 .TP
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-CP" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-CP" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-cp \- salt-cp Documentation
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-KEY" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-KEY" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-key \- salt-key Documentation
 .

@@ -153,17 +153,6 @@ Some outputters are formatted only for data returned from specific functions.
 If an outputter is used that does not support the data passed into it, then
 Salt will fall back on the \fBpprint\fP outputter and display the return data
 using the Python \fBpprint\fP standard library module.
-.sp
-\fBNOTE:\fP
-.INDENT 7.0
-.INDENT 3.5
-If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
-Without the static option, you will get a separate JSON string per minion
-which makes JSON output invalid as a whole.
-This is due to using an iterative outputter. So if you want to feed it
-to a JSON parser, use \fB\-\-static\fP as well.
-.UNINDENT
-.UNINDENT
 .UNINDENT
 .INDENT 0.0
 .TP
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-MASTER" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-MASTER" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-master \- salt-master Documentation
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-MINION" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-MINION" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-minion \- salt-minion Documentation
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-PROXY" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-PROXY" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-proxy \- salt-proxy Documentation
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-RUN" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-RUN" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-run \- salt-run Documentation
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-SSH" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-SSH" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-ssh \- salt-ssh Documentation
 .

@@ -286,17 +286,6 @@ Some outputters are formatted only for data returned from specific functions.
 If an outputter is used that does not support the data passed into it, then
 Salt will fall back on the \fBpprint\fP outputter and display the return data
 using the Python \fBpprint\fP standard library module.
-.sp
-\fBNOTE:\fP
-.INDENT 7.0
-.INDENT 3.5
-If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
-Without the static option, you will get a separate JSON string per minion
-which makes JSON output invalid as a whole.
-This is due to using an iterative outputter. So if you want to feed it
-to a JSON parser, use \fB\-\-static\fP as well.
-.UNINDENT
-.UNINDENT
 .UNINDENT
 .INDENT 0.0
 .TP

@@ -348,6 +337,17 @@ output. One of \(aqfull\(aq, \(aqterse\(aq, \(aqmixed\(aq, \(aqchanges\(aq or
 Override the configured state_verbose value for minion
 output. Set to True or False. Default: none.
 .UNINDENT
+.sp
+\fBNOTE:\fP
+.INDENT 0.0
+.INDENT 3.5
+If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
+Without the static option, you will get a separate JSON string per minion
+which makes JSON output invalid as a whole.
+This is due to using an iterative outputter. So if you want to feed it
+to a JSON parser, use \fB\-\-static\fP as well.
+.UNINDENT
+.UNINDENT
 .SH SEE ALSO
 .sp
 \fBsalt(7)\fP
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-SYNDIC" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-SYNDIC" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-syndic \- salt-syndic Documentation
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT-UNITY" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT-UNITY" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt-unity \- salt-unity Command
 .
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SALT" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SALT" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 salt \- salt
 .

@@ -267,17 +267,6 @@ Some outputters are formatted only for data returned from specific functions.
 If an outputter is used that does not support the data passed into it, then
 Salt will fall back on the \fBpprint\fP outputter and display the return data
 using the Python \fBpprint\fP standard library module.
-.sp
-\fBNOTE:\fP
-.INDENT 7.0
-.INDENT 3.5
-If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
-Without the static option, you will get a separate JSON string per minion
-which makes JSON output invalid as a whole.
-This is due to using an iterative outputter. So if you want to feed it
-to a JSON parser, use \fB\-\-static\fP as well.
-.UNINDENT
-.UNINDENT
 .UNINDENT
 .INDENT 0.0
 .TP

@@ -329,6 +318,17 @@ output. One of \(aqfull\(aq, \(aqterse\(aq, \(aqmixed\(aq, \(aqchanges\(aq or
 Override the configured state_verbose value for minion
 output. Set to True or False. Default: none.
 .UNINDENT
+.sp
+\fBNOTE:\fP
+.INDENT 0.0
+.INDENT 3.5
+If using \fB\-\-out=json\fP, you will probably want \fB\-\-static\fP as well.
+Without the static option, you will get a separate JSON string per minion
+which makes JSON output invalid as a whole.
+This is due to using an iterative outputter. So if you want to feed it
+to a JSON parser, use \fB\-\-static\fP as well.
+.UNINDENT
+.UNINDENT
 .SH SEE ALSO
 .sp
 \fBsalt(7)\fP
doc/man/salt.7 (4084 lines changed; diff suppressed because it is too large)
@@ -1,6 +1,6 @@
 .\" Man page generated from reStructuredText.
 .
-.TH "SPM" "1" "Jan 15, 2020" "3000" "Salt"
+.TH "SPM" "1" "Mar 10, 2020" "3000.1" "Salt"
 .SH NAME
 spm \- Salt Package Manager Command
 .
@@ -96,10 +96,11 @@ include option.
 slspath
 =======

-The `slspath` variable contains the path to the current sls file. The value
-of `slspath` in files referenced in the current sls depends on the reference
-method. For jinja includes `slspath` is the path to the current file. For
-salt includes `slspath` is the path to the included file.
+The `slspath` variable contains the path to the directory of the current sls
+file. The value of `slspath` in files referenced in the current sls depends on
+the reference method. For jinja includes `slspath` is the path to the current
+directory of the file. For salt includes `slspath` is the path to the directory
+of the included file.

 .. code-block:: jinja

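(The jinja example that follows in the file is truncated in this diff view. Purely as an illustration of the behaviour described above, and not the file's own example, a minimal sls snippet using `slspath` might look like the following; the state ID and message text are made up.)

.. code-block:: jinja

    {# e.g. when rendered from salt://webserver/init.sls, slspath is "webserver" #}
    slspath-demo:
      test.show_notification:
        - text: "This state was rendered from the '{{ slspath }}' directory"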
doc/topics/releases/3000.1.rst (new file, 555 lines)
@ -0,0 +1,555 @@
|
|||
.. _release-3000-1:
|
||||
|
||||
=========================
|
||||
Salt 3000.1 Release Notes
|
||||
=========================
|
||||
|
||||
Version 3000.1 is a bugfix release for :ref:`3000 <release-3000>`.
|
||||
|
||||
Statistics
|
||||
==========
|
||||
|
||||
- Total Merges: **39**
|
||||
- Total Issue References: **14**
|
||||
- Total PR References: **40**
|
||||
|
||||
- Contributors: **15** (`Ch3LL`_, `UtahDave`_, `cmcmarrow`_, `dwoz`_, `frogunder`_, `garethgreenaway`_, `lorengordon`_, `mchugh19`_, `oeuftete`_, `raddessi`_, `s0undt3ch`_, `sjorge`_, `terminalmage`_, `twangboy`_, `waynew`_)
|
||||
|
||||
|
||||
Changelog for v3000..v3000.1
|
||||
============================
|
||||
|
||||
*Generated at: 2020-03-13 17:24:15 UTC*
|
||||
|
||||
* **PR** `#56365`_: (`Ch3LL`_) Update 3000.1 changelog
|
||||
@ *2020-03-13 17:21:02 UTC*
|
||||
|
||||
* 62857a9f0e Merge pull request `#56365`_ from Ch3LL/changelog_3000.1
|
||||
|
||||
* 851d7d8fc7 Update 3000.1 changelog
|
||||
|
||||
* **PR** `#56360`_: (`Ch3LL`_) Use virtualenv 20.0.10 for macosx tests
|
||||
@ *2020-03-13 16:39:57 UTC*
|
||||
|
||||
* a660e96fb2 Merge pull request `#56360`_ from Ch3LL/mac_virtual
|
||||
|
||||
* abda125086 Update static requirements files
|
||||
|
||||
* dca3390c0c skip zcbuildout state test when virtualenv 20.0.0
|
||||
|
||||
* fdeae1f0e1 Use virtualenv 20.0.10 for macosx tests
|
||||
|
||||
* **ISSUE** `#56324`_: (`kiemlicz`_) Cannot use Saltcheck module (refs: `#56327`_)
|
||||
|
||||
* **PR** `#56327`_: (`mchugh19`_) keep cache_copied_files variable a list
|
||||
@ *2020-03-13 16:39:32 UTC*
|
||||
|
||||
* d8fc07274c Merge pull request `#56327`_ from mchugh19/56324
|
||||
|
||||
* e1e2df2b1d Merge branch 'master' into 56324
|
||||
|
||||
* 6d7572706d use includes for saltcheck integration test
|
||||
|
||||
* 5b7073b01d keep cache_copied_files variable a list
|
||||
|
||||
* **PR** `#56023`_: (`cmcmarrow`_) add fix for bin_env
|
||||
@ *2020-03-13 16:39:09 UTC*
|
||||
|
||||
* 2d31d61ab9 Merge pull request `#56023`_ from cmcmarrow/binary_fix_pip_bin_env
|
||||
|
||||
* f780fd013e Merge branch 'master' into binary_fix_pip_bin_env
|
||||
|
||||
* d642c7c270 Merge branch 'master' into binary_fix_pip_bin_env
|
||||
|
||||
* 82b39d26f8 Merge branch 'binary_fix_pip_bin_env' of https://github.com/cmcmarrow/salt into binary_fix_pip_bin_env
|
||||
|
||||
* a5fa22528a Merge branch 'master' into binary_fix_pip_bin_env
|
||||
|
||||
* 10ca0fa5d6 fix space
|
||||
|
||||
* 22fff6c180 fix _pip_bin_env
|
||||
|
||||
* d5425e6630 Merge branch 'master' into binary_fix_pip_bin_env
|
||||
|
||||
* ceda3fde00 add full support for when missing kwargs or args
|
||||
|
||||
* cf4ab215b5 add fix for bin_env
|
||||
|
||||
* **PR** `#56310`_: (`twangboy`_) Only process ADMX files when loading policies
|
||||
@ *2020-03-11 23:47:50 UTC*
|
||||
|
||||
* 19bb6aae0c Merge pull request `#56310`_ from twangboy/fix_lgpo_admx
|
||||
|
||||
* 4bc5b05586 Fix some lint
|
||||
|
||||
* ce282f9754 Mark destructive test
|
||||
|
||||
* 8af09c31ca Add test case
|
||||
|
||||
* 547d35cf4c Only process ADMX files when loading policies
|
||||
|
||||
* **PR** `#56272`_: (`twangboy`_) Properly resolve the policy name
|
||||
@ *2020-03-11 22:11:02 UTC*
|
||||
|
||||
* 2d78931eaf Merge pull request `#56272`_ from twangboy/fix_lgpo_names
|
||||
|
||||
* 233ed12472 Add comments to helper function
|
||||
|
||||
* a7369e41d7 Remove redundent code... some more
|
||||
|
||||
* 1db3052b94 Consolidate duplicate code
|
||||
|
||||
* b25b56299d Fix failing PY2 tests
|
||||
|
||||
* 36a24ac28f Only load adml data once
|
||||
|
||||
* bc13be6850 Add some tests
|
||||
|
||||
* 2e9be6e461 Properly resolve the policy name
|
||||
|
||||
* **PR** `#56358`_: (`s0undt3ch`_) Fix version instantiation when minor is an empty string
|
||||
@ *2020-03-11 22:10:04 UTC*
|
||||
|
||||
* c6c6e2e3d9 Merge pull request `#56358`_ from s0undt3ch/hotfix/version-parsing
|
||||
|
||||
* cb22e78502 Fix version instantiation when minor is an empty string
|
||||
|
||||
* **PR** `#56160`_: (`twangboy`_) Fix issue with existing reg_dword entries
|
||||
@ *2020-03-11 22:09:21 UTC*
|
||||
|
||||
* b33047c574 Merge pull request `#56160`_ from twangboy/fix_reg_dword
|
||||
|
||||
* 80650c2a8c Make test more explicit
|
||||
|
||||
* 67dd1c18c7 Fix issue with existing reg_dword entries
|
||||
|
||||
* **PR** `#56337`_: (`twangboy`_) Handle Adapter Type 53 and Undefined Types
|
||||
@ *2020-03-11 22:08:19 UTC*
|
||||
|
||||
* 8f23706871 Merge pull request `#56337`_ from twangboy/fix_win_network_type
|
||||
|
||||
* 18a31a0b22 Merge branch 'master' into fix_win_network_type
|
||||
|
||||
* d92914ad91 Merge branch 'master' into fix_win_network_type
|
||||
|
||||
* 224629bdeb Handle Adapter Type 53 and Undefined Types
|
||||
|
||||
* **PR** `#56060`_: (`twangboy`_) Fix regex string for \*\*Del and \*\*DelVals
|
||||
@ *2020-03-11 22:05:43 UTC*
|
||||
|
||||
* e551ff6a31 Merge pull request `#56060`_ from twangboy/fix_lgpo_regex
|
||||
|
||||
* ca260ef582 Use byte-strings in the test
|
||||
|
||||
* 7c81874a5b Add some tests
|
||||
|
||||
* 944b022f33 Fix regex string for \*\*Del and \*\*DelVals
|
||||
|
||||
* **ISSUE** `#56131`_: (`thusoy`_) Salt v3000 crashes with unless clause if pip is missing (refs: `#56215`_)
|
||||
|
||||
* **PR** `#56215`_: (`dwoz`_) Fix for unless requisite when pip is not installed
|
||||
@ *2020-03-11 22:04:30 UTC*
|
||||
|
||||
* c9bc9431c1 Merge pull request `#56215`_ from dwoz/issue-56131
|
||||
|
||||
* 5dd8f8eabe Fix linter
|
||||
|
||||
* 7492c2f7c7 Remove crufty prints from test
|
||||
|
||||
* 8ceaa6e6f3 Only remove pip relasted modules
|
||||
|
||||
* 69b21e5e68 Add regression test for issue 56131
|
||||
|
||||
* 9d23f55adc Fix for unless requisite when pip is not installed
|
||||
|
||||
* **PR** `#56339`_: (`twangboy`_) Fix win_dns_client when used with scheduler
|
||||
@ *2020-03-11 22:01:32 UTC*
|
||||
|
||||
* 74b67ea741 Merge pull request `#56339`_ from twangboy/fix_win_dns_client
|
||||
|
||||
* d6a4fbe85d Fix bad test (lint)
|
||||
|
||||
* 47bd67373f Add some tests
|
||||
|
||||
* 995975649a Add missing import, improve docs
|
||||
|
||||
* **ISSUE** `#56288`_: (`lorengordon`_) win_lgpo: `lgpo.set` with secedit policy always generates `KeyError: u'lgpo.secedit_data'` (refs: `#56290`_)
|
||||
|
||||
* **PR** `#56290`_: (`lorengordon`_) Ensures popping lgpo.secedit_data does not throw KeyError
|
||||
@ *2020-03-11 17:27:04 UTC*
|
||||
|
||||
* 7701e8762d Merge pull request `#56290`_ from lorengordon/pop-secdata
|
||||
|
||||
* 62453bb6ce Merge branch 'master' into pop-secdata
|
||||
|
||||
* 95d5cea9ff Merge branch 'master' into pop-secdata
|
||||
|
||||
* 2979158a8b Tests the `if _secedits:` logic path in `lgpo.set_`
|
||||
|
||||
* d1f776178c Ensures popping lgpo.secedit_data does not throw KeyError
|
||||
|
||||
* **ISSUE** `#56119`_: (`finalduty`_) Release notes for v3000 do not mention changes to `slspath` variable (refs: `#56341`_)
|
||||
|
||||
* **PR** `#56341`_: (`dwoz`_) Revert "Don't remove one directory level from slspath"
|
||||
@ *2020-03-11 17:03:33 UTC*
|
||||
|
||||
* 84c60708cd Merge pull request `#56341`_ from dwoz/issue-56119
|
||||
|
||||
* afe6e84c36 Clarify slspath documentation
|
||||
|
||||
* 6dfc098fd1 Add debug for test asserts
|
||||
|
||||
* 9fa9dab8b2 Fix linter warts
|
||||
|
||||
* fd702bdd44 Add regression test for `#56119`_
|
||||
|
||||
* 3d3b673cec Revert "Don't remove one directory level from slspath"
|
||||
|
||||
* **PR** `#56185`_: (`terminalmage`_) Fix regression in service states with reload argument
|
||||
@ *2020-03-11 16:45:58 UTC*
|
||||
|
||||
* 4f9813a49c Merge pull request `#56185`_ from terminalmage/issue56167
|
||||
|
||||
* 65b3f4c9a0 Merge branch 'master' into issue56167
|
||||
|
||||
* 7b41a00b4f Merge branch 'master' into issue56167
|
||||
|
||||
* 8f7034d946 Merge branch 'master' into issue56167
|
||||
|
||||
* e9fbb634e1 Skip on OSX for now
|
||||
|
||||
* 5996280241 Add __opts__ and __grains__ just in case
|
||||
|
||||
* af3e841d08 Fix failing test
|
||||
|
||||
* 586f21aedc Add functional test
|
||||
|
||||
* 1afb9c10f0 Fix regression in service states with reload argument
|
||||
|
||||
* **PR** `#56068`_: (`s0undt3ch`_) Update the bootstrap script to latest version, v2020.02.24
|
||||
@ *2020-03-11 16:44:23 UTC*
|
||||
|
||||
* 74f8b2a926 Merge pull request `#56068`_ from s0undt3ch/hotfix/update-bootstrap
|
||||
|
||||
* ce83b190ed Merge branch 'master' into hotfix/update-bootstrap
|
||||
|
||||
* ccd231d82d Merge branch 'master' into hotfix/update-bootstrap
|
||||
|
||||
* 88a52f88ff Update the bootstrap script to latest version, v2020.02.24
|
||||
|
||||
* 012fb5bc4b Update the bootstrap script to latest version, v2020.02.04
|
||||
|
||||
* **PR** `#56321`_: (`oeuftete`_) Tidy up formatting in boto_secgroup docs
|
||||
@ *2020-03-11 08:14:48 UTC*
|
||||
|
||||
* 154257e2e9 Merge pull request `#56321`_ from oeuftete/tidy-boto-secgroup-docs
|
||||
|
||||
* 616f11b33e Merge branch 'master' into tidy-boto-secgroup-docs
|
||||
|
||||
* 412bb4d62d Tidy up formatting in boto_secgroup docs
|
||||
|
||||
* **PR** `#56336`_: (`Ch3LL`_) Fix test_issue_2594_non_invalidated_cache test (update zope.interface)
|
||||
@ *2020-03-11 00:08:43 UTC*
|
||||
|
||||
* 11d33e3d90 Merge pull request `#56336`_ from Ch3LL/fix_56330
|
||||
|
||||
* bbf37e090b Fix test_issue_2594_non_invalidated_cache test (update zope.interface)
|
||||
|
||||
* **PR** `#56346`_: (`frogunder`_) Update man pages to 3000.1
|
||||
@ *2020-03-11 00:07:31 UTC*
|
||||
|
||||
* a640bd30fc Merge pull request `#56346`_ from frogunder/man_pages_3000_1
|
||||
|
||||
* 6d7c1b6482 Update man pages to 3000.1
|
||||
|
||||
* **PR** `#56099`_: (`s0undt3ch`_) Fix Windows and macOS requirements handling in setup.py
|
||||
@ *2020-03-11 00:04:37 UTC*
|
||||
|
||||
* 2f783d247e Merge pull request `#56099`_ from s0undt3ch/hotfix/requirements
|
||||
|
||||
* c19f4a3fae Merge branch 'master' into hotfix/requirements
|
||||
|
||||
* 1e7bc8fc8f Also take into account macOS requirements
|
||||
|
||||
* 7eef14952c Also include req_win.txt
|
||||
|
||||
* **PR** `#56218`_: (`raddessi`_) Changed StrictVersion checking of setuptools to LooseVersion
|
||||
@ *2020-03-10 23:59:54 UTC*
|
||||
|
||||
* 7c4d879073 Merge pull request `#56218`_ from raddessi/master.v3000-conda
|
||||
|
||||
* 44556f6d57 Merge branch 'master' into master.v3000-conda
|
||||
|
||||
* 566c03b786 Merge branch 'master' into master.v3000-conda
|
||||
|
||||
* 82773a9799 Removed now-unused StrictVersion import from setup.py
|
||||
|
||||
* 31bb0f7cd1 Changed StrictVersion checking of setuptools to LooseVersion
|
||||
|
||||
* **PR** `#56128`_: (`waynew`_) Update CHANGELOG with release date and unreleased.
|
||||
@ *2020-03-10 23:58:15 UTC*
|
||||
|
||||
* acbd3556e5 Merge pull request `#56128`_ from waynew/master
|
||||
|
||||
* e3216db3e5 Update CHANGELOG with release date and unreleased.
|
||||
|
||||
* **PR** `#55937`_: (`twangboy`_) Update windows build scripts
|
||||
@ *2020-03-10 23:55:55 UTC*
|
||||
|
||||
* 12140545ab Merge pull request `#55937`_ from twangboy/update_deps
|
||||
|
||||
* f00a504a48 Add back the pylauncher
|
||||
|
||||
* 30b9c32356 Revert changes to req and req_win
|
||||
|
||||
* 98dc0e970c Fix some warts in the build_env scripts
|
||||
|
||||
* 8404141f65 Update dependencies
|
||||
|
||||
* **PR** `#55906`_: (`sjorge`_) smartos.vm_present could not handle nics with vrrp_vrid property
|
||||
@ *2020-03-10 23:54:44 UTC*
|
||||
|
||||
* 485a47cdf1 Merge pull request `#55906`_ from sjorge/smartos_vrrp
|
||||
|
||||
* 5bd7dd009a Merge branch 'master' into smartos_vrrp
|
||||
|
||||
* f77719c179 smartos state should handle vrrp config
|
||||
|
||||
* **ISSUE** `#55185`_: (`sjorge`_) salt.modules.pdbedit doesn't work on samba older than 4.8 (refs: `#55894`_)
|
||||
|
||||
* **PR** `#55894`_: (`sjorge`_) `#55185`_ pdbedit module should check for version 4.8.x or newer
|
||||
@ *2020-03-10 23:54:21 UTC*
|
||||
|
||||
* 1fa8555360 Merge pull request `#55894`_ from sjorge/pdbedit_55185
|
||||
|
||||
* 9dc7b71122 Merge branch 'master' into pdbedit_55185
|
||||
|
||||
* **ISSUE** `#56195`_: (`lorengordon`_) Windows: Using inline powershell in args with `cmd.script` and `shell: powershell` (refs: `#56197`_)
|
||||
|
||||
* **PR** `#56197`_: (`lorengordon`_) Allows use of inline powershell for cmd.script args
|
||||
@ *2020-03-10 23:52:47 UTC*
|
||||
|
||||
* 3e57d58db2 Merge pull request `#56197`_ from lorengordon/file-or-no-file
|
||||
|
||||
* fcd1699f5e Allows use of inline powershell for cmd.script args
|
||||
|
||||
* be2e67c0a0 Tests that powershell processes inline powershell in args
|
||||
|
||||
* **ISSUE** `#53152`_: (`jbeaird`_) daily highstate fails after 2019.2 upgrade (refs: `#56149`_)
|
||||
|
||||
* **PR** `#56149`_: (`garethgreenaway`_) [master] Fix to scheduler for use of when and splay
|
||||
@ *2020-03-10 23:52:16 UTC*
|
||||
|
||||
* 547c73e4cc Merge pull request `#56149`_ from garethgreenaway/53152_fix_schedule_when_splay
|
||||
|
||||
* 8f068f6f9b Fix for when using a combination of when and splay. Previously comparing the wrong value when determining if the job should be run and next_fire_time updated. This resulted in multiple job runs when `when` and `splay` were used together. Code updated and test updated to ensure only one run at the specific time. Skip eval tests is dateutil.parser is unavailable.
|
||||
|
||||
* **PR** `#56345`_: (`s0undt3ch`_) Bump Windows Py3 builds timeout to 10 hours
|
||||
@ *2020-03-10 20:43:41 UTC*
|
||||
|
||||
* 192ce76a95 Merge pull request `#56345`_ from s0undt3ch/hotfix/win-py3-timeouts
|
||||
|
||||
* 11bdc38ae3 Bump Windows 2019 Py3 builds timeout to 10 hours
|
||||
|
||||
* **PR** `#55888`_: (`s0undt3ch`_) Disable codecov PR comments and status checks
|
||||
@ *2020-03-10 15:45:52 UTC*
|
||||
|
||||
* a204906c80 Merge pull request `#55888`_ from s0undt3ch/hotfix/coverage-reporting
|
||||
|
||||
* 4b8dc8a586 Disable codecov PR comments and status checks
|
||||
|
||||
* cb0f4dff87 Apply the suggestion given by the codecov team
|
||||
|
||||
* **ISSUE** `#56177`_: (`jodok`_) mysql states fail because conv is `` instead of None (refs: `#56174`_)
|
||||
|
||||
* **ISSUE** `#56170`_: (`jeffdyke`_) mariadb socket access must be enabled before highstate - salt 3K still tries empty password (refs: `#56174`_)
|
||||
|
||||
* **ISSUE** `#56124`_: (`ymasson`_) MySQL state and module broken after upgrade to 3000 (refs: `#56174`_)
|
||||
|
||||
* **PR** `#56174`_: (`garethgreenaway`_) [master] MySQL module fixes
|
||||
@ *2020-03-10 04:03:23 UTC*
|
||||
|
||||
* 3e913631bb Merge pull request `#56174`_ from garethgreenaway/56124_mysql_module_state_fixes
|
||||
|
||||
* fcc061368b Removing quotes from the plugin_status query. Updating tests to reflect changes.
|
||||
|
||||
* 3dc66393b2 Adding better error reporting around plugins. Updating tests. Only attempt to delete a user if they exist.
|
||||
|
||||
* 1337da1e4e Ensure _mysql_user_exists is using auth_socket. Updating mysql and mariadb chpass functions to ensure that the respective plugins are enabled before attempting to use them.
|
||||
|
||||
* 34995ba4e8 Reworking the unix_socket code to support the differences between MySQL and MariaDB. Adding some functions to install, remove, and check the status of plugins which we can then use when adding users which will use the unix_socket & auth_socket plugins. Adding additional tests for these new functions as well as test to ensure the correct SQL is being generated when using passwordless and unix_socket options.
|
||||
|
||||
* 5bfd67c13e Minor tweak to mysql module. Fixing failing tests.
|
||||
|
||||
* e871a3ffd1 Various fixes to the mysql module to break out the handling of user management into different functions based on MySQL variant.
|
||||
|
||||
* **ISSUE** `#56063`_: (`terminalmage`_) [master] Traceback in esxi grain module on import (refs: `#56094`_)
|
||||
|
||||
* **PR** `#56094`_: (`dwoz`_) Fix type error in TornadoImporter
|
||||
@ *2020-03-10 01:39:08 UTC*
|
||||
|
||||
* 211c88bfbc Merge pull request `#56094`_ from dwoz/fix_56063
|
||||
|
||||
* 7b1632e8e3 Fix type error in TornadoImporter
|
||||
|
||||
* **PR** `#56172`_: (`Ch3LL`_) Only change mine data if using new allow_tgt feature
|
||||
@ *2020-03-10 01:34:27 UTC*
|
||||
|
||||
* fb5252fc53 Merge pull request `#56172`_ from Ch3LL/mine_g
|
||||
|
||||
* f4c9c2a5cf Fix docs
|
||||
|
||||
* 6c914caec8 Use different targeting for windows/linux
|
||||
|
||||
* f6348127dc Only change mine data if using new allow_tgt feature
|
||||
|
||||
* **ISSUE** `#56121`_: (`githubcdr`_) salt-minion broken after upgrade to 3000 (refs: `#56143`_)
|
||||
|
||||
* **ISSUE** `#51854`_: (`Oloremo`_) Fluorine: minion_pillar_cache: True leads to exception (refs: `#52195`_, `#56143`_)
|
||||
|
||||
* **PR** `#56143`_: (`waynew`_) Use encoding when caching pillar data
|
||||
@ *2020-03-10 01:33:37 UTC*
|
||||
|
||||
* **PR** `#52195`_: (`waynew`_) Use encoding when caching pillar data (refs: `#56143`_)
|
||||
|
||||
* 8a8e9c9c5f Merge pull request `#56143`_ from waynew/51854-minion-pillar-cache-exception
|
||||
|
||||
* 58cc9488aa Merge branch 'master' into 51854-minion-pillar-cache-exception
|
||||
|
||||
* **PR** `#56082`_: (`Ch3LL`_) Fix saltversioninfo grain for new version
|
||||
@ *2020-03-10 01:32:11 UTC*
|
||||
|
||||
* 9f27caa7d0 Merge pull request `#56082`_ from Ch3LL/ver_grains
|
||||
|
||||
* e6abd6d31b ensure full_info/noc_info work with new versioning
|
||||
|
||||
* bcc520ccc4 Add saltversioninfo grains test
|
||||
|
||||
* 510e149b87 Fix saltversioninfo grain for new version
|
||||
|
||||
* **PR** `#56285`_: (`UtahDave`_) Add missing colon.
|
||||
@ *2020-03-09 22:22:17 UTC*
|
||||
|
||||
* 602ff3b9f1 Merge pull request `#56285`_ from UtahDave/fix_f5_doc
|
||||
|
||||
* 1034013831 Add missing colon.
|
||||
|
||||
* **PR** `#56333`_: (`Ch3LL`_) add pylint ignore in django returner
|
||||
@ *2020-03-09 20:42:42 UTC*
|
||||
|
||||
* de5184a206 Merge pull request `#56333`_ from Ch3LL/lint_fix
|
||||
|
||||
* 6a213a429a add pylint ignore in django returner
|
||||
|
||||
* 759290a055 Use encoding when caching pillar data
|
||||
|
||||
* **ISSUE** `#56080`_: (`sagetherage`_) Update release notes, pip download page and install docs on pycrpto (refs: `#56095`_)
|
||||
|
||||
* **PR** `#56095`_: (`waynew`_) Provide security advisory for PyCrypto
|
||||
@ *2020-02-10 18:42:00 UTC*
|
||||
|
||||
* 9adc2214c3 Merge pull request `#56095`_ from waynew/crypto-warning
|
||||
|
||||
* 484bc51f4b Add warnings to a couple of other places
|
||||
|
||||
* 2711c04ca9 Provide security advisory for PyCrypto
|
||||
|
||||
* **PR** `#56092`_: (`twangboy`_) Add LGPO and Network PRs to changelogs
|
||||
@ *2020-02-10 18:17:28 UTC*
|
||||
|
||||
* 1acd492bb9 Merge pull request `#56092`_ from twangboy/waynew-update-changelog
|
||||
|
||||
* 4e03620d4e Fix a docs issue
|
||||
|
||||
* e7b64277e1 Add LGPO and Network PRs to changelogs
|
||||
|
||||
* **PR** `#56115`_: (`s0undt3ch`_) Add information about the pip install salt on windows issue.
|
||||
@ *2020-02-10 17:33:44 UTC*
|
||||
|
||||
* 65d59b0ee9 Merge pull request `#56115`_ from s0undt3ch/hotfix/release-notes
|
||||
|
||||
* aeac9f36cd Add information about the pip install salt on windows issue.
|
||||
|
||||
* a5179434e7 Merge branch 'master' into pdbedit_55185
|
||||
|
||||
* 95d46d6cc8 `#55185`_ pdbedit module should check for version 4.8.x or newer
|
||||
|
||||
.. _`#51854`: https://github.com/saltstack/salt/issues/51854
|
||||
.. _`#52195`: https://github.com/saltstack/salt/pull/52195
|
||||
.. _`#53152`: https://github.com/saltstack/salt/issues/53152
|
||||
.. _`#55185`: https://github.com/saltstack/salt/issues/55185
|
||||
.. _`#55888`: https://github.com/saltstack/salt/pull/55888
|
||||
.. _`#55894`: https://github.com/saltstack/salt/pull/55894
|
||||
.. _`#55906`: https://github.com/saltstack/salt/pull/55906
|
||||
.. _`#55937`: https://github.com/saltstack/salt/pull/55937
|
||||
.. _`#56023`: https://github.com/saltstack/salt/pull/56023
|
||||
.. _`#56060`: https://github.com/saltstack/salt/pull/56060
|
||||
.. _`#56063`: https://github.com/saltstack/salt/issues/56063
|
||||
.. _`#56068`: https://github.com/saltstack/salt/pull/56068
|
||||
.. _`#56080`: https://github.com/saltstack/salt/issues/56080
|
||||
.. _`#56082`: https://github.com/saltstack/salt/pull/56082
|
||||
.. _`#56092`: https://github.com/saltstack/salt/pull/56092
|
||||
.. _`#56094`: https://github.com/saltstack/salt/pull/56094
|
||||
.. _`#56095`: https://github.com/saltstack/salt/pull/56095
|
||||
.. _`#56099`: https://github.com/saltstack/salt/pull/56099
|
||||
.. _`#56115`: https://github.com/saltstack/salt/pull/56115
|
||||
.. _`#56119`: https://github.com/saltstack/salt/issues/56119
|
||||
.. _`#56121`: https://github.com/saltstack/salt/issues/56121
|
||||
.. _`#56124`: https://github.com/saltstack/salt/issues/56124
|
||||
.. _`#56128`: https://github.com/saltstack/salt/pull/56128
|
||||
.. _`#56131`: https://github.com/saltstack/salt/issues/56131
|
||||
.. _`#56143`: https://github.com/saltstack/salt/pull/56143
|
||||
.. _`#56149`: https://github.com/saltstack/salt/pull/56149
|
||||
.. _`#56160`: https://github.com/saltstack/salt/pull/56160
|
||||
.. _`#56170`: https://github.com/saltstack/salt/issues/56170
|
||||
.. _`#56172`: https://github.com/saltstack/salt/pull/56172
|
||||
.. _`#56174`: https://github.com/saltstack/salt/pull/56174
|
||||
.. _`#56177`: https://github.com/saltstack/salt/issues/56177
|
||||
.. _`#56185`: https://github.com/saltstack/salt/pull/56185
|
||||
.. _`#56195`: https://github.com/saltstack/salt/issues/56195
|
||||
.. _`#56197`: https://github.com/saltstack/salt/pull/56197
|
||||
.. _`#56215`: https://github.com/saltstack/salt/pull/56215
|
||||
.. _`#56218`: https://github.com/saltstack/salt/pull/56218
|
||||
.. _`#56272`: https://github.com/saltstack/salt/pull/56272
|
||||
.. _`#56285`: https://github.com/saltstack/salt/pull/56285
|
||||
.. _`#56288`: https://github.com/saltstack/salt/issues/56288
|
||||
.. _`#56290`: https://github.com/saltstack/salt/pull/56290
|
||||
.. _`#56310`: https://github.com/saltstack/salt/pull/56310
|
||||
.. _`#56321`: https://github.com/saltstack/salt/pull/56321
|
||||
.. _`#56324`: https://github.com/saltstack/salt/issues/56324
|
||||
.. _`#56327`: https://github.com/saltstack/salt/pull/56327
|
||||
.. _`#56333`: https://github.com/saltstack/salt/pull/56333
|
||||
.. _`#56336`: https://github.com/saltstack/salt/pull/56336
|
||||
.. _`#56337`: https://github.com/saltstack/salt/pull/56337
|
||||
.. _`#56339`: https://github.com/saltstack/salt/pull/56339
|
||||
.. _`#56341`: https://github.com/saltstack/salt/pull/56341
|
||||
.. _`#56345`: https://github.com/saltstack/salt/pull/56345
|
||||
.. _`#56346`: https://github.com/saltstack/salt/pull/56346
|
||||
.. _`#56358`: https://github.com/saltstack/salt/pull/56358
|
||||
.. _`#56360`: https://github.com/saltstack/salt/pull/56360
|
||||
.. _`#56365`: https://github.com/saltstack/salt/pull/56365
|
||||
.. _`Ch3LL`: https://github.com/Ch3LL
|
||||
.. _`Oloremo`: https://github.com/Oloremo
|
||||
.. _`UtahDave`: https://github.com/UtahDave
|
||||
.. _`cmcmarrow`: https://github.com/cmcmarrow
|
||||
.. _`dwoz`: https://github.com/dwoz
|
||||
.. _`finalduty`: https://github.com/finalduty
|
||||
.. _`frogunder`: https://github.com/frogunder
|
||||
.. _`garethgreenaway`: https://github.com/garethgreenaway
|
||||
.. _`githubcdr`: https://github.com/githubcdr
|
||||
.. _`jbeaird`: https://github.com/jbeaird
|
||||
.. _`jeffdyke`: https://github.com/jeffdyke
|
||||
.. _`jodok`: https://github.com/jodok
|
||||
.. _`kiemlicz`: https://github.com/kiemlicz
|
||||
.. _`lorengordon`: https://github.com/lorengordon
|
||||
.. _`mchugh19`: https://github.com/mchugh19
|
||||
.. _`oeuftete`: https://github.com/oeuftete
|
||||
.. _`raddessi`: https://github.com/raddessi
|
||||
.. _`s0undt3ch`: https://github.com/s0undt3ch
|
||||
.. _`sagetherage`: https://github.com/sagetherage
|
||||
.. _`sjorge`: https://github.com/sjorge
|
||||
.. _`terminalmage`: https://github.com/terminalmage
|
||||
.. _`thusoy`: https://github.com/thusoy
|
||||
.. _`twangboy`: https://github.com/twangboy
|
||||
.. _`waynew`: https://github.com/waynew
|
||||
.. _`ymasson`: https://github.com/ymasson
|
|
@@ -592,6 +592,19 @@ Enhancements to chroot
 :py:func:`highstate<salt.modules.chroot.highstate>` that allow executing
 states in sls files or running apply/highstate inside of a chroot.

+Minion-side ACL
+---------------
+
+Salt has had master-side ACL for the salt mine for some time, where the master
+configuration contained `mine_get` that specified which minions could request
+which functions. However, now you can specify which minions can access a function
+in the salt mine function definition itself (or when calling :py:func:`mine.send <salt.modules.mine.send>`).
+This targeting works the same as the generic minion targeting as specified
+:ref:`here <targeting>`. The parameters used are ``allow_tgt`` and ``allow_tgt_type``.
+See also :ref:`the documentation of the Salt Mine <mine_minion-side-acl>`. Please
+note that if you want to use this new feature both your minion and masters will need
+to be on at least version 3000.
+
 Deprecations
 ============
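As an illustration of the minion-side ACL described above, a minimal sketch of a mine function definition using ``allow_tgt`` and ``allow_tgt_type`` could look like the following (the function, its argument, and the glob target are assumptions for the example, not part of this commit):

.. code-block:: yaml

    # minion config or pillar (illustrative layout)
    mine_functions:
      network.ip_addrs:
        - interface: eth0       # example argument, not required
        - allow_tgt: 'web*'     # only minions matching this target may request the data
        - allow_tgt_type: glob  # same target types as regular minion targeting

A minion that does not match ``allow_tgt`` should simply receive no data for this function when it calls ``mine.get``.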
@@ -16,14 +16,3 @@ also support the syntax used in :py:mod:`module.run <salt.states.module.run>`.
 The old syntax for the mine_function - as a dict, or as a list with dicts that
 contain more than exactly one key - is still supported but discouraged in favor
 of the more uniform syntax of module.run.
-
-Minion-side ACL
----------------
-
-Salt has had master-side ACL for the salt mine for some time, where the master
-configuration contained `mine_get` that specified which minions could request
-which functions. However, now you can specify which minions can access a function
-in the salt mine function definition itself (or when calling :py:func:`mine.send <salt.modules.mine.send>`).
-This targeting works the same as the generic minion targeting as specified
-:ref:`here <targeting>`. The parameters used are ``allow_tgt`` and ``allow_tgt_type``.
-See also :ref:`the documentation of the Salt Mine <mine_minion-side-acl>`.
@ -221,31 +221,32 @@ If ($NoPipDependencies -eq $false) {
|
|||
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --disable-pip-version-check --no-cache-dir install -r $($script_path)\req.txt" "pip install"
|
||||
}
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Cleaning Up PyWin32
|
||||
#==============================================================================
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
Write-Output " - $script_name :: Cleaning Up PyWin32 . . ."
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
If ( Test-Path "$($ini['Settings']['SitePkgs2Dir'])\pywin32_system32" -PathType Container ) {
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
Write-Output " - $script_name :: Cleaning Up PyWin32 . . ."
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
|
||||
# Move DLL's to Python Root
|
||||
Write-Output " - $script_name :: Moving PyWin32 DLLs . . ."
|
||||
# The dlls have to be in Python directory and the site-packages\win32 directory
|
||||
Copy-Item "$($ini['Settings']['SitePkgs2Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['Python2Dir'])" -Force
|
||||
Move-Item "$($ini['Settings']['SitePkgs2Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['SitePkgs2Dir'])\win32" -Force
|
||||
# Move DLL's to Python Root
|
||||
Write-Output " - $script_name :: Moving PyWin32 DLLs . . ."
|
||||
# The dlls have to be in Python directory and the site-packages\win32 directory
|
||||
Copy-Item "$( $ini['Settings']['SitePkgs2Dir'] )\pywin32_system32\*.dll" "$( $ini['Settings']['Python2Dir'] )" -Force
|
||||
Move-Item "$( $ini['Settings']['SitePkgs2Dir'] )\pywin32_system32\*.dll" "$( $ini['Settings']['SitePkgs2Dir'] )\win32" -Force
|
||||
|
||||
# Create gen_py directory
|
||||
Write-Output " - $script_name :: Creating gen_py Directory . . ."
|
||||
New-Item -Path "$($ini['Settings']['SitePkgs2Dir'])\win32com\gen_py" -ItemType Directory -Force | Out-Null
|
||||
# Create gen_py directory
|
||||
Write-Output " - $script_name :: Creating gen_py Directory . . ."
|
||||
New-Item -Path "$( $ini['Settings']['SitePkgs2Dir'] )\win32com\gen_py" -ItemType Directory -Force | Out-Null
|
||||
|
||||
# Remove pywin32_system32 directory
|
||||
Write-Output " - $script_name :: Removing pywin32_system32 Directory . . ."
|
||||
Remove-Item "$($ini['Settings']['SitePkgs2Dir'])\pywin32_system32"
|
||||
# Remove pywin32_system32 directory
|
||||
Write-Output " - $script_name :: Removing pywin32_system32 Directory . . ."
|
||||
Remove-Item "$( $ini['Settings']['SitePkgs2Dir'] )\pywin32_system32"
|
||||
|
||||
# Remove PyWin32 PostInstall and testall Scripts
|
||||
Write-Output " - $script_name :: Removing PyWin32 scripts . . ."
|
||||
Remove-Item "$($ini['Settings']['Scripts2Dir'])\pywin32_*" -Force -Recurse
|
||||
# Remove PyWin32 PostInstall and testall Scripts
|
||||
Write-Output " - $script_name :: Removing PyWin32 scripts . . ."
|
||||
Remove-Item "$( $ini['Settings']['Scripts2Dir'] )\pywin32_*" -Force -Recurse
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# Copy DLLs to Python Directory
|
||||
|
|
|
@ -223,37 +223,28 @@ If ($NoPipDependencies -eq $false) {
|
|||
#==============================================================================
|
||||
# Cleaning Up PyWin32
|
||||
#==============================================================================
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
Write-Output " - $script_name :: Cleaning Up PyWin32 . . ."
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
If (Test-Path "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32" -PathType Container ) {
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
Write-Output " - $script_name :: Cleaning Up PyWin32 . . ."
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
|
||||
# Move DLL's to Python Root
|
||||
# The dlls have to be in Python directory and the site-packages\win32 directory
|
||||
Write-Output " - $script_name :: Moving PyWin32 DLLs . . ."
|
||||
Copy-Item "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['Python3Dir'])" -Force
|
||||
Move-Item "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32\*.dll" "$($ini['Settings']['SitePkgs3Dir'])\win32" -Force
|
||||
# Move DLL's to Python Root
|
||||
# The dlls have to be in Python directory and the site-packages\win32 directory
|
||||
Write-Output " - $script_name :: Moving PyWin32 DLLs . . ."
|
||||
Copy-Item "$( $ini['Settings']['SitePkgs3Dir'] )\pywin32_system32\*.dll" "$( $ini['Settings']['Python3Dir'] )" -Force
|
||||
Move-Item "$( $ini['Settings']['SitePkgs3Dir'] )\pywin32_system32\*.dll" "$( $ini['Settings']['SitePkgs3Dir'] )\win32" -Force
|
||||
|
||||
# Create gen_py directory
|
||||
Write-Output " - $script_name :: Creating gen_py Directory . . ."
|
||||
New-Item -Path "$($ini['Settings']['SitePkgs3Dir'])\win32com\gen_py" -ItemType Directory -Force | Out-Null
|
||||
# Create gen_py directory
|
||||
Write-Output " - $script_name :: Creating gen_py Directory . . ."
|
||||
New-Item -Path "$( $ini['Settings']['SitePkgs3Dir'] )\win32com\gen_py" -ItemType Directory -Force | Out-Null
|
||||
|
||||
# Remove pywin32_system32 directory
|
||||
Write-Output " - $script_name :: Removing pywin32_system32 Directory . . ."
|
||||
Remove-Item "$($ini['Settings']['SitePkgs3Dir'])\pywin32_system32"
|
||||
# Remove pywin32_system32 directory
|
||||
Write-Output " - $script_name :: Removing pywin32_system32 Directory . . ."
|
||||
Remove-Item "$( $ini['Settings']['SitePkgs3Dir'] )\pywin32_system32"
|
||||
|
||||
# Remove PyWin32 PostInstall and testall Scripts
|
||||
Write-Output " - $script_name :: Removing PyWin32 scripts . . ."
|
||||
Remove-Item "$($ini['Settings']['Scripts3Dir'])\pywin32_*" -Force -Recurse
|
||||
|
||||
#==============================================================================
|
||||
# Fix PyCrypto
|
||||
#==============================================================================
|
||||
If ($NoPipDependencies -eq $false) {
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
Write-Output " - $script_name :: Fixing PyCrypto . . ."
|
||||
Write-Output " ----------------------------------------------------------------"
|
||||
$nt_file = "$($ini['Settings']['Python3Dir'])\Lib\site-packages\Crypto\Random\OSRNG\nt.py"
|
||||
(Get-Content $nt_file) | Foreach-Object {$_ -replace '^import winrandom$', 'from Crypto.Random.OSRNG import winrandom'} | Set-Content $nt_file
|
||||
# Remove PyWin32 PostInstall and testall Scripts
|
||||
Write-Output " - $script_name :: Removing PyWin32 scripts . . ."
|
||||
Remove-Item "$( $ini['Settings']['Scripts3Dir'] )\pywin32_*" -Force -Recurse
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
|
|
|
@@ -15,7 +15,7 @@ Function Start_Process_and_test_exitcode {
     Begin { Write-Host "Executing Command: $fun $args" }

     Process {
-        $p = Start-Process "$fun" -ArgumentList "$args" -Wait -PassThru
+        $p = Start-Process "$fun" -ArgumentList "$args" -Wait -NoNewWindow -PassThru
         If ($p.ExitCode -ne 0) {
             Write-Error "$descr returned exitcode $p.ExitCode."
             exit $p.ExitCode
|
|
@ -25,7 +25,7 @@ rfc3987
|
|||
salttesting==2017.6.1
|
||||
strict_rfc3339
|
||||
supervisor==3.3.5; python_version < '3'
|
||||
virtualenv
|
||||
virtualenv==20.0.10
|
||||
watchdog
|
||||
yamlordereddictloader
|
||||
vcert~=0.7.0
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
# pip-compile -o requirements/static/py2.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
|
||||
#
|
||||
apache-libcloud==2.4.0
|
||||
appdirs==1.4.3 # via virtualenv
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
|
@ -30,16 +31,18 @@ cherrypy==17.4.1
|
|||
click==7.0
|
||||
clustershell==1.8.1
|
||||
configparser==4.0.2 # via importlib-metadata
|
||||
contextlib2==0.5.5 # via cherrypy, importlib-metadata
|
||||
contextlib2==0.6.0.post1 # via cherrypy, importlib-metadata, importlib-resources, virtualenv
|
||||
cookies==2.2.1 # via responses
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1
|
||||
distlib==0.3.0 # via virtualenv
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
enum34==1.1.6
|
||||
filelock==3.0.12 # via virtualenv
|
||||
funcsigs==1.0.2 # via mock, pytest
|
||||
functools32==3.2.3.post2 # via jsonschema
|
||||
future==0.17.1 # via python-jose
|
||||
|
@ -50,7 +53,8 @@ gitdb==0.6.4
|
|||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8
|
||||
importlib-metadata==0.23 # via pluggy, pytest
|
||||
importlib-metadata==0.23 # via importlib-resources, pluggy, pytest, virtualenv
|
||||
importlib-resources==1.3.1 # via virtualenv
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
|
@ -77,7 +81,7 @@ netaddr==0.7.19 # via junos-eznc
|
|||
oscrypto==1.2.0 # via certvalidator
|
||||
packaging==19.2 # via pytest
|
||||
paramiko==2.4.2 # via junos-eznc, ncclient, scp
|
||||
pathlib2==2.3.3 # via importlib-metadata, pytest
|
||||
pathlib2==2.3.3 # via importlib-metadata, importlib-resources, pytest, virtualenv
|
||||
pathtools==0.1.2 # via watchdog
|
||||
pluggy==0.13.1 # via pytest
|
||||
portend==2.4 # via cherrypy
|
||||
|
@ -116,16 +120,17 @@ scp==0.13.2 # via junos-eznc
|
|||
selectors2==2.0.1 # via ncclient
|
||||
setproctitle==1.1.10
|
||||
singledispatch==3.4.0.3 ; python_version < "3.4"
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pathlib2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, vcert, websocket-client
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pathlib2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, singledispatch, tempora, vcert, virtualenv, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
supervisor==3.3.5 ; python_version < "3"
|
||||
tempora==1.14.1 # via portend
|
||||
timelib==0.2.4
|
||||
typing==3.7.4.1 # via importlib-resources
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==16.4.3
|
||||
virtualenv==20.0.10
|
||||
vultr==1.0.1
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
|
@ -136,6 +141,6 @@ wrapt==1.11.1 # via aws-xray-sdk
|
|||
xmltodict==0.12.0 # via moto
|
||||
yamlordereddictloader==0.4.0
|
||||
zc.lockfile==1.4 # via cherrypy
|
||||
zipp==0.6.0 # via importlib-metadata
|
||||
zipp==0.6.0 # via importlib-metadata, importlib-resources
|
||||
# Passthrough dependencies from pkg/osx/req.txt
|
||||
pyobjc==5.1.2
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
# pip-compile -o requirements/static/py3.5/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
|
||||
#
|
||||
apache-libcloud==2.4.0
|
||||
appdirs==1.4.3 # via virtualenv
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
|
@ -29,12 +30,14 @@ clustershell==1.8.1
|
|||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1
|
||||
distlib==0.3.0 # via virtualenv
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
enum34==1.1.6
|
||||
filelock==3.0.12 # via virtualenv
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
|
@ -42,7 +45,8 @@ gitdb==0.6.4
|
|||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8
|
||||
importlib-metadata==0.23 # via pluggy, pytest
|
||||
importlib-metadata==0.23 # via importlib-resources, pluggy, pytest, virtualenv
|
||||
importlib-resources==1.3.1 # via virtualenv
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
|
@ -104,7 +108,7 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pathlib2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, websocket-client
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pathlib2, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, virtualenv, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
|
@ -112,7 +116,7 @@ tempora==1.14.1 # via portend
|
|||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==16.4.3
|
||||
virtualenv==20.0.10
|
||||
vultr==1.0.1
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
|
@ -122,6 +126,6 @@ wrapt==1.11.1 # via aws-xray-sdk
|
|||
xmltodict==0.12.0 # via moto
|
||||
yamlordereddictloader==0.4.0
|
||||
zc.lockfile==1.4 # via cherrypy
|
||||
zipp==0.6.0 # via importlib-metadata
|
||||
zipp==0.6.0 # via importlib-metadata, importlib-resources
|
||||
# Passthrough dependencies from pkg/osx/req.txt
|
||||
pyobjc==5.1.2
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
# pip-compile -o requirements/static/py3.6/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
|
||||
#
|
||||
apache-libcloud==2.4.0
|
||||
appdirs==1.4.3 # via virtualenv
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
|
@ -29,12 +30,14 @@ clustershell==1.8.1
|
|||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1
|
||||
distlib==0.3.0 # via virtualenv
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
enum34==1.1.6
|
||||
filelock==3.0.12 # via virtualenv
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
|
@ -42,7 +45,8 @@ gitdb==0.6.4
|
|||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8
|
||||
importlib-metadata==0.23 # via pluggy, pytest
|
||||
importlib-metadata==0.23 # via importlib-resources, pluggy, pytest, virtualenv
|
||||
importlib-resources==1.3.1 # via virtualenv
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
|
@ -103,7 +107,7 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, websocket-client
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, virtualenv, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
|
@ -111,7 +115,7 @@ tempora==1.14.1 # via portend
|
|||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==16.4.3
|
||||
virtualenv==20.0.10
|
||||
vultr==1.0.1
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
|
@ -121,6 +125,6 @@ wrapt==1.11.1 # via aws-xray-sdk
|
|||
xmltodict==0.12.0 # via moto
|
||||
yamlordereddictloader==0.4.0
|
||||
zc.lockfile==1.4 # via cherrypy
|
||||
zipp==0.6.0 # via importlib-metadata
|
||||
zipp==0.6.0 # via importlib-metadata, importlib-resources
|
||||
# Passthrough dependencies from pkg/osx/req.txt
|
||||
pyobjc==5.1.2
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
# pip-compile -o requirements/static/py3.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
|
||||
#
|
||||
apache-libcloud==2.4.0
|
||||
appdirs==1.4.3 # via virtualenv
|
||||
argh==0.26.2 # via watchdog
|
||||
asn1crypto==1.3.0 # via certvalidator, cryptography, oscrypto
|
||||
atomicwrites==1.3.0 # via pytest
|
||||
|
@ -29,12 +30,14 @@ clustershell==1.8.1
|
|||
contextlib2==0.5.5 # via cherrypy
|
||||
croniter==0.3.29
|
||||
cryptography==2.6.1
|
||||
distlib==0.3.0 # via virtualenv
|
||||
dnspython==1.16.0
|
||||
docker-pycreds==0.4.0 # via docker
|
||||
docker==3.7.2
|
||||
docutils==0.14 # via botocore
|
||||
ecdsa==0.13.3 # via python-jose
|
||||
enum34==1.1.6
|
||||
filelock==3.0.12 # via virtualenv
|
||||
future==0.17.1 # via python-jose
|
||||
genshi==0.7.3
|
||||
gitdb2==2.0.5 # via gitpython
|
||||
|
@ -42,7 +45,7 @@ gitdb==0.6.4
|
|||
gitpython==2.1.11
|
||||
google-auth==1.6.3 # via kubernetes
|
||||
idna==2.8
|
||||
importlib-metadata==0.23 # via pluggy, pytest
|
||||
importlib-metadata==0.23 # via pluggy, pytest, virtualenv
|
||||
ipaddress==1.0.22
|
||||
jaraco.functools==2.0 # via tempora
|
||||
jinja2==2.10.1
|
||||
|
@ -103,7 +106,7 @@ s3transfer==0.2.0 # via boto3
|
|||
salttesting==2017.6.1
|
||||
scp==0.13.2 # via junos-eznc
|
||||
setproctitle==1.1.10
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, websocket-client
|
||||
six==1.12.0 # via bcrypt, cheroot, cherrypy, cryptography, docker, docker-pycreds, google-auth, junos-eznc, kubernetes, mock, more-itertools, moto, ncclient, packaging, pynacl, pyopenssl, pytest, python-dateutil, python-jose, pyvmomi, responses, salttesting, tempora, vcert, virtualenv, websocket-client
|
||||
smmap2==2.0.5 # via gitdb2
|
||||
smmap==0.9.0
|
||||
strict-rfc3339==0.7
|
||||
|
@ -111,7 +114,7 @@ tempora==1.14.1 # via portend
|
|||
timelib==0.2.4
|
||||
urllib3==1.24.2 # via botocore, kubernetes, python-etcd, requests
|
||||
vcert==0.7.3
|
||||
virtualenv==16.4.3
|
||||
virtualenv==20.0.10
|
||||
vultr==1.0.1
|
||||
watchdog==0.9.0
|
||||
wcwidth==0.1.7 # via pytest
|
||||
|
|
|
@@ -12,7 +12,7 @@ import importlib


class TornadoImporter(object):

    def find_module(self, module_name, package_path):
    def find_module(self, module_name, package_path=None):
        if module_name.startswith('tornado'):
            return self
        return None
||||
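The class in this hunk is a PEP 302-style meta-path finder; the change only gives `package_path` a default so the importer also works when Python calls `find_module` with a single argument. Below is a minimal self-contained sketch of that pattern; the `salt.ext` redirect target in `load_module` is an assumption for illustration, not a quote of the real module.

```python
import importlib
import sys


class TornadoImporter(object):
    '''Meta-path finder that redirects ``tornado`` imports to a vendored copy.'''

    def find_module(self, module_name, package_path=None):
        # package_path defaults to None so both call styles work.
        if module_name.startswith('tornado'):
            return self
        return None

    def load_module(self, name):
        # Assumed vendored location, for illustration only.
        mod = importlib.import_module('salt.ext.{}'.format(name))
        sys.modules[name] = mod
        return mod


sys.meta_path = [TornadoImporter()] + sys.meta_path
```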
|
|
(File diff suppressed because it is too large.)
|
@@ -617,14 +617,17 @@ class RemoteFuncs(object):
        if 'allow_tgt' in mine_entry:
            # Only determine allowed targets if any have been specified.
            # This prevents having to add a list of all minions as allowed targets.
            get_minion = checker.check_minions(
                mine_entry['allow_tgt'],
                mine_entry.get('allow_tgt_type', 'glob'))['minions']
            # the minion in allow_tgt does not exist
            if not get_minion:
                continue
            salt.utils.dictupdate.set_dict_key_value(
                minion_side_acl,
                '{}:{}'.format(minion, function),
                checker.check_minions(
                    mine_entry['allow_tgt'],
                    mine_entry.get('allow_tgt_type', 'glob')
                )['minions']
            )
                get_minion
            )
        if salt.utils.mine.minion_side_acl_denied(minion_side_acl, minion, function, load['id']):
            continue
        if _ret_dict:
||||
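To make that bookkeeping concrete, here is a small self-contained sketch of the same idea with made-up data shapes and minion names: resolve the allowed targets once, cache them under `<mining minion>:<function>`, and consult that cache for each requesting minion.

```python
# Illustrative sketch only; the real code uses salt.utils.dictupdate and
# salt.utils.mine helpers with the signatures shown in the diff above.
minion_side_acl = {}


def record_allowed(minion, function, allowed_minions):
    # Equivalent in spirit to set_dict_key_value(acl, '{}:{}'.format(...), allowed)
    minion_side_acl.setdefault(minion, {})[function] = allowed_minions


def acl_denied(minion, function, requesting_id):
    allowed = minion_side_acl.get(minion, {}).get(function)
    # No entry means the function was shared without an allow_tgt restriction.
    return allowed is not None and requesting_id not in allowed


record_allowed('web01', 'network.ip_addrs', ['db01', 'db02'])
assert acl_denied('web01', 'network.ip_addrs', 'mail01')
assert not acl_denied('web01', 'network.ip_addrs', 'db01')
```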
|
|
|
@@ -27,6 +27,7 @@ from salt.ext import six
from salt._compat import ipaddress
from salt.utils.network import parse_host_port
from salt.ext.six.moves import range
from salt.template import SLS_ENCODING
from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO
import salt.transport.client
import salt.defaults.exitcodes

@@ -865,11 +866,11 @@ class SMinion(MinionBase):
            penv = 'base'
            cache_top = {penv: {self.opts['id']: ['cache']}}
            with salt.utils.files.fopen(ptop, 'wb') as fp_:
                salt.utils.yaml.safe_dump(cache_top, fp_)
                salt.utils.yaml.safe_dump(cache_top, fp_, encoding=SLS_ENCODING)
            os.chmod(ptop, 0o600)
            cache_sls = os.path.join(pdir, 'cache.sls')
            with salt.utils.files.fopen(cache_sls, 'wb') as fp_:
                salt.utils.yaml.safe_dump(self.opts['pillar'], fp_)
                salt.utils.yaml.safe_dump(self.opts['pillar'], fp_, encoding=SLS_ENCODING)
            os.chmod(cache_sls, 0o600)
||||
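The reason for threading the encoding through here is that the cache files are opened in binary mode. A quick standalone illustration with plain PyYAML (not Salt's wrappers): without the `encoding` argument, `safe_dump()` emits text and the write to a `'wb'` handle fails with a TypeError on Python 3.

```python
import yaml

pillar = {'roles': ['web'], 'region': 'eu-west-1'}  # example data

with open('/tmp/cache.sls', 'wb') as fp_:
    # encoding makes PyYAML emit bytes, matching the binary file handle
    yaml.safe_dump(pillar, fp_, encoding='utf-8')
```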
|
||||
|
||||
|
|
|
@ -239,28 +239,16 @@ def _parse_rules(sg, rules):
|
|||
def get_all_security_groups(groupnames=None, group_ids=None, filters=None,
|
||||
region=None, key=None, keyid=None, profile=None):
|
||||
'''
|
||||
Return a list of all Security Groups matching the given criteria and filters.
|
||||
Return a list of all Security Groups matching the given criteria and
|
||||
filters.
|
||||
|
||||
Note that the 'groupnames' argument only functions correctly for EC2 Classic
|
||||
and default VPC Security Groups. To find groups by name in other VPCs you'll
|
||||
want to use the 'group-name' filter instead.
|
||||
Note that the ``groupnames`` argument only functions correctly for EC2
|
||||
Classic and default VPC Security Groups. To find groups by name in other
|
||||
VPCs you'll want to use the ``group-name`` filter instead.
|
||||
|
||||
Valid keys for the filters argument are:
|
||||
description - The description of the security group.
|
||||
egress.ip-permission.prefix-list-id - The ID (prefix) of the AWS service to which the security group allows access.
|
||||
group-id - The ID of the security group.
|
||||
group-name - The name of the security group.
|
||||
ip-permission.cidr - A CIDR range that has been granted permission.
|
||||
ip-permission.from-port - The start of port range for the TCP and UDP protocols, or an ICMP type number.
|
||||
ip-permission.group-id - The ID of a security group that has been granted permission.
|
||||
ip-permission.group-name - The name of a security group that has been granted permission.
|
||||
ip-permission.protocol - The IP protocol for the permission (tcp | udp | icmp or a protocol number).
|
||||
ip-permission.to-port - The end of port range for the TCP and UDP protocols, or an ICMP code.
|
||||
ip-permission.user-id - The ID of an AWS account that has been granted permission.
|
||||
owner-id - The AWS account ID of the owner of the security group.
|
||||
tag-key - The key of a tag assigned to the security group.
|
||||
tag-value - The value of a tag assigned to the security group.
|
||||
vpc-id - The ID of the VPC specified when the security group was created.
|
||||
The valid keys for the ``filters`` argument can be found in `AWS's API
|
||||
documentation
|
||||
<https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroups.html>`_.
|
||||
|
||||
CLI example::
|
||||
|
||||
|
@ -590,7 +578,7 @@ def set_tags(tags,
|
|||
keyid=None,
|
||||
profile=None):
|
||||
'''
|
||||
sets tags on a security group
|
||||
Sets tags on a security group.
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
|
@ -654,7 +642,7 @@ def delete_tags(tags,
|
|||
keyid=None,
|
||||
profile=None):
|
||||
'''
|
||||
deletes tags from a security group
|
||||
Deletes tags from a security group.
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
|
|
|
@@ -344,7 +344,7 @@ def _run(cmd,
        # The last item in the list [-1] is the current method.
        # The third item[2] in each tuple is the name of that method.
        if stack[-2][2] == 'script':
            cmd = 'Powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass -File ' + cmd
            cmd = 'Powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass {0}'.format(cmd.replace('"', '\\"'))
        elif encoded_cmd:
            cmd = 'Powershell -NonInteractive -EncodedCommand {0}'.format(cmd)
        else:
||||
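The effect of the new formatting, escaping embedded double quotes instead of handing the path to `-File`, is that a script path plus inline PowerShell arguments survives the outer wrapper. A small string-only sketch with made-up paths and arguments:

```python
# Hypothetical values; this is not the full salt.modules.cmdmod logic.
script = r'C:\Temp\deploy.ps1'
args = '-Environment "staging" -Force'

cmd = '{0} {1}'.format(script, args)
wrapped = 'Powershell -NonInteractive -NoProfile -ExecutionPolicy Bypass {0}'.format(
    cmd.replace('"', '\\"'))

# The escaped quotes keep the inline arguments intact inside the outer
# PowerShell invocation, which is what allows cmd.script args to work.
print(wrapped)
```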
|
|
|
@ -194,10 +194,13 @@ def update(clear=False, mine_functions=None):
|
|||
log.error('Function %s in mine.update failed to execute', function_name or function_alias)
|
||||
log.debug('Error: %s', trace)
|
||||
continue
|
||||
mine_data[function_alias] = salt.utils.mine.wrap_acl_structure(
|
||||
res,
|
||||
**minion_acl
|
||||
)
|
||||
if minion_acl.get('allow_tgt'):
|
||||
mine_data[function_alias] = salt.utils.mine.wrap_acl_structure(
|
||||
res,
|
||||
**minion_acl
|
||||
)
|
||||
else:
|
||||
mine_data[function_alias] = res
|
||||
return _mine_store(mine_data, clear)
|
||||
|
||||
|
||||
|
@ -213,9 +216,13 @@ def send(name, *args, **kwargs):
|
|||
:param str mine_function: The name of the execution_module.function to run
|
||||
and whose value will be stored in the salt mine. Defaults to ``name``.
|
||||
:param str allow_tgt: Targeting specification for ACL. Specifies which minions
|
||||
are allowed to access this function.
|
||||
are allowed to access this function. Please note both your master and
|
||||
minion need to be on, at least, version 3000 for this to work properly.
|
||||
|
||||
:param str allow_tgt_type: Type of the targeting specification. This value will
|
||||
be ignored if ``allow_tgt`` is not specified.
|
||||
be ignored if ``allow_tgt`` is not specified. Please note both your
|
||||
master and minion need to be on, at least, version 3000 for this to work
|
||||
properly.
|
||||
|
||||
Remaining args and kwargs will be passed on to the function to run.
|
||||
|
||||
|
@ -252,11 +259,15 @@ def send(name, *args, **kwargs):
|
|||
log.error('Function %s in mine.send failed to execute', mine_function or name)
|
||||
log.debug('Error: %s', trace)
|
||||
return False
|
||||
mine_data[name] = salt.utils.mine.wrap_acl_structure(
|
||||
res,
|
||||
allow_tgt=allow_tgt,
|
||||
allow_tgt_type=allow_tgt_type
|
||||
)
|
||||
|
||||
if allow_tgt:
|
||||
mine_data[name] = salt.utils.mine.wrap_acl_structure(
|
||||
res,
|
||||
allow_tgt=allow_tgt,
|
||||
allow_tgt_type=allow_tgt_type
|
||||
)
|
||||
else:
|
||||
mine_data[name] = res
|
||||
return _mine_store(mine_data)
|
||||
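A hypothetical sketch of that branching; the exact wrapper layout produced by `salt.utils.mine.wrap_acl_structure` is assumed here for illustration. The point is that mine data keeps its legacy, unwrapped shape unless `allow_tgt` is actually used.

```python
# Assumed wrapper layout and example values; not the real Salt structure.
def store_mine_value(mine_data, name, res, allow_tgt=None, allow_tgt_type='glob'):
    if allow_tgt:
        mine_data[name] = {
            'mine_acl_wrapper': {            # placeholder key, for illustration
                'data': res,
                'allow_tgt': allow_tgt,
                'allow_tgt_type': allow_tgt_type,
            }
        }
    else:
        mine_data[name] = res                # legacy layout, readable everywhere
    return mine_data


print(store_mine_value({}, 'network.ip_addrs', ['10.0.0.5'], allow_tgt='db*'))
print(store_mine_value({}, 'network.ip_addrs', ['10.0.0.5']))
```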
|
||||
|
||||
|
|
|
@@ -35,6 +35,8 @@ Module to provide MySQL compatibility to salt.

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import copy
import hashlib
import time
import logging
import re

@@ -261,6 +263,12 @@ def __virtual__():
    return bool(MySQLdb), 'No python mysql client installed.' if MySQLdb is None else ''


def __mysql_hash_password(password):
    _password = hashlib.sha1(password.encode()).digest()
    _password = '*{0}'.format(hashlib.sha1(_password).hexdigest().upper())
    return _password


def __check_table(name, table, **connection_args):
    dbc = _connect(**connection_args)
    if dbc is None:
||||
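This helper reproduces MySQL's `mysql_native_password` hash format, an upper-case hex SHA1 of the SHA1 digest prefixed with `*`, so a clear-text password can be compared against the stored `authentication_string`. A standalone check:

```python
import hashlib


def mysql_native_password(password):
    # SHA1(SHA1(password)), hex-encoded, upper-cased and prefixed with '*'
    stage1 = hashlib.sha1(password.encode()).digest()
    return '*{0}'.format(hashlib.sha1(stage1).hexdigest().upper())


# MySQL's legacy PASSWORD('password') is commonly documented as
# *2470C0C06DEE42FD1618BB99005ADCA2EC9D1E19; the helper reproduces it.
print(mysql_native_password('password'))
```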
|
@ -307,6 +315,9 @@ def __optimize_table(name, table, **connection_args):
|
|||
|
||||
|
||||
def __password_column(**connection_args):
|
||||
if 'mysql.password_column'in __context__:
|
||||
return __context__['mysql.password_column']
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return 'Password'
|
||||
|
@ -321,9 +332,34 @@ def __password_column(**connection_args):
|
|||
}
|
||||
_execute(cur, qry, args)
|
||||
if int(cur.rowcount) > 0:
|
||||
return 'Password'
|
||||
__context__['mysql.password_column'] = 'Password'
|
||||
else:
|
||||
return 'authentication_string'
|
||||
__context__['mysql.password_column'] = 'authentication_string'
|
||||
|
||||
return __context__['mysql.password_column']
|
||||
|
||||
|
||||
def __get_auth_plugin(user, host, **connection_args):
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return []
|
||||
cur = dbc.cursor(MySQLdb.cursors.DictCursor)
|
||||
try:
|
||||
qry = 'SELECT plugin FROM mysql.user WHERE User=%(user)s and Host=%(host)s'
|
||||
args = {'user': user, 'host': host}
|
||||
_execute(cur, qry, args)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
err = 'MySQL Error {0}: {1}'.format(*exc.args)
|
||||
__context__['mysql.error'] = err
|
||||
log.error(err)
|
||||
return 'mysql_native_password'
|
||||
results = cur.fetchall()
|
||||
log.debug(results)
|
||||
|
||||
if results:
|
||||
return results[0].get('plugin', 'mysql_native_password')
|
||||
else:
|
||||
return 'mysql_native_password'
|
||||
|
||||
|
||||
def _connect(**kwargs):
|
||||
|
@ -385,6 +421,10 @@ def _connect(**kwargs):
|
|||
# Ensure MySQldb knows the format we use for queries with arguments
|
||||
MySQLdb.paramstyle = 'pyformat'
|
||||
|
||||
for key in copy.deepcopy(connargs):
|
||||
if not connargs[key]:
|
||||
del connargs[key]
|
||||
|
||||
if connargs.get('passwd', True) is None: # If present but set to None. (Extreme edge case.)
|
||||
log.warning('MySQL password of None found. Attempting passwordless login.')
|
||||
connargs.pop('passwd')
|
||||
|
@ -855,6 +895,9 @@ def version(**connection_args):
|
|||
|
||||
salt '*' mysql.version
|
||||
'''
|
||||
if 'mysql.version' in __context__:
|
||||
return __context__['mysql.version']
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return ''
|
||||
|
@ -869,7 +912,8 @@ def version(**connection_args):
|
|||
return ''
|
||||
|
||||
try:
|
||||
return salt.utils.data.decode(cur.fetchone()[0])
|
||||
__context__['mysql.version'] = salt.utils.data.decode(cur.fetchone()[0])
|
||||
return __context__['mysql.version']
|
||||
except IndexError:
|
||||
return ''
|
||||
|
||||
|
@ -1237,6 +1281,82 @@ def user_list(**connection_args):
|
|||
return results
|
||||
|
||||
|
||||
def _mysql_user_exists(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
password_hash=None,
|
||||
passwordless=False,
|
||||
unix_socket=False,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
|
||||
server_version = salt.utils.data.decode(version(**connection_args))
|
||||
compare_version = '8.0.11'
|
||||
qry = ('SELECT User,Host FROM mysql.user WHERE User = %(user)s AND '
|
||||
'Host = %(host)s')
|
||||
args = {}
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
|
||||
if salt.utils.data.is_true(passwordless):
|
||||
if salt.utils.data.is_true(unix_socket):
|
||||
qry += ' AND plugin=%(unix_socket)s'
|
||||
args['unix_socket'] = 'auth_socket'
|
||||
else:
|
||||
qry += ' AND ' + password_column + ' = \'\''
|
||||
elif password:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
if auth_plugin == 'mysql_native_password':
|
||||
_password = __mysql_hash_password(six.text_type(password))
|
||||
qry += ' AND ' + password_column + ' = %(password)s'
|
||||
args['password'] = six.text_type(_password)
|
||||
else:
|
||||
err = 'Unable to verify password.'
|
||||
log.error(err)
|
||||
__context__['mysql.error'] = err
|
||||
else:
|
||||
qry += ' AND ' + password_column + ' = PASSWORD(%(password)s)'
|
||||
args['password'] = six.text_type(password)
|
||||
elif password_hash:
|
||||
qry += ' AND ' + password_column + ' = %(password)s'
|
||||
args['password'] = password_hash
|
||||
|
||||
return qry, args
|
||||
|
||||
|
||||
def _mariadb_user_exists(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
password_hash=None,
|
||||
passwordless=False,
|
||||
unix_socket=False,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
|
||||
qry = ('SELECT User,Host FROM mysql.user WHERE User = %(user)s AND '
|
||||
'Host = %(host)s')
|
||||
args = {}
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
|
||||
if salt.utils.data.is_true(passwordless):
|
||||
if salt.utils.data.is_true(unix_socket):
|
||||
qry += ' AND plugin=%(unix_socket)s'
|
||||
args['unix_socket'] = 'unix_socket'
|
||||
else:
|
||||
qry += ' AND ' + password_column + ' = \'\''
|
||||
elif password:
|
||||
qry += ' AND ' + password_column + ' = PASSWORD(%(password)s)'
|
||||
args['password'] = six.text_type(password)
|
||||
elif password_hash:
|
||||
qry += ' AND ' + password_column + ' = %(password)s'
|
||||
args['password'] = password_hash
|
||||
|
||||
return qry, args
|
||||
|
||||
|
||||
def user_exists(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
|
@ -1269,7 +1389,6 @@ def user_exists(user,
|
|||
err = 'MySQL Error: Unable to fetch current server version. Last error was: "{}"'.format(last_err)
|
||||
log.error(err)
|
||||
return False
|
||||
compare_version = '10.2.0' if 'MariaDB' in server_version else '8.0.11'
|
||||
dbc = _connect(**connection_args)
|
||||
# Did we fail to connect with the user we are checking
|
||||
# Its password might have previously change with the same command/state
|
||||
|
@ -1287,33 +1406,30 @@ def user_exists(user,
|
|||
if not password_column:
|
||||
password_column = __password_column(**connection_args)
|
||||
|
||||
auth_plugin = __get_auth_plugin(user, host, **connection_args)
|
||||
|
||||
cur = dbc.cursor()
|
||||
qry = ('SELECT User,Host FROM mysql.user WHERE User = %(user)s AND '
|
||||
'Host = %(host)s')
|
||||
args = {}
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
if 'MariaDB' in server_version:
|
||||
qry, args = _mariadb_user_exists(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
passwordless,
|
||||
unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args)
|
||||
else:
|
||||
qry, args = _mysql_user_exists(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
passwordless,
|
||||
unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args)
|
||||
|
||||
if salt.utils.data.is_true(passwordless):
|
||||
if salt.utils.data.is_true(unix_socket):
|
||||
qry += ' AND plugin=%(unix_socket)s'
|
||||
args['unix_socket'] = 'unix_socket'
|
||||
else:
|
||||
qry += ' AND ' + password_column + ' = \'\''
|
||||
elif password:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
run_verify = True
|
||||
else:
|
||||
_password = password
|
||||
qry += ' AND ' + password_column + ' = PASSWORD(%(password)s)'
|
||||
args['password'] = six.text_type(_password)
|
||||
elif password_hash:
|
||||
qry += ' AND ' + password_column + ' = %(password)s'
|
||||
args['password'] = password_hash
|
||||
|
||||
if run_verify:
|
||||
if not verify_login(user, password, **connection_args):
|
||||
return False
|
||||
try:
|
||||
_execute(cur, qry, args)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
|
@ -1358,6 +1474,100 @@ def user_info(user, host='localhost', **connection_args):
|
|||
return result
|
||||
|
||||
|
||||
def _mysql_user_create(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
password_hash=None,
|
||||
allow_passwordless=False,
|
||||
unix_socket=False,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
|
||||
server_version = salt.utils.data.decode(version(**connection_args))
|
||||
compare_version = '8.0.11'
|
||||
|
||||
qry = 'CREATE USER %(user)s@%(host)s'
|
||||
args = {}
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
if password is not None:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
args['auth_plugin'] = auth_plugin
|
||||
qry += ' IDENTIFIED WITH %(auth_plugin)s BY %(password)s'
|
||||
else:
|
||||
qry += ' IDENTIFIED BY %(password)s'
|
||||
args['password'] = six.text_type(password)
|
||||
elif password_hash is not None:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
qry += ' IDENTIFIED BY %(password)s'
|
||||
else:
|
||||
qry += ' IDENTIFIED BY PASSWORD %(password)s'
|
||||
args['password'] = password_hash
|
||||
elif salt.utils.data.is_true(allow_passwordless):
|
||||
if not plugin_status('auth_socket', **connection_args):
|
||||
err = 'The auth_socket plugin is not enabled.'
|
||||
log.error(err)
|
||||
__context__['mysql.error'] = err
|
||||
qry = False
|
||||
else:
|
||||
if salt.utils.data.is_true(unix_socket):
|
||||
if host == 'localhost':
|
||||
qry += ' IDENTIFIED WITH auth_socket'
|
||||
else:
|
||||
log.error(
|
||||
'Auth via unix_socket can be set only for host=localhost'
|
||||
)
|
||||
else:
|
||||
log.error('password or password_hash must be specified, unless '
|
||||
'allow_passwordless=True')
|
||||
qry = False
|
||||
|
||||
return qry, args
|
||||
|
||||
|
||||
def _mariadb_user_create(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
password_hash=None,
|
||||
allow_passwordless=False,
|
||||
unix_socket=False,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
|
||||
qry = 'CREATE USER %(user)s@%(host)s'
|
||||
args = {}
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
if password is not None:
|
||||
qry += ' IDENTIFIED BY %(password)s'
|
||||
args['password'] = six.text_type(password)
|
||||
elif password_hash is not None:
|
||||
qry += ' IDENTIFIED BY PASSWORD %(password)s'
|
||||
args['password'] = password_hash
|
||||
elif salt.utils.data.is_true(allow_passwordless):
|
||||
if not plugin_status('unix_socket', **connection_args):
|
||||
err = 'The unix_socket plugin is not enabled.'
|
||||
log.error(err)
|
||||
__context__['mysql.error'] = err
|
||||
qry = False
|
||||
else:
|
||||
if salt.utils.data.is_true(unix_socket):
|
||||
if host == 'localhost':
|
||||
qry += ' IDENTIFIED VIA unix_socket'
|
||||
else:
|
||||
log.error(
|
||||
'Auth via unix_socket can be set only for host=localhost'
|
||||
)
|
||||
else:
|
||||
log.error('password or password_hash must be specified, unless '
|
||||
'allow_passwordless=True')
|
||||
qry = False
|
||||
|
||||
return qry, args
|
||||
|
||||
|
||||
def user_create(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
|
@ -1365,6 +1575,7 @@ def user_create(user,
|
|||
allow_passwordless=False,
|
||||
unix_socket=False,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
'''
|
||||
Creates a MySQL user
|
||||
|
@ -1396,6 +1607,12 @@ def user_create(user,
|
|||
unix_socket
|
||||
If ``True`` and allow_passwordless is ``True`` then will be used unix_socket auth plugin.
|
||||
|
||||
password_column
|
||||
The password column to use in the user table.
|
||||
|
||||
auth_plugin
|
||||
The authentication plugin to use, default is to use the mysql_native_password plugin.
|
||||
|
||||
.. versionadded:: 0.16.2
|
||||
The ``allow_passwordless`` option was added.
|
||||
|
||||
|
@ -1413,7 +1630,7 @@ def user_create(user,
|
|||
err = 'MySQL Error: Unable to fetch current server version. Last error was: "{}"'.format(last_err)
|
||||
log.error(err)
|
||||
return False
|
||||
compare_version = '10.2.0' if 'MariaDB' in server_version else '8.0.11'
|
||||
|
||||
if user_exists(user, host, **connection_args):
|
||||
log.info('User \'%s\'@\'%s\' already exists', user, host)
|
||||
return False
|
||||
|
@ -1426,34 +1643,29 @@ def user_create(user,
|
|||
password_column = __password_column(**connection_args)
|
||||
|
||||
cur = dbc.cursor()
|
||||
qry = 'CREATE USER %(user)s@%(host)s'
|
||||
args = {}
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
if password is not None:
|
||||
qry += ' IDENTIFIED BY %(password)s'
|
||||
args['password'] = six.text_type(password)
|
||||
elif password_hash is not None:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
if 'MariaDB' in server_version:
|
||||
qry += ' IDENTIFIED BY PASSWORD %(password)s'
|
||||
else:
|
||||
qry += ' IDENTIFIED BY %(password)s'
|
||||
else:
|
||||
qry += ' IDENTIFIED BY PASSWORD %(password)s'
|
||||
args['password'] = password_hash
|
||||
elif salt.utils.data.is_true(allow_passwordless):
|
||||
if salt.utils.data.is_true(unix_socket):
|
||||
if host == 'localhost':
|
||||
qry += ' IDENTIFIED VIA unix_socket'
|
||||
else:
|
||||
log.error(
|
||||
'Auth via unix_socket can be set only for host=localhost'
|
||||
)
|
||||
if 'MariaDB' in server_version:
|
||||
qry, args = _mariadb_user_create(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
allow_passwordless,
|
||||
unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args)
|
||||
else:
|
||||
log.error('password or password_hash must be specified, unless '
|
||||
'allow_passwordless=True')
|
||||
return False
|
||||
qry, args = _mysql_user_create(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
allow_passwordless,
|
||||
unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args)
|
||||
|
||||
if isinstance(qry, bool):
|
||||
return qry
|
||||
|
||||
try:
|
||||
_execute(cur, qry, args)
|
||||
|
@ -1463,7 +1675,12 @@ def user_create(user,
|
|||
log.error(err)
|
||||
return False
|
||||
|
||||
if user_exists(user, host, password, password_hash, password_column=password_column, **connection_args):
|
||||
if user_exists(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
password_column=password_column,
|
||||
**connection_args):
|
||||
msg = 'User \'{0}\'@\'{1}\' has been created'.format(user, host)
|
||||
if not any((password, password_hash)):
|
||||
msg += ' with passwordless login'
|
||||
|
@ -1474,6 +1691,121 @@ def user_create(user,
|
|||
return False
|
||||
|
||||
|
||||
def _mysql_user_chpass(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
password_hash=None,
|
||||
allow_passwordless=False,
|
||||
unix_socket=None,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
server_version = salt.utils.data.decode(version(**connection_args))
|
||||
compare_version = '8.0.11'
|
||||
|
||||
args = {}
|
||||
|
||||
if password is not None:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
password_sql = '%(password)s'
|
||||
else:
|
||||
password_sql = 'PASSWORD(%(password)s)'
|
||||
args['password'] = password
|
||||
elif password_hash is not None:
|
||||
password_sql = '%(password)s'
|
||||
args['password'] = password_hash
|
||||
elif not salt.utils.data.is_true(allow_passwordless):
|
||||
log.error('password or password_hash must be specified, unless '
|
||||
'allow_passwordless=True')
|
||||
return False
|
||||
else:
|
||||
password_sql = '\'\''
|
||||
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED BY %(password)s;"
|
||||
else:
|
||||
qry = ('UPDATE mysql.user SET ' + password_column + '=' + password_sql +
|
||||
' WHERE User=%(user)s AND Host = %(host)s;')
|
||||
if salt.utils.data.is_true(allow_passwordless) and \
|
||||
salt.utils.data.is_true(unix_socket):
|
||||
if host == 'localhost':
|
||||
if not plugin_status('auth_socket', **connection_args):
|
||||
err = 'The auth_socket plugin is not enabled.'
|
||||
log.error(err)
|
||||
__context__['mysql.error'] = err
|
||||
qry = False
|
||||
else:
|
||||
args['unix_socket'] = 'auth_socket'
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED WITH %(unix_socket)s AS %(user)s;"
|
||||
else:
|
||||
qry = ('UPDATE mysql.user SET ' + password_column + '='
|
||||
+ password_sql + ', plugin=%(unix_socket)s' +
|
||||
' WHERE User=%(user)s AND Host = %(host)s;')
|
||||
else:
|
||||
log.error('Auth via unix_socket can be set only for host=localhost')
|
||||
|
||||
return qry, args
|
||||
|
||||
|
||||
def _mariadb_user_chpass(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
password_hash=None,
|
||||
allow_passwordless=False,
|
||||
unix_socket=None,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
|
||||
server_version = salt.utils.data.decode(version(**connection_args))
|
||||
compare_version = '10.4.0'
|
||||
|
||||
args = {}
|
||||
|
||||
if password is not None:
|
||||
password_sql = 'PASSWORD(%(password)s)'
|
||||
args['password'] = password
|
||||
elif password_hash is not None:
|
||||
password_sql = '%(password)s'
|
||||
args['password'] = password_hash
|
||||
elif not salt.utils.data.is_true(allow_passwordless):
|
||||
log.error('password or password_hash must be specified, unless '
|
||||
'allow_passwordless=True')
|
||||
return False
|
||||
else:
|
||||
password_sql = '\'\''
|
||||
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED BY %(password)s;"
|
||||
else:
|
||||
qry = ('UPDATE mysql.user SET ' + password_column + '=' + password_sql +
|
||||
' WHERE User=%(user)s AND Host = %(host)s;')
|
||||
if salt.utils.data.is_true(allow_passwordless) and \
|
||||
salt.utils.data.is_true(unix_socket):
|
||||
if host == 'localhost':
|
||||
if not plugin_status('unix_socket', **connection_args):
|
||||
err = 'The unix_socket plugin is not enabled.'
|
||||
log.error(err)
|
||||
__context__['mysql.error'] = err
|
||||
qry = False
|
||||
else:
|
||||
args['unix_socket'] = 'unix_socket'
|
||||
qry = ('UPDATE mysql.user SET ' + password_column + '='
|
||||
+ password_sql + ', plugin=%(unix_socket)s' +
|
||||
' WHERE User=%(user)s AND Host = %(host)s;')
|
||||
else:
|
||||
log.error('Auth via unix_socket can be set only for host=localhost')
|
||||
|
||||
return qry, args
|
||||
|
||||
|
||||
def user_chpass(user,
|
||||
host='localhost',
|
||||
password=None,
|
||||
|
@ -1526,54 +1858,44 @@ def user_chpass(user,
|
|||
err = 'MySQL Error: Unable to fetch current server version. Last error was: "{}"'.format(last_err)
|
||||
log.error(err)
|
||||
return False
|
||||
compare_version = '10.2.0' if 'MariaDB' in server_version else '8.0.11'
|
||||
args = {}
|
||||
if password is not None:
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
password_sql = '%(password)s'
|
||||
else:
|
||||
password_sql = 'PASSWORD(%(password)s)'
|
||||
args['password'] = password
|
||||
elif password_hash is not None:
|
||||
password_sql = '%(password)s'
|
||||
args['password'] = password_hash
|
||||
elif not salt.utils.data.is_true(allow_passwordless):
|
||||
log.error('password or password_hash must be specified, unless '
|
||||
'allow_passwordless=True')
|
||||
|
||||
if not user_exists(user, host, **connection_args):
|
||||
log.info('User \'%s\'@\'%s\' does not exists', user, host)
|
||||
return False
|
||||
else:
|
||||
password_sql = '\'\''
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
|
||||
if dbc is None:
|
||||
return False
|
||||
|
||||
if not password_column:
|
||||
password_column = __password_column(**connection_args)
|
||||
|
||||
auth_plugin = __get_auth_plugin(user, host, **connection_args)
|
||||
|
||||
cur = dbc.cursor()
|
||||
args['user'] = user
|
||||
args['host'] = host
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
if 'MariaDB' in server_version and password_hash is not None:
|
||||
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED BY PASSWORD %(password)s;"
|
||||
else:
|
||||
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED BY %(password)s;"
|
||||
|
||||
if 'MariaDB' in server_version:
|
||||
qry, args = _mariadb_user_chpass(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
allow_passwordless,
|
||||
unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args)
|
||||
else:
|
||||
qry = ('UPDATE mysql.user SET ' + password_column + '=' + password_sql +
|
||||
' WHERE User=%(user)s AND Host = %(host)s;')
|
||||
if salt.utils.data.is_true(allow_passwordless) and \
|
||||
salt.utils.data.is_true(unix_socket):
|
||||
if host == 'localhost':
|
||||
args['unix_socket'] = 'auth_socket'
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
qry = "ALTER USER %(user)s@%(host)s IDENTIFIED WITH %(unix_socket)s AS %(user)s;"
|
||||
else:
|
||||
qry = ('UPDATE mysql.user SET ' + password_column + '='
|
||||
+ password_sql + ', plugin=%(unix_socket)s' +
|
||||
' WHERE User=%(user)s AND Host = %(host)s;')
|
||||
else:
|
||||
log.error('Auth via unix_socket can be set only for host=localhost')
|
||||
qry, args = _mysql_user_chpass(user,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
allow_passwordless,
|
||||
unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args)
|
||||
|
||||
try:
|
||||
result = _execute(cur, qry, args)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
|
@ -1582,8 +1904,17 @@ def user_chpass(user,
|
|||
log.error(err)
|
||||
return False
|
||||
|
||||
compare_version = '10.4.0' if 'MariaDB' in server_version else '8.0.11'
|
||||
res = False
|
||||
if salt.utils.versions.version_cmp(server_version, compare_version) >= 0:
|
||||
_execute(cur, 'FLUSH PRIVILEGES;')
|
||||
res = True
|
||||
else:
|
||||
if result:
|
||||
_execute(cur, 'FLUSH PRIVILEGES;')
|
||||
res = True
|
||||
|
||||
if res:
|
||||
log.info(
|
||||
'Password for user \'%s\'@\'%s\' has been %s',
|
||||
user, host,
|
||||
|
@ -1591,21 +1922,12 @@ def user_chpass(user,
|
|||
)
|
||||
return True
|
||||
else:
|
||||
if result:
|
||||
_execute(cur, 'FLUSH PRIVILEGES;')
|
||||
log.info(
|
||||
'Password for user \'%s\'@\'%s\' has been %s',
|
||||
user, host,
|
||||
'changed' if any((password, password_hash)) else 'cleared'
|
||||
)
|
||||
return True
|
||||
|
||||
log.info(
|
||||
'Password for user \'%s\'@\'%s\' was not %s',
|
||||
user, host,
|
||||
'changed' if any((password, password_hash)) else 'cleared'
|
||||
)
|
||||
return False
|
||||
log.info(
|
||||
'Password for user \'%s\'@\'%s\' was not %s',
|
||||
user, host,
|
||||
'changed' if any((password, password_hash)) else 'cleared'
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
def user_remove(user,
|
||||
|
@ -1620,6 +1942,12 @@ def user_remove(user,
|
|||
|
||||
salt '*' mysql.user_remove frank localhost
|
||||
'''
|
||||
if not user_exists(user, host, **connection_args):
|
||||
err = 'User \'%s\'@\'%s\' does not exists', user, host
|
||||
__context__['mysql.error'] = err
|
||||
log.info(err)
|
||||
return False
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return False
|
||||
|
@ -2363,3 +2691,153 @@ def verify_login(user, password=None, **connection_args):
|
|||
del __context__['mysql.error']
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def plugins_list(**connection_args):
|
||||
'''
|
||||
Return a list of plugins and their status
|
||||
from the ``SHOW PLUGINS`` query.
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' mysql.plugins_list
|
||||
'''
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return []
|
||||
cur = dbc.cursor()
|
||||
qry = 'SHOW PLUGINS'
|
||||
try:
|
||||
_execute(cur, qry)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
err = 'MySQL Error {0}: {1}'.format(*exc.args)
|
||||
__context__['mysql.error'] = err
|
||||
log.error(err)
|
||||
return []
|
||||
|
||||
ret = []
|
||||
results = cur.fetchall()
|
||||
for dbs in results:
|
||||
ret.append({'name': dbs[0], 'status': dbs[1]})
|
||||
|
||||
log.debug(ret)
|
||||
return ret
|
||||
|
||||
|
||||
def plugin_add(name, soname=None, **connection_args):
|
||||
'''
|
||||
Add a plugina.
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' mysql.plugin_add auth_socket
|
||||
'''
|
||||
if not name:
|
||||
log.error('Plugin name is required.')
|
||||
return False
|
||||
|
||||
if plugin_status(name, **connection_args):
|
||||
log.error('Plugin %s is already installed.', name)
|
||||
return True
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return False
|
||||
cur = dbc.cursor()
|
||||
qry = 'INSTALL PLUGIN {0}'.format(name)
|
||||
|
||||
if soname:
|
||||
qry += ' SONAME "{0}"'.format(soname)
|
||||
else:
|
||||
qry += ' SONAME "{0}.so"'.format(name)
|
||||
|
||||
try:
|
||||
_execute(cur, qry)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
err = 'MySQL Error {0}: {1}'.format(*exc.args)
|
||||
__context__['mysql.error'] = err
|
||||
log.error(err)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def plugin_remove(name, **connection_args):
|
||||
'''
|
||||
Remove a plugin.
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' mysql.plugin_remove auth_socket
|
||||
'''
|
||||
if not name:
|
||||
log.error('Plugin name is required.')
|
||||
return False
|
||||
|
||||
if not plugin_status(name, **connection_args):
|
||||
log.error('Plugin %s is not installed.', name)
|
||||
return True
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return False
|
||||
cur = dbc.cursor()
|
||||
qry = 'UNINSTALL PLUGIN {0}'.format(name)
|
||||
args = {}
|
||||
args['name'] = name
|
||||
|
||||
try:
|
||||
_execute(cur, qry)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
err = 'MySQL Error {0}: {1}'.format(*exc.args)
|
||||
__context__['mysql.error'] = err
|
||||
log.error(err)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def plugin_status(name, **connection_args):
|
||||
'''
|
||||
Return the status of a plugin.
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' mysql.plugin_status auth_socket
|
||||
'''
|
||||
if not name:
|
||||
log.error('Plugin name is required.')
|
||||
return False
|
||||
|
||||
dbc = _connect(**connection_args)
|
||||
if dbc is None:
|
||||
return ''
|
||||
cur = dbc.cursor()
|
||||
qry = 'SELECT PLUGIN_STATUS FROM INFORMATION_SCHEMA.PLUGINS WHERE PLUGIN_NAME = %(name)s'
|
||||
args = {}
|
||||
args['name'] = name
|
||||
|
||||
try:
|
||||
_execute(cur, qry, args)
|
||||
except MySQLdb.OperationalError as exc:
|
||||
err = 'MySQL Error {0}: {1}'.format(*exc.args)
|
||||
__context__['mysql.error'] = err
|
||||
log.error(err)
|
||||
return ''
|
||||
|
||||
try:
|
||||
status = cur.fetchone()
|
||||
if status is None:
|
||||
return ''
|
||||
else:
|
||||
return status[0]
|
||||
except IndexError:
|
||||
return ''
|
||||
|
|
|
@@ -11,6 +11,7 @@ Manage accounts in Samba's passdb using pdbedit
from __future__ import absolute_import, print_function, unicode_literals

# Import Python libs
import re
import logging
import hashlib
import binascii

@@ -22,6 +23,7 @@ except ImportError:
# Import Salt libs
from salt.ext import six
import salt.utils.path
import salt.modules.cmdmod

log = logging.getLogger(__name__)


@@ -39,14 +41,21 @@ def __virtual__():
    '''
    Provides pdbedit if available
    '''
    if salt.utils.path.which('pdbedit'):
        return __virtualname__
    return (
        False,
        '{0} module can only be loaded when pdbedit is available'.format(
            __virtualname__
        )
    )
    # NOTE: check for pdbedit command
    if not salt.utils.path.which('pdbedit'):
        return (False, 'pdbedit command is not available')

    # NOTE: check version is >= 4.8.x
    ver = salt.modules.cmdmod.run('pdbedit -V')
    ver_regex = re.compile(r'^Version\s(\d+)\.(\d+)\.(\d+)$')
    ver_match = ver_regex.match(ver)
    if not ver_match:
        return (False, 'pdbedit -V returned an unknown version format')

    if not (int(ver_match.group(1)) >= 4 and int(ver_match.group(2)) >= 8):
        return (False, 'pdbedit is to old, 4.8.0 or newer is required')

    return __virtualname__


def generate_nt_hash(password):
||||
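A standalone illustration of the version gate added to `__virtual__` above; the sample strings are made up but follow the `Version X.Y.Z` format the regex expects from `pdbedit -V`.

```python
import re

ver_regex = re.compile(r'^Version\s(\d+)\.(\d+)\.(\d+)$')


def pdbedit_supported(version_output):
    match = ver_regex.match(version_output.strip())
    if not match:
        return False, 'unknown version format'
    major, minor, _patch = (int(g) for g in match.groups())
    # Mirrors the check in the diff: major >= 4 and minor >= 8.
    if not (major >= 4 and minor >= 8):
        return False, '4.8.0 or newer is required'
    return True, 'ok'


print(pdbedit_supported('Version 4.9.5'))   # (True, 'ok')
print(pdbedit_supported('Version 4.5.16'))  # (False, '4.8.0 or newer is required')
```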
|
|
|
@@ -88,6 +88,7 @@ import shutil
import sys
import tempfile


# Import Salt libs
import salt.utils.data
import salt.utils.files

@@ -99,6 +100,7 @@ import salt.utils.url
import salt.utils.versions
from salt.ext import six
from salt.exceptions import CommandExecutionError, CommandNotFoundError
import salt.utils.platform

# This needs to be named logger so we don't shadow it in pip.install
logger = logging.getLogger(__name__)  # pylint: disable=C0103

@@ -122,6 +124,19 @@ def __virtual__():
    return 'pip'


def _pip_bin_env(cwd, bin_env):
    """
    Binary builds need to have the 'cwd' set when using pip on Windows. This will
    set cwd if pip is being used in 'bin_env', 'cwd' is None and salt is on windows.
    """

    if salt.utils.platform.is_windows():
        if bin_env is not None and cwd is None and 'pip' in os.path.basename(bin_env):
            cwd = os.path.dirname(bin_env)

    return cwd


def _clear_context(bin_env=None):
    '''
    Remove the cached pip version
||||
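To see what `_pip_bin_env` resolves in practice, here is a small demo with example Windows paths; the platform check is injected as a parameter and `ntpath` is used directly so the snippet runs on any platform.

```python
import ntpath as os_path  # emulate Windows path handling everywhere


def _pip_bin_env(cwd, bin_env, is_windows=True):
    # Same logic as the diff above, with the platform check made a parameter
    # purely for this demo.
    if is_windows:
        if bin_env is not None and cwd is None and 'pip' in os_path.basename(bin_env):
            cwd = os_path.dirname(bin_env)
    return cwd


print(_pip_bin_env(None, r'C:\Python38\Scripts\pip.exe'))       # C:\Python38\Scripts
print(_pip_bin_env(r'C:\work', r'C:\Python38\Scripts\pip.exe')) # explicit cwd wins
```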
|
@ -652,6 +667,8 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
|||
editable=git+https://github.com/worldcompany/djangoembed.git#egg=djangoembed upgrade=True no_deps=True
|
||||
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
cmd = _get_pip_bin(bin_env)
|
||||
cmd.append('install')
|
||||
|
||||
|
@ -666,7 +683,7 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
|||
if error:
|
||||
return error
|
||||
|
||||
cur_version = version(bin_env)
|
||||
cur_version = version(bin_env, cwd)
|
||||
|
||||
if use_wheel:
|
||||
min_version = '1.4'
|
||||
|
@ -1037,6 +1054,8 @@ def uninstall(pkgs=None,
|
|||
salt '*' pip.uninstall <package name> bin_env=/path/to/virtualenv
|
||||
salt '*' pip.uninstall <package name> bin_env=/path/to/pip_bin
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
cmd = _get_pip_bin(bin_env)
|
||||
cmd.extend(['uninstall', '-y'])
|
||||
|
||||
|
@ -1148,12 +1167,14 @@ def freeze(bin_env=None,
|
|||
|
||||
salt '*' pip.freeze bin_env=/home/code/path/to/virtualenv
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
cmd = _get_pip_bin(bin_env)
|
||||
cmd.append('freeze')
|
||||
|
||||
# Include pip, setuptools, distribute, wheel
|
||||
min_version = '8.0.3'
|
||||
cur_version = version(bin_env)
|
||||
cur_version = version(bin_env, cwd)
|
||||
if salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version):
|
||||
logger.warning(
|
||||
'The version of pip installed is %s, which is older than %s. '
|
||||
|
@ -1202,10 +1223,12 @@ def list_(prefix=None,
|
|||
|
||||
salt '*' pip.list salt
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
packages = {}
|
||||
|
||||
if prefix is None or 'pip'.startswith(prefix):
|
||||
packages['pip'] = version(bin_env)
|
||||
packages['pip'] = version(bin_env, cwd)
|
||||
|
||||
for line in freeze(bin_env=bin_env,
|
||||
user=user,
|
||||
|
@ -1249,7 +1272,7 @@ def list_(prefix=None,
|
|||
return packages
|
||||
|
||||
|
||||
def version(bin_env=None):
|
||||
def version(bin_env=None, cwd=None):
|
||||
'''
|
||||
.. versionadded:: 0.17.0
|
||||
|
||||
|
@ -1264,6 +1287,8 @@ def version(bin_env=None):
|
|||
|
||||
salt '*' pip.version
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
contextkey = 'pip.version'
|
||||
if bin_env is not None:
|
||||
contextkey = '{0}.{1}'.format(contextkey, bin_env)
|
||||
|
@ -1274,7 +1299,7 @@ def version(bin_env=None):
|
|||
cmd = _get_pip_bin(bin_env)[:]
|
||||
cmd.append('--version')
|
||||
|
||||
ret = __salt__['cmd.run_all'](cmd, python_shell=False)
|
||||
ret = __salt__['cmd.run_all'](cmd, cwd=cwd, python_shell=False)
|
||||
if ret['retcode']:
|
||||
raise CommandNotFoundError('Could not find a `pip` binary')
|
||||
|
||||
|
@ -1299,10 +1324,12 @@ def list_upgrades(bin_env=None,
|
|||
|
||||
salt '*' pip.list_upgrades
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
cmd = _get_pip_bin(bin_env)
|
||||
cmd.extend(['list', '--outdated'])
|
||||
|
||||
pip_version = version(bin_env)
|
||||
pip_version = version(bin_env, cwd)
|
||||
# Pip started supporting the ability to output json starting with 9.0.0
|
||||
min_version = '9.0'
|
||||
if salt.utils.versions.compare(ver1=pip_version,
|
||||
|
@ -1388,6 +1415,8 @@ def is_installed(pkgname=None,
|
|||
|
||||
salt '*' pip.is_installed salt
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
for line in freeze(bin_env=bin_env, user=user, cwd=cwd):
|
||||
if line.startswith('-f') or line.startswith('#'):
|
||||
# ignore -f line as it contains --find-links directory
|
||||
|
@ -1431,6 +1460,8 @@ def upgrade_available(pkg,
|
|||
|
||||
salt '*' pip.upgrade_available <package name>
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
return pkg in list_upgrades(bin_env=bin_env, user=user, cwd=cwd)
|
||||
|
||||
|
||||
|
@ -1459,6 +1490,8 @@ def upgrade(bin_env=None,
|
|||
|
||||
salt '*' pip.upgrade
|
||||
'''
|
||||
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
ret = {'changes': {},
|
||||
'result': True,
|
||||
'comment': '',
|
||||
|
@ -1544,6 +1577,7 @@ def list_all_versions(pkg,
|
|||
|
||||
salt '*' pip.list_all_versions <package name>
|
||||
'''
|
||||
cwd = _pip_bin_env(cwd, bin_env)
|
||||
cmd = _get_pip_bin(bin_env)
|
||||
cmd.extend(['install', '{0}==versions'.format(pkg)])
|
||||
|
||||
|
|
|
@@ -1133,8 +1133,8 @@ class StateTestLoader(object):
                '{0}.tst'.format(split_sls[-1]))
            ])
        # for this state, find matching test files and load them
        cached_copied_files = set(cached_copied_files)
        for this_cached_test_file in list(cached_copied_files):
        cached_copied_files = list(set(cached_copied_files))
        for this_cached_test_file in cached_copied_files:
            if this_cached_test_file.endswith(tuple(sls_path_names)):
                self.test_files.add(this_cached_test_file)
                cached_copied_files.remove(this_cached_test_file)
||||
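A self-contained sketch of that dedupe-and-collect step, with made-up cache paths: duplicates are dropped first, then files matching the state under test are moved into the loader's test set. The demo iterates over a snapshot before removing, which keeps the loop well defined.

```python
cached_copied_files = [
    '/var/cache/salt/minion/files/base/apache/apache.tst',
    '/var/cache/salt/minion/files/base/apache/apache.tst',  # duplicate copy
    '/var/cache/salt/minion/files/base/ntp/ntp.tst',
]
sls_path_names = ('apache/apache.tst', 'apache.tst')  # hypothetical names

test_files = set()
cached_copied_files = list(set(cached_copied_files))
for this_cached_test_file in list(cached_copied_files):  # iterate a snapshot
    if this_cached_test_file.endswith(tuple(sls_path_names)):
        test_files.add(this_cached_test_file)
        cached_copied_files.remove(this_cached_test_file)

print(sorted(test_files))
```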
|
|
|
@@ -43,6 +43,37 @@ def __virtual__():
    return __virtualname__


def virtualenv_ver(venv_bin, user=None, **kwargs):
    '''
    return virtualenv version if exists
    '''
    # Virtualenv package
    try:
        import virtualenv
        version = getattr(virtualenv, '__version__', None)
        if not version:
            version = virtualenv.virtualenv_version
        virtualenv_version_info = tuple(
            [int(i) for i in version.split('rc')[0].split('.')]
        )
    except ImportError:
        # Unable to import?? Let's parse the version from the console
        version_cmd = [venv_bin, '--version']
        ret = __salt__['cmd.run_all'](
            version_cmd, runas=user, python_shell=False, **kwargs
        )
        if ret['retcode'] > 0 or not ret['stdout'].strip():
            raise CommandExecutionError(
                'Unable to get the virtualenv version output using \'{0}\'. '
                'Returned data: {1}'.format(version_cmd, ret)
            )
        virtualenv_version_info = tuple(
            [int(i) for i in
             ret['stdout'].strip().split('rc')[0].split('.')]
        )
    return virtualenv_version_info


def create(path,
           venv_bin=None,
           system_site_packages=False,

@@ -164,29 +195,7 @@ def create(path,
    )
    # <---- Stop the user if pyvenv only options are used ----------------

    # Virtualenv package
    try:
        import virtualenv
        version = getattr(virtualenv, '__version__',
                          virtualenv.virtualenv_version)
        virtualenv_version_info = tuple(
            [int(i) for i in version.split('rc')[0].split('.')]
        )
    except ImportError:
        # Unable to import?? Let's parse the version from the console
        version_cmd = [venv_bin, '--version']
        ret = __salt__['cmd.run_all'](
            version_cmd, runas=user, python_shell=False, **kwargs
        )
        if ret['retcode'] > 0 or not ret['stdout'].strip():
            raise CommandExecutionError(
                'Unable to get the virtualenv version output using \'{0}\'. '
                'Returned data: {1}'.format(version_cmd, ret)
            )
        virtualenv_version_info = tuple(
            [int(i) for i in
             ret['stdout'].strip().split('rc')[0].split('.')]
        )
    virtualenv_version_info = virtualenv_ver(venv_bin, user=user, **kwargs)

    if distribute:
        if virtualenv_version_info >= (1, 10):
|
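The version parsing shared by both branches of `virtualenv_ver` splits off any `rc` suffix and builds an integer tuple, which is what the later `>= (1, 10)` style comparisons rely on. A quick illustration with made-up version strings:

```python
def parse_version(version):
    # '20.0.10' -> (20, 0, 10); '16.7.0rc2' -> (16, 7, 0): the rc suffix is dropped
    return tuple(int(i) for i in version.split('rc')[0].split('.'))


print(parse_version('20.0.10'))            # (20, 0, 10)
print(parse_version('16.7.0rc2'))          # (16, 7, 0)
print(parse_version('1.9.1') >= (1, 10))   # False, tuples compare element-wise
```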
|
|
@@ -12,8 +12,10 @@ import salt.utils.platform

try:
    import wmi
    import salt.utils.winapi
    HAS_LIBS = True
except ImportError:
    pass
    HAS_LIBS = False

log = logging.getLogger(__name__)


@@ -22,15 +24,25 @@ def __virtual__():
    '''
    Only works on Windows systems
    '''
    if salt.utils.platform.is_windows():
        return 'win_dns_client'
    return (False, "Module win_dns_client: module only works on Windows systems")
    if not salt.utils.platform.is_windows():
        return False, 'Module win_dns_client: module only works on Windows ' \
                      'systems'
    if not HAS_LIBS:
        return False, 'Module win_dns_client: missing required libraries'
    return 'win_dns_client'
||||
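A generic sketch of the import-guard pattern adopted here; the module names are examples and `sys.platform` stands in for `salt.utils.platform` to keep it self-contained. Optional dependencies are probed once at import time and `__virtual__` reports a reason when they are missing.

```python
import sys

try:
    import wmi                      # Windows-only dependency
    HAS_LIBS = True
except ImportError:
    HAS_LIBS = False


def __virtual__():
    if not sys.platform.startswith('win'):
        return False, 'module only works on Windows systems'
    if not HAS_LIBS:
        return False, 'missing required libraries'
    return 'win_dns_client'
```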
|
||||
|
||||
def get_dns_servers(interface='Local Area Connection'):
|
||||
'''
|
||||
Return a list of the configured DNS servers of the specified interface
|
||||
|
||||
Args:
|
||||
interface (str): The name of the network interface. This is the name as
|
||||
it appears in the Control Panel under Network Connections
|
||||
|
||||
Returns:
|
||||
list: A list of dns servers
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
@ -121,7 +133,14 @@ def dns_dhcp(interface='Local Area Connection'):
|
|||
|
||||
def get_dns_config(interface='Local Area Connection'):
|
||||
'''
|
||||
Get the type of DNS configuration (dhcp / static)
|
||||
Get the type of DNS configuration (dhcp / static).
|
||||
|
||||
Args:
|
||||
interface (str): The name of the network interface. This is the
|
||||
Description in the Network Connection Details for the device
|
||||
|
||||
Returns:
|
||||
bool: ``True`` if DNS is configured, otherwise ``False``
|
||||
|
||||
CLI Example:
|
||||
|
||||
|
|
|
@ -4986,6 +4986,12 @@ def _load_policy_definitions(path='c:\\Windows\\PolicyDefinitions',
|
|||
for root, dirs, files in salt.utils.path.os_walk(path):
|
||||
if root == path:
|
||||
for t_admx_file in files:
|
||||
admx_file_name, admx_file_ext = os.path.splitext(t_admx_file)
|
||||
# Only process ADMX files, any other file will cause a
|
||||
# stacktrace later on
|
||||
if not admx_file_ext == '.admx':
|
||||
log.debug('{0} is not an ADMX file'.format(t_admx_file))
|
||||
continue
|
||||
admx_file = os.path.join(root, t_admx_file)
|
||||
# Parse xml for the ADMX file
|
||||
try:
|
||||
|
@ -5001,9 +5007,6 @@ def _load_policy_definitions(path='c:\\Windows\\PolicyDefinitions',
|
|||
namespaces['None'] = namespaces[None]
|
||||
namespaces.pop(None)
|
||||
namespace_string = 'None:'
|
||||
this_prefix = xml_tree.xpath(
|
||||
'/{0}policyDefinitions/{0}policyNamespaces/{0}target/@prefix'.format(namespace_string),
|
||||
namespaces=namespaces)[0]
|
||||
this_namespace = xml_tree.xpath(
|
||||
'/{0}policyDefinitions/{0}policyNamespaces/{0}target/@namespace'.format(namespace_string),
|
||||
namespaces=namespaces)[0]
|
||||
|
@ -5038,7 +5041,7 @@ def _load_policy_definitions(path='c:\\Windows\\PolicyDefinitions',
|
|||
adml_file = os.path.join(
|
||||
root,
|
||||
language,
|
||||
os.path.splitext(t_admx_file)[0] + '.adml')
|
||||
admx_file_name + '.adml')
|
||||
if not __salt__['file.file_exists'](adml_file):
|
||||
log.info('An ADML file in the specified ADML language '
|
||||
'"%s" does not exist for the ADMX "%s", the '
|
||||
|
@ -5048,7 +5051,7 @@ def _load_policy_definitions(path='c:\\Windows\\PolicyDefinitions',
|
|||
adml_file = os.path.join(
|
||||
root,
|
||||
language.split('-')[0],
|
||||
os.path.splitext(t_admx_file)[0] + '.adml')
|
||||
admx_file_name + '.adml')
|
||||
if not __salt__['file.file_exists'](adml_file):
|
||||
log.info('An ADML file in the specified ADML language '
|
||||
'code %s does not exist for the ADMX "%s", '
|
||||
|
@ -5058,7 +5061,7 @@ def _load_policy_definitions(path='c:\\Windows\\PolicyDefinitions',
|
|||
adml_file = os.path.join(
|
||||
root,
|
||||
display_language_fallback,
|
||||
os.path.splitext(t_admx_file)[0] + '.adml')
|
||||
admx_file_name + '.adml')
|
||||
if not __salt__['file.file_exists'](adml_file):
|
||||
log.info('An ADML file in the specified ADML '
|
||||
'fallback language "%s" '
|
||||
|
@ -5070,7 +5073,7 @@ def _load_policy_definitions(path='c:\\Windows\\PolicyDefinitions',
|
|||
adml_file = os.path.join(
|
||||
root,
|
||||
display_language_fallback.split('-')[0],
|
||||
os.path.splitext(t_admx_file)[0] + '.adml')
|
||||
admx_file_name + '.adml')
|
||||
if not __salt__['file.file_exists'](adml_file):
|
||||
msg = ('An ADML file in the specified ADML language '
|
||||
'"{0}" and the fallback language "{1}" do not '
|
||||
|
@ -5526,7 +5529,7 @@ def _write_secedit_data(inf_data):
|
|||
# Success
|
||||
if retcode == 0:
|
||||
# Pop secedit data so it will always be current
|
||||
__context__.pop('lgpo.secedit_data')
|
||||
__context__.pop('lgpo.secedit_data', None)
|
||||
return True
|
||||
# Failure
|
||||
return False
|
||||
|
@@ -5658,25 +5661,48 @@ def _getAdmlPresentationRefId(adml_data, ref_id):
    helper function to check for a presentation label for a policy element
    '''
    search_results = adml_data.xpath('//*[@*[local-name() = "refId"] = "{0}"]'.format(ref_id))
    prepended_text = ''
    alternate_label = ''
    if search_results:
        for result in search_results:
            the_localname = etree.QName(result.tag).localname
            if the_localname == 'textBox' \
                    or the_localname == 'comboBox':

            # We want to prefer result.text as the label, however, if it is none
            # we will fall back to this method for getting the label
            # Brings some code back from:
            # https://github.com/saltstack/salt/pull/55823/files#diff-b2e4dac5ccc17ab548f245371ec5d6faL5658
            if result.text is None:
                # Get the label from the text element above the referenced
                # element. For example:
                # --- taken from AppPrivacy.adml ---
                # <text>Force allow these specific apps (use Package Family Names):</text>
                # <multiTextBox refId="LetAppsSyncWithDevices_ForceAllowTheseApps_List"/>
                # In this case, the label for the refId is the text element
                # above it.
                presentation_element = PRESENTATION_ANCESTOR_XPATH(result)
                if presentation_element:
                    presentation_element = presentation_element[0]
                    if TEXT_ELEMENT_XPATH(presentation_element):
                        for p_item in presentation_element.getchildren():
                            if p_item == result:
                                break
                            if etree.QName(p_item.tag).localname == 'text':
                                if getattr(p_item, 'text'):
                                    alternate_label = getattr(p_item, 'text').rstrip()
                                    if alternate_label.endswith('.'):
                                        alternate_label = ''

            if the_localname in ['textBox', 'comboBox']:
                label_items = result.xpath('.//*[local-name() = "label"]')
                for label_item in label_items:
                    if label_item.text:
                        return (prepended_text + ' ' + label_item.text.rstrip().rstrip(':')).lstrip()
            elif the_localname == 'decimalTextBox' \
                    or the_localname == 'longDecimalTextBox' \
                    or the_localname == 'dropdownList' \
                    or the_localname == 'listBox' \
                    or the_localname == 'checkBox' \
                    or the_localname == 'text' \
                    or the_localname == 'multiTextBox':
                return label_item.text.rstrip().rstrip(':')
            elif the_localname in ['decimalTextBox', 'longDecimalTextBox',
                                   'dropdownList', 'listBox', 'checkBox',
                                   'text', 'multiTextBox']:
                if result.text:
                    return (prepended_text + ' ' + result.text.rstrip().rstrip(':')).lstrip()
                    return result.text.rstrip().rstrip(':')
                else:
                    return alternate_label.rstrip(':')
    return None
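The block added above prefers the element's own text as the label and, when that is empty, walks back to the `<text>` element that sits directly above the referenced presentation element. A self-contained lxml sketch of that fallback, using an invented ADML-like fragment and an illustrative `find_label` helper (not the module's API):

```python
from lxml import etree

ADML_FRAGMENT = b'''
<presentation xmlns="urn:example:adml">
  <text>Force allow these specific apps (use Package Family Names):</text>
  <multiTextBox refId="LetAppsSyncWithDevices_ForceAllowTheseApps_List"/>
</presentation>
'''


def find_label(root, ref_id):
    '''Return the element's own text, or the preceding <text> sibling's text.'''
    results = root.xpath('//*[@*[local-name() = "refId"] = "{0}"]'.format(ref_id))
    for result in results:
        if result.text:
            return result.text.rstrip().rstrip(':')
        label = ''
        for sibling in result.getparent():
            if sibling == result:
                break
            if etree.QName(sibling.tag).localname == 'text' and sibling.text:
                label = sibling.text.rstrip()
        return label.rstrip(':') or None


print(find_label(etree.fromstring(ADML_FRAGMENT),
                 'LetAppsSyncWithDevices_ForceAllowTheseApps_List'))
# Force allow these specific apps (use Package Family Names)
```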
@@ -6139,6 +6165,10 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,

    if standard_element_expected_string and not check_deleted:
        if this_element_value is not None:
            # Sometimes values come in as strings
            if isinstance(this_element_value, str):
                log.debug('Converting {0} to bytes'.format(this_element_value))
                this_element_value = this_element_value.encode('utf-32-le')
            expected_string = b''.join(['['.encode('utf-16-le'),
                                        reg_key,
                                        encoded_null,
@@ -6777,13 +6807,16 @@ def _regexSearchKeyValueCombo(policy_data, policy_regpath, policy_regkey):
    for a policy_regpath and policy_regkey combo
    '''
    if policy_data:
        specialValueRegex = salt.utils.stringutils.to_bytes(r'(\*\*Del\.|\*\*DelVals\.){0,1}')
        regex_str = [r'(\*', r'\*', 'D', 'e', 'l', r'\.', r'|\*', r'\*', 'D',
                     'e', 'l', 'V', 'a', 'l', 's', r'\.', '){0,1}']
        specialValueRegex = '\x00'.join(regex_str)
        specialValueRegex = salt.utils.stringutils.to_bytes(specialValueRegex)
        _thisSearch = b''.join([salt.utils.stringutils.to_bytes(r'\['),
                                re.escape(policy_regpath),
                                b'\00;',
                                specialValueRegex,
                                re.escape(policy_regkey),
                                b'\00;'])
                                re.escape(policy_regpath),
                                b'\x00;\x00',
                                specialValueRegex,
                                re.escape(policy_regkey.lstrip(b'\x00')),
                                b'\x00;'])
        match = re.search(_thisSearch, policy_data, re.IGNORECASE)
        if match:
            # add 2 so we get the ']' and the \00
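Registry.pol data is UTF-16-LE, so every ASCII character in the file is followed by a NUL byte; the new code therefore builds the `**Del.` / `**DelVals.` prefix regex by joining one-character fragments with `\x00`. A small standalone sketch of why that lines up (the `latin-1` encode stands in for `salt.utils.stringutils.to_bytes`):

```python
import re

# Same fragment list as in the hunk above.
regex_str = [r'(\*', r'\*', 'D', 'e', 'l', r'\.', r'|\*', r'\*', 'D',
             'e', 'l', 'V', 'a', 'l', 's', r'\.', '){0,1}']
special_value_regex = '\x00'.join(regex_str).encode('latin-1')

# '**Del.' as it appears inside UTF-16-LE encoded Registry.pol content.
sample = '**Del.'.encode('utf-16-le')

match = re.search(special_value_regex, sample)
print(match.group(0) == sample)  # True: the NUL-joined pattern tracks the encoding
```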
@@ -95,7 +95,7 @@ TASK_TRIGGER_LOGON = 9
TASK_TRIGGER_SESSION_STATE_CHANGE = 11

duration = {'Immediately': 'PT0M',
            'Indefinitely': 'PT0M',
            'Indefinitely': '',
            'Do not wait': 'PT0M',
            '15 seconds': 'PT15S',
            '30 seconds': 'PT30S',
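The `duration` table maps the GUI wording to the ISO 8601 duration strings used in Task Scheduler XML ('PT0M' is zero minutes, 'PT15S' is fifteen seconds); the fix maps 'Indefinitely' to an empty string rather than 'PT0M'. A tiny illustrative parser for that notation, assuming only the PT#H#M#S forms used here (not part of the module):

```python
import re


def iso8601_duration_to_seconds(duration):
    '''Convert a PT#H#M#S duration to seconds; '' is treated as "no limit".'''
    if not duration:
        return None
    match = re.match(r'^PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?$', duration)
    if not match:
        raise ValueError('unsupported duration: {0}'.format(duration))
    hours, minutes, seconds = (int(part) if part else 0 for part in match.groups())
    return hours * 3600 + minutes * 60 + seconds


print(iso8601_duration_to_seconds('PT15S'))  # 15
print(iso8601_duration_to_seconds('PT0M'))   # 0
print(iso8601_duration_to_seconds(''))       # None
```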
@@ -571,10 +571,7 @@ def create_task(name,
                                   logon_type=task_definition.Principal.LogonType)

    # Verify task was created
    if name in list_tasks(location):
        return True
    else:
        return False
    return name in list_tasks(location)


def create_task_from_xml(name,

@@ -679,10 +676,7 @@ def create_task_from_xml(name,
        log.debug('Failed to create task: %s', failure_code)

    # Verify creation
    if name in list_tasks(location):
        return True
    else:
        return False
    return name in list_tasks(location)


def create_folder(name, location='\\'):

@@ -724,10 +718,7 @@ def create_folder(name, location='\\'):
    task_folder.CreateFolder(name)

    # Verify creation
    if name in list_folders(location):
        return True
    else:
        return False
    return name in list_folders(location)


def edit_task(name=None,

@@ -1132,10 +1123,7 @@ def delete_task(name, location='\\'):
    task_folder.DeleteTask(name, 0)

    # Verify deletion
    if name not in list_tasks(location):
        return True
    else:
        return False
    return name not in list_tasks(location)


def delete_folder(name, location='\\'):
@@ -39,7 +39,7 @@ log = logging.getLogger(__name__)
HAS_DJANGO = False

try:
    from django import dispatch
    from django import dispatch  # pylint: disable=E0611
    HAS_DJANGO = True
except ImportError:
    HAS_DJANGO = False
@@ -10,7 +10,7 @@ Runner to provide F5 Load Balancer functionality
    .. code-block:: yaml

        load_balancers:
          bigip1.example.com
          bigip1.example.com:
            username: admin
            password: secret
          bigip2.example.com:
@ -73,6 +73,7 @@ def present(name,
|
|||
allow_passwordless=False,
|
||||
unix_socket=False,
|
||||
password_column=None,
|
||||
auth_plugin='mysql_native_password',
|
||||
**connection_args):
|
||||
'''
|
||||
Ensure that the named user is present with the specified properties. A
|
||||
|
@ -127,7 +128,11 @@ def present(name,
|
|||
ret['result'] = False
|
||||
return ret
|
||||
else:
|
||||
if __salt__['mysql.user_exists'](name, host, passwordless=True, unix_socket=unix_socket, password_column=password_column,
|
||||
if __salt__['mysql.user_exists'](name,
|
||||
host,
|
||||
passwordless=True,
|
||||
unix_socket=unix_socket,
|
||||
password_column=password_column,
|
||||
**connection_args):
|
||||
ret['comment'] += ' with passwordless login'
|
||||
return ret
|
||||
|
@ -138,11 +143,19 @@ def present(name,
|
|||
ret['result'] = False
|
||||
return ret
|
||||
else:
|
||||
if __salt__['mysql.user_exists'](name, host, password, password_hash, unix_socket=unix_socket, password_column=password_column,
|
||||
if __salt__['mysql.user_exists'](name,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
unix_socket=unix_socket,
|
||||
password_column=password_column,
|
||||
**connection_args):
|
||||
ret['comment'] += ' with the desired password'
|
||||
if password_hash and not password:
|
||||
ret['comment'] += ' hash'
|
||||
if auth_plugin == 'mysql_native_password':
|
||||
ret['comment'] += ' with the desired password'
|
||||
if password_hash and not password:
|
||||
ret['comment'] += ' hash'
|
||||
else:
|
||||
ret['comment'] += '. Unable to verify password.'
|
||||
return ret
|
||||
else:
|
||||
err = _get_mysql_error()
|
||||
|
@ -152,7 +165,10 @@ def present(name,
|
|||
return ret
|
||||
|
||||
# check if user exists with a different password
|
||||
if __salt__['mysql.user_exists'](name, host, unix_socket=unix_socket, **connection_args):
|
||||
if __salt__['mysql.user_exists'](name,
|
||||
host,
|
||||
unix_socket=unix_socket,
|
||||
**connection_args):
|
||||
|
||||
# The user is present, change the password
|
||||
if __opts__['test']:
|
||||
|
@ -168,9 +184,12 @@ def present(name,
|
|||
ret['comment'] += 'changed'
|
||||
return ret
|
||||
|
||||
if __salt__['mysql.user_chpass'](name, host,
|
||||
password, password_hash,
|
||||
allow_passwordless, unix_socket,
|
||||
if __salt__['mysql.user_chpass'](name,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
allow_passwordless,
|
||||
unix_socket,
|
||||
**connection_args):
|
||||
ret['comment'] = \
|
||||
'Password for user {0}@{1} has been ' \
|
||||
|
@ -209,9 +228,14 @@ def present(name,
|
|||
ret['result'] = False
|
||||
return ret
|
||||
|
||||
if __salt__['mysql.user_create'](name, host,
|
||||
password, password_hash,
|
||||
allow_passwordless, unix_socket=unix_socket, password_column=password_column,
|
||||
if __salt__['mysql.user_create'](name,
|
||||
host,
|
||||
password,
|
||||
password_hash,
|
||||
allow_passwordless,
|
||||
unix_socket=unix_socket,
|
||||
password_column=password_column,
|
||||
auth_plugin=auth_plugin,
|
||||
**connection_args):
|
||||
ret['comment'] = \
|
||||
'The user {0}@{1} has been added'.format(name, host)
|
||||
|
|
|
@@ -51,7 +51,7 @@ def purge_pip():
        return
    pip_related_entries = [
        (k, v) for (k, v) in sys.modules.items()
        or getattr(v, '__module__', '').startswith('pip.')
        if getattr(v, '__module__', '').startswith('pip.')
        or (isinstance(v, types.ModuleType) and v.__name__.startswith('pip.'))
    ]
    for name, entry in pip_related_entries:

@@ -96,21 +96,8 @@ try:
    HAS_PIP = True
except ImportError:
    HAS_PIP = False
    # Remove references to the loaded pip module above so reloading works
    import sys
    pip_related_entries = [
        (k, v) for (k, v) in sys.modules.items()
        or getattr(v, '__module__', '').startswith('pip.')
        or (isinstance(v, types.ModuleType) and v.__name__.startswith('pip.'))
    ]
    for name, entry in pip_related_entries:
        sys.modules.pop(name)
        del entry
    purge_pip()

    del pip
    sys_modules_pip = sys.modules.pop('pip', None)
    if sys_modules_pip is not None:
        del sys_modules_pip

if HAS_PIP is True:
    if not hasattr(purge_pip, '__pip_ver__'):
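Both hunks above revolve around the same idea: when the freshly imported `pip` has to be forgotten, every `pip.*` entry is dropped from `sys.modules` so a later import starts clean, and the refactor routes the ImportError path through `purge_pip()` instead of duplicating that loop. A hedged, generic sketch of the technique (the `purge_modules` helper is illustrative, not Salt's API):

```python
import sys
import types


def purge_modules(prefix):
    '''Remove all sys.modules entries that belong to the given package.'''
    doomed = [
        name for name, mod in list(sys.modules.items())
        if name == prefix
        or name.startswith(prefix + '.')
        or (isinstance(mod, types.ModuleType)
            and getattr(mod, '__name__', '').startswith(prefix + '.'))
    ]
    for name in doomed:
        sys.modules.pop(name, None)
    return doomed


# Example: force the next "import pip" to re-execute the package.
# purge_modules('pip')
```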
@@ -396,10 +396,13 @@ def present(name,
                                       vname=vname,
                                       use_32bit_registry=use_32bit_registry)

    # Cast the vdata according to the vtype
    vdata_decoded = __utils__['reg.cast_vdata'](vdata=vdata, vtype=vtype)

    # Check if the key already exists
    # If so, check perms
    # We check `vdata` and `success` because `vdata` can be None
    if vdata == reg_current['vdata'] and reg_current['success']:
    if vdata_decoded == reg_current['vdata'] and reg_current['success']:
        ret['comment'] = '{0} in {1} is already present' \
                         ''.format(salt.utils.stringutils.to_unicode(vname, 'utf-8') if vname else '(Default)',
                                   salt.utils.stringutils.to_unicode(name, 'utf-8'))

@@ -413,9 +416,6 @@ def present(name,
            inheritance=win_inheritance,
            reset=win_perms_reset)

    # Cast the vdata according to the vtype
    vdata_decoded = __utils__['reg.cast_vdata'](vdata=vdata, vtype=vtype)

    add_change = {'Key': r'{0}\{1}'.format(hive, key),
                  'Entry': '{0}'.format(salt.utils.stringutils.to_unicode(vname, 'utf-8') if vname else '(Default)'),
                  'Value': vdata_decoded,

@@ -440,10 +440,10 @@ def present(name,

    if not ret['result']:
        ret['changes'] = {}
        ret['comment'] = r'Failed to add {0} to {1}\{2}'.format(name, hive, key)
        ret['comment'] = r'Failed to add {0} to {1}\{2}'.format(vname, hive, key)
    else:
        ret['changes'] = {'reg': {'Added': add_change}}
        ret['comment'] = r'Added {0} to {1}\{2}'.format(name, hive, key)
        ret['comment'] = r'Added {0} to {1}\{2}'.format(vname, hive, key)

    if ret['result']:
        ret = __utils__['dacl.check_perms'](
@@ -487,7 +487,7 @@ def running(name,
        time.sleep(init_delay)

    # only force a change state if we have explicitly detected them
    after_toggle_status = __salt__['service.status'](name, sig, **kwargs)
    after_toggle_status = __salt__['service.status'](name, sig, **status_kwargs)
    if 'service.enabled' in __salt__:
        after_toggle_enable_status = __salt__['service.enabled'](name)
    else:
@@ -246,6 +246,11 @@ def _parse_vmconfig(config, instances):
                    ## some property are lowercase
                    if 'mac' in instance_config:
                        instance_config['mac'] = instance_config['mac'].lower()
                    ## calculate mac from vrrp_vrid
                    if 'vrrp_vrid' in instance_config:
                        instance_config['mac'] = "00:00:5e:00:01:{0}".format(
                            hex(int(instance_config['vrrp_vrid'])).split('x')[-1].zfill(2),
                        )
                    vmconfig[prop].append(instance_config)
                else:
                    log.error('smartos.vm_present::parse_vmconfig - failed to parse')
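The `vrrp_vrid` handling above derives the NIC MAC from the IPv4 VRRP virtual MAC prefix defined in RFC 3768, 00:00:5e:00:01:{VRID}, with the VRID rendered as two hex digits. A standalone sketch of the same derivation:

```python
def vrrp_mac(vrid):
    '''Return the IPv4 VRRP virtual MAC for a VRID (must fit in one octet).'''
    vrid = int(vrid)
    if not 0 <= vrid <= 255:
        raise ValueError('VRID must be between 0 and 255')
    return '00:00:5e:00:01:{0:02x}'.format(vrid)


print(vrrp_mac(7))    # 00:00:5e:00:01:07
print(vrrp_mac(255))  # 00:00:5e:00:01:ff
```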
@@ -768,7 +773,8 @@ def vm_present(name, vmconfig, config=None):
        'instance': {
            'nics': 'mac',
            'disks': 'path',
            'filesystems': 'target'
            'filesystems': 'target',
            'pci_devices': 'path',
        },
        'create_only': [
            'filesystems'
@@ -112,6 +112,7 @@ import logging
import salt.utils.data
import salt.utils.dictdiffer
import salt.utils.json
import salt.utils.stringutils
import salt.utils.versions
import salt.utils.win_functions

@@ -152,6 +153,35 @@ def _compare_policies(new_policy, current_policy):
    return False


def _convert_to_unicode(data):
    '''
    Helper function that makes sure all items in the dictionary are unicode for
    comparing the existing state with the desired state. This function is only
    needed for Python 2 and can be removed once we've migrated to Python 3.

    The data returned by the current settings sometimes has a mix of unicode and
    string values (these don't matter in Py3). This causes the comparison to
    say it's not in the correct state even though it is. They basically compares
    apples to apples, etc.

    Also, in Python 2, the utf-16 encoded strings remain utf-16 encoded (each
    character separated by `/x00`) In Python 3 it returns a utf-8 string. This
    will just remove all the null bytes (`/x00`), again comparing apples to
    apples.
    '''
    if isinstance(data, six.string_types):
        data = data.replace('\x00', '')
        return salt.utils.stringutils.to_unicode(data)
    elif isinstance(data, dict):
        return dict((_convert_to_unicode(k),
                     _convert_to_unicode(v))
                    for k, v in data.items())
    elif isinstance(data, list):
        return list(_convert_to_unicode(v) for v in data)
    else:
        return data


def set_(name,
         setting=None,
         policy_class=None,

@@ -342,6 +372,9 @@ def set_(name,
        requested_policy_check = salt.utils.json.loads(requested_policy_json)
        current_policy_check = salt.utils.json.loads(current_policy_json)

        if six.PY2:
            current_policy_check = _convert_to_unicode(current_policy_check)

        # Are the requested and current policies identical
        policies_are_equal = _compare_policies(
            requested_policy_check, current_policy_check)
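For a feel of what `_convert_to_unicode` does to the current-policy data, here is a trimmed, standalone Python 3 illustration of the same recursive NUL-stripping (the real helper additionally routes strings through `salt.utils.stringutils.to_unicode` for Python 2):

```python
def normalize(data):
    '''Recursively strip NUL bytes so mixed encodings compare cleanly.'''
    if isinstance(data, str):
        return data.replace('\x00', '')
    if isinstance(data, dict):
        return {normalize(k): normalize(v) for k, v in data.items()}
    if isinstance(data, list):
        return [normalize(v) for v in data]
    return data


print(normalize({'E\x00n\x00a\x00b\x00l\x00e\x00d\x00': ['1', '0\x00']}))
# {'Enabled': ['1', '0']}
```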
@@ -1019,7 +1019,7 @@ class Schedule(object):

                if when < now - loop_interval and \
                        not data.get('_run', False) and \
                        not data.get('run', False) and \
                        not run and \
                        not data['_splay']:
                    data['_next_fire_time'] = None
                    data['_continue'] = True
@@ -122,6 +122,8 @@ def wrap_tmpl_func(render_str):
            slspath = context['sls'].replace('.', '/')
            if tmplpath is not None:
                context['tplpath'] = tmplpath
                if not tmplpath.lower().replace('\\', '/').endswith('/init.sls'):
                    slspath = os.path.dirname(slspath)
                template = tmplpath.replace('\\', '/')
                i = template.rfind(slspath.replace('.', '/'))
                if i != -1:
@@ -58,6 +58,7 @@ enum_adapter_types = {
    28: 'Slip',
    37: 'ATM',
    48: 'GenericModem',
    53: 'TAPAdapter',  # Not in MSDN Defined enumeration
    62: 'FastEthernetT',
    63: 'ISDN',
    69: 'FastEthernetFx',

@@ -108,12 +109,16 @@ def __virtual__():

def _get_base_properties(i_face):
    raw_mac = i_face.GetPhysicalAddress().ToString()
    try:
        i_face_type = enum_adapter_types[i_face.NetworkInterfaceType]
    except KeyError:
        i_face_type = i_face.Description
    return {
        'alias': i_face.Name,
        'description': i_face.Description,
        'id': i_face.Id,
        'receive_only': i_face.IsReceiveOnly,
        'type': enum_adapter_types[i_face.NetworkInterfaceType],
        'type': i_face_type,
        'status': enum_operational_status[i_face.OperationalStatus],
        'physical_address': ':'.join(raw_mac[i:i+2] for i in range(0, 12, 2))}
salt/version.py (100 changed lines)
@ -113,8 +113,8 @@ class SaltStackVersion(object):
|
|||
'Sodium' : (MAX_SIZE - 98, 0),
|
||||
'Magnesium' : (MAX_SIZE - 97, 0),
|
||||
'Aluminium' : (MAX_SIZE - 96, 0),
|
||||
'Silicon' : (MAX_SIZE - 95, 0),
|
||||
'Phosphorus' : (MAX_SIZE - 94, 0),
|
||||
'Silicon' : (MAX_SIZE - 95, 0),
|
||||
'Phosphorus' : (MAX_SIZE - 94, 0),
|
||||
# pylint: disable=E8265
|
||||
#'Sulfur' : (MAX_SIZE - 93, 0),
|
||||
#'Chlorine' : (MAX_SIZE - 92, 0),
|
||||
|
@ -232,7 +232,11 @@ class SaltStackVersion(object):
|
|||
major = int(major)
|
||||
|
||||
if isinstance(minor, string_types):
|
||||
minor = int(minor)
|
||||
if not minor:
|
||||
# Empty string
|
||||
minor = None
|
||||
else:
|
||||
minor = int(minor)
|
||||
|
||||
if bugfix is None and not self.new_version(major=major):
|
||||
bugfix = 0
|
||||
|
@ -319,50 +323,44 @@ class SaltStackVersion(object):
|
|||
# Higher than 0.17, lower than first date based
|
||||
return 0 < self.major < 2014
|
||||
|
||||
def min_info(self):
|
||||
info = [self.major]
|
||||
if self.new_version(self.major):
|
||||
if self.minor:
|
||||
info.append(self.minor)
|
||||
else:
|
||||
info.extend([self.minor,
|
||||
self.bugfix,
|
||||
self.mbugfix])
|
||||
return info
|
||||
|
||||
@property
|
||||
def info(self):
|
||||
return (
|
||||
self.major,
|
||||
self.minor,
|
||||
self.bugfix,
|
||||
self.mbugfix
|
||||
)
|
||||
return tuple(self.min_info())
|
||||
|
||||
@property
|
||||
def pre_info(self):
|
||||
return (
|
||||
self.major,
|
||||
self.minor,
|
||||
self.bugfix,
|
||||
self.mbugfix,
|
||||
self.pre_type,
|
||||
self.pre_num
|
||||
)
|
||||
info = self.min_info()
|
||||
info.extend([self.pre_type,
|
||||
self.pre_num])
|
||||
return tuple(info)
|
||||
|
||||
@property
|
||||
def noc_info(self):
|
||||
return (
|
||||
self.major,
|
||||
self.minor,
|
||||
self.bugfix,
|
||||
self.mbugfix,
|
||||
self.pre_type,
|
||||
self.pre_num,
|
||||
self.noc
|
||||
)
|
||||
info = self.min_info()
|
||||
info.extend([self.pre_type,
|
||||
self.pre_num,
|
||||
self.noc])
|
||||
return tuple(info)
|
||||
|
||||
@property
|
||||
def full_info(self):
|
||||
return (
|
||||
self.major,
|
||||
self.minor,
|
||||
self.bugfix,
|
||||
self.mbugfix,
|
||||
self.pre_type,
|
||||
self.pre_num,
|
||||
self.noc,
|
||||
self.sha
|
||||
)
|
||||
info = self.min_info()
|
||||
info.extend([self.pre_type,
|
||||
self.pre_num,
|
||||
self.noc,
|
||||
self.sha])
|
||||
return tuple(info)
|
||||
|
||||
@property
|
||||
def string(self):
|
||||
|
@ -402,6 +400,16 @@ class SaltStackVersion(object):
|
|||
version_string += ' ({0})'.format(self.RMATCH[(self.major, self.minor)])
|
||||
return version_string
|
||||
|
||||
@property
|
||||
def pre_index(self):
|
||||
if self.new_version(self.major):
|
||||
pre_type = 2
|
||||
if not isinstance(self.minor, int):
|
||||
pre_type = 1
|
||||
else:
|
||||
pre_type = 4
|
||||
return pre_type
|
||||
|
||||
def __str__(self):
|
||||
return self.string
|
||||
|
||||
|
@ -418,23 +426,29 @@ class SaltStackVersion(object):
|
|||
)
|
||||
)
|
||||
|
||||
pre_type = self.pre_index
|
||||
other_pre_type = other.pre_index
|
||||
other_noc_info = list(other.noc_info)
|
||||
noc_info = list(self.noc_info)
|
||||
|
||||
if self.new_version(self.major):
|
||||
if isinstance(self.minor, int) and not isinstance(other.minor, int):
|
||||
other_noc_info[1] = 0
|
||||
if self.minor and not other.minor:
|
||||
# We have minor information, the other side does not
|
||||
if self.minor > 0:
|
||||
other_noc_info[1] = 0
|
||||
|
||||
if not isinstance(self.minor, int) and isinstance(other.minor, int):
|
||||
noc_info[1] = 0
|
||||
if not self.minor and other.minor:
|
||||
# The other side has minor information, we don't
|
||||
if other.minor > 0:
|
||||
noc_info[1] = 0
|
||||
|
||||
if self.pre_type and not other.pre_type:
|
||||
# We have pre-release information, the other side doesn't
|
||||
other_noc_info[4] = 'zzzzz'
|
||||
other_noc_info[other_pre_type] = 'zzzzz'
|
||||
|
||||
if not self.pre_type and other.pre_type:
|
||||
# The other side has pre-release informatio, we don't
|
||||
noc_info[4] = 'zzzzz'
|
||||
# The other side has pre-release information, we don't
|
||||
noc_info[pre_type] = 'zzzzz'
|
||||
|
||||
return method(tuple(noc_info), tuple(other_noc_info))
|
||||
|
||||
|
|
setup.py (31 changed lines)
|
@ -74,8 +74,9 @@ BOOTSTRAP_SCRIPT_DISTRIBUTED_VERSION = os.environ.get(
|
|||
)
|
||||
|
||||
# Store a reference to the executing platform
|
||||
IS_OSX_PLATFORM = sys.platform.startswith('darwin')
|
||||
IS_WINDOWS_PLATFORM = sys.platform.startswith('win')
|
||||
if IS_WINDOWS_PLATFORM:
|
||||
if IS_WINDOWS_PLATFORM or IS_OSX_PLATFORM:
|
||||
IS_SMARTOS_PLATFORM = False
|
||||
else:
|
||||
# os.uname() not available on Windows.
|
||||
|
@ -100,8 +101,15 @@ SALT_SYSPATHS_HARDCODED = os.path.join(os.path.abspath(SETUP_DIRNAME), 'salt', '
|
|||
SALT_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'base.txt')
|
||||
SALT_CRYPTO_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'crypto.txt')
|
||||
SALT_ZEROMQ_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'zeromq.txt')
|
||||
SALT_WINDOWS_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'pkg', 'windows', 'req.txt')
|
||||
SALT_LONG_DESCRIPTION_FILE = os.path.join(os.path.abspath(SETUP_DIRNAME), 'README.rst')
|
||||
SALT_OSX_REQS = [
|
||||
os.path.join(os.path.abspath(SETUP_DIRNAME), 'pkg', 'osx', 'req.txt'),
|
||||
os.path.join(os.path.abspath(SETUP_DIRNAME), 'pkg', 'osx', 'req_ext.txt')
|
||||
]
|
||||
SALT_WINDOWS_REQS = [
|
||||
os.path.join(os.path.abspath(SETUP_DIRNAME), 'pkg', 'windows', 'req.txt'),
|
||||
os.path.join(os.path.abspath(SETUP_DIRNAME), 'pkg', 'windows', 'req_win.txt')
|
||||
]
|
||||
|
||||
# Salt SSH Packaging Detection
|
||||
PACKAGED_FOR_SALT_SSH_FILE = os.path.join(os.path.abspath(SETUP_DIRNAME), '.salt-ssh-package')
|
||||
|
@ -700,8 +708,7 @@ class Install(install):
|
|||
install.finalize_options(self)
|
||||
|
||||
def run(self):
|
||||
from distutils.version import StrictVersion
|
||||
if StrictVersion(setuptools.__version__) < StrictVersion('9.1'):
|
||||
if LooseVersion(setuptools.__version__) < LooseVersion('9.1'):
|
||||
sys.stderr.write(
|
||||
'\n\nInstalling Salt requires setuptools >= 9.1\n'
|
||||
'Available setuptools version is {}\n\n'.format(setuptools.__version__)
|
||||
|
@ -1019,14 +1026,24 @@ class SaltDistribution(distutils.dist.Distribution):
|
|||
|
||||
@property
|
||||
def _property_install_requires(self):
|
||||
|
||||
if IS_OSX_PLATFORM:
|
||||
install_requires = []
|
||||
for reqfile in SALT_OSX_REQS:
|
||||
install_requires += _parse_requirements_file(reqfile)
|
||||
return install_requires
|
||||
|
||||
if IS_WINDOWS_PLATFORM:
|
||||
install_requires = []
|
||||
for reqfile in SALT_WINDOWS_REQS:
|
||||
install_requires += _parse_requirements_file(reqfile)
|
||||
return install_requires
|
||||
|
||||
install_requires = _parse_requirements_file(SALT_REQS)
|
||||
|
||||
if self.salt_transport == 'zeromq':
|
||||
install_requires += _parse_requirements_file(SALT_CRYPTO_REQS)
|
||||
install_requires += _parse_requirements_file(SALT_ZEROMQ_REQS)
|
||||
|
||||
if IS_WINDOWS_PLATFORM:
|
||||
install_requires = _parse_requirements_file(SALT_WINDOWS_REQS)
|
||||
return install_requires
|
||||
|
||||
@property
|
||||
|
|
|
@@ -95,6 +95,9 @@ config_test:

mine_functions:
  test.ping: []
  test.arg:
    - isn't
    - allow_tgt: 'sub_minion'

# sdb env module
osenv:

@@ -62,3 +62,8 @@ grains:
  keystone.password: demopass
  keystone.tenant: demo
  keystone.auth_url: http://127.0.0.1:5000/v3/

mine_functions:
  test.arg:
    - isn't
    - allow_tgt: 'sub_minion'
tests/integration/files/file/base/issue-56131.sls (new file, 10 lines)
@@ -0,0 +1,10 @@
# archive-test
vault:
  archive.extracted:
    - name: {{ pillar['unzip_to'] }}
    - source: salt://issue-56131.zip
    - source_hash: sha256=4fc6f049d658a414aca066fb11c2109d05b59f082d707d5d6355b6c574d25720
    - archive_format: zip
    - enforce_toplevel: False
    - unless:
      - echo hello && 1
tests/integration/files/file/base/issue-56131.zip (new binary file, not shown)
tests/integration/files/file/base/issue-56195/test.ps1 (new file, 7 lines)
@@ -0,0 +1,7 @@
[CmdLetBinding()]
Param(
    [SecureString] $SecureString
)

$Credential = New-Object System.Net.NetworkCredential("DummyId", $SecureString)
$Credential.Password
@@ -0,0 +1,3 @@
directory-level1-init-test-pass:
  test.succeed_without_changes:
    - name: testing-saltcheck

@@ -0,0 +1,3 @@
directory-level1-test-pass:
  test.succeed_without_changes:
    - name: testing-saltcheck

@@ -1,3 +1,7 @@
include:
  - validate-saltcheck.directory
  - validate-saltcheck.directory.level1

saltcheck-test-pass:
  test.succeed_without_changes:
    - name: testing-saltcheck
tests/integration/minion/test_minion_cache.py (new file, 53 lines)
@ -0,0 +1,53 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
import salt.loader
|
||||
import salt.minion
|
||||
import salt.utils.yaml
|
||||
from salt.utils.files import fopen
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.helpers import with_tempdir
|
||||
from tests.support.mock import patch
|
||||
|
||||
|
||||
class BasePillarTest(ModuleCase):
|
||||
@with_tempdir()
|
||||
def test_minion_cache_should_cache_files(self, tempdir):
|
||||
pillar = {"this": {"is": {"some": "pillar data"}}}
|
||||
opts = {
|
||||
"file_client": "remote",
|
||||
"minion_pillar_cache": "true",
|
||||
"master_type": "local",
|
||||
"discovery": False,
|
||||
"master": "local",
|
||||
"__role": "",
|
||||
"id": "test",
|
||||
"saltenv": "base",
|
||||
"pillar_cache": True,
|
||||
"pillar_cache_backend": "disk",
|
||||
"pillar_cache_ttl": 3600,
|
||||
"cachedir": tempdir,
|
||||
"state_top": "top.sls",
|
||||
"pillar_roots": {"base": tempdir},
|
||||
"extension_modules": tempdir,
|
||||
"file_ignore_regex": [],
|
||||
"file_ignore_glob": [],
|
||||
"pillar": pillar,
|
||||
}
|
||||
with patch("salt.loader.grains", return_value={}), patch(
|
||||
"salt.minion.SMinion.gen_modules"
|
||||
), patch("salt.minion.SMinion.eval_master"), patch(
|
||||
"salt.minion.install_zmq"
|
||||
), patch(
|
||||
"salt.minion.ZMQDefaultLoop.current"
|
||||
):
|
||||
minion = salt.minion.SMinion(opts)
|
||||
self.assertTrue("pillar" in os.listdir(tempdir))
|
||||
pillar_cache = os.path.join(tempdir, "pillar")
|
||||
self.assertTrue("top.sls" in os.listdir(pillar_cache))
|
||||
self.assertTrue("cache.sls" in os.listdir(pillar_cache))
|
||||
with fopen(os.path.join(pillar_cache, "cache.sls"), "rb") as f:
|
||||
cached_data = salt.utils.yaml.safe_load(f)
|
||||
assert cached_data == pillar
|
|
@ -491,3 +491,14 @@ class CMDModuleTest(ModuleCase):
|
|||
out = self.run_function('cmd.run', ['set'], env={"abc": "123", "ABC": "456"}).splitlines()
|
||||
self.assertIn('abc=123', out)
|
||||
self.assertIn('ABC=456', out)
|
||||
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'minion is not windows')
|
||||
def test_windows_powershell_script_args(self):
|
||||
'''
|
||||
Ensure that powershell processes inline script in args
|
||||
'''
|
||||
val = 'i like cheese'
|
||||
args = '-SecureString (ConvertTo-SecureString -String "{0}" -AsPlainText -Force) -ErrorAction Stop'.format(val)
|
||||
script = 'salt://issue-56195/test.ps1'
|
||||
ret = self.run_function('cmd.script', [script], args=args, shell='powershell')
|
||||
self.assertEqual(ret['stdout'], val)
|
||||
|
|
|
@ -8,29 +8,29 @@ import time
|
|||
import pprint
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.case import ModuleCase, ShellCase
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.platform
|
||||
|
||||
|
||||
class MineTest(ModuleCase):
|
||||
class MineTest(ModuleCase, ShellCase):
|
||||
'''
|
||||
Test the mine system
|
||||
'''
|
||||
def setUp(self):
|
||||
self.tgt = r'\*'
|
||||
if salt.utils.platform.is_windows():
|
||||
self.tgt = '*'
|
||||
self.wait_for_all_jobs()
|
||||
|
||||
def test_get(self):
|
||||
'''
|
||||
test mine.get and mine.update
|
||||
'''
|
||||
self.assertTrue(self.run_function('mine.update', minion_tgt='minion'))
|
||||
# The sub_minion does not have mine_functions defined in its configuration
|
||||
# In this case, mine.update returns None
|
||||
self.assertIsNone(
|
||||
self.run_function(
|
||||
'mine.update',
|
||||
minion_tgt='sub_minion'
|
||||
)
|
||||
)
|
||||
assert self.run_function('mine.update', minion_tgt='minion')
|
||||
assert self.run_function('mine.update', minion_tgt='sub_minion')
|
||||
# Since the minion has mine_functions defined in its configuration,
|
||||
# mine.update will return True
|
||||
self.assertTrue(
|
||||
|
@ -40,6 +40,78 @@ class MineTest(ModuleCase):
|
|||
)
|
||||
)
|
||||
|
||||
def test_get_allow_tgt(self):
|
||||
'''
|
||||
test mine.get and mine.update using allow_tgt
|
||||
'''
|
||||
assert self.run_function('mine.update', minion_tgt='minion')
|
||||
assert self.run_function('mine.update', minion_tgt='sub_minion')
|
||||
|
||||
# sub_minion should be able to view test.arg data
|
||||
sub_min_ret = self.run_call('mine.get {0} test.arg'.format(self.tgt), config_dir=RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR)
|
||||
assert " - isn't" in sub_min_ret
|
||||
|
||||
# minion should not be able to view test.arg data
|
||||
min_ret = self.run_call('mine.get {0} test.arg'.format(self.tgt))
|
||||
assert " - isn't" not in min_ret
|
||||
|
||||
def test_send_allow_tgt(self):
|
||||
'''
|
||||
test mine.send with allow_tgt set
|
||||
'''
|
||||
mine_name = 'test_this'
|
||||
for minion in ['sub_minion', 'minion']:
|
||||
assert self.run_function('mine.send', [mine_name,
|
||||
'mine_function=test.arg_clean', 'one'], allow_tgt='sub_minion',
|
||||
minion_tgt=minion)
|
||||
min_ret = self.run_call('mine.get {0} {1}'.format(self.tgt, mine_name))
|
||||
sub_ret = self.run_call('mine.get {0} {1}'.format(self.tgt, mine_name),
|
||||
config_dir=RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR)
|
||||
|
||||
# ensure we did get the mine_name mine function for sub_minion
|
||||
assert ' - one' in sub_ret
|
||||
# ensure we did not get the mine_name mine function for minion
|
||||
assert ' - one' not in min_ret
|
||||
|
||||
def test_send_allow_tgt_compound(self):
|
||||
'''
|
||||
test mine.send with allow_tgt set
|
||||
and using compound targeting
|
||||
'''
|
||||
mine_name = 'test_this_comp'
|
||||
for minion in ['sub_minion', 'minion']:
|
||||
assert self.run_function('mine.send', [mine_name,
|
||||
'mine_function=test.arg_clean', 'one'],
|
||||
allow_tgt='L@minion,sub_minion',
|
||||
allow_tgt_type='compound',
|
||||
minion_tgt=minion)
|
||||
min_ret = self.run_call('mine.get {0} {1}'.format(self.tgt, mine_name))
|
||||
sub_ret = self.run_call('mine.get {0} {1}'.format(self.tgt, mine_name),
|
||||
config_dir=RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR)
|
||||
|
||||
# ensure we get the mine_name mine function for both minions
|
||||
for ret in [min_ret, sub_ret]:
|
||||
assert ' - one' in ret
|
||||
|
||||
def test_send_allow_tgt_doesnotexist(self):
|
||||
'''
|
||||
test mine.send with allow_tgt set when
|
||||
the minion defined in allow_tgt does
|
||||
not exist
|
||||
'''
|
||||
mine_name = 'mine_doesnotexist'
|
||||
for minion in ['sub_minion', 'minion']:
|
||||
assert self.run_function('mine.send', [mine_name,
|
||||
'mine_function=test.arg_clean', 'one'], allow_tgt='doesnotexist',
|
||||
minion_tgt=minion)
|
||||
min_ret = self.run_call('mine.get {0} {1}'.format(self.tgt, mine_name))
|
||||
sub_ret = self.run_call('mine.get {0} {1}'.format(self.tgt, mine_name),
|
||||
config_dir=RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR)
|
||||
|
||||
# ensure we did not get the mine_name mine function for both minions
|
||||
for ret in [sub_ret, min_ret]:
|
||||
assert ' - one' not in ret
|
||||
|
||||
def test_send(self):
|
||||
'''
|
||||
test mine.send
|
||||
|
|
|
@ -2268,3 +2268,26 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
self.assertEqual(state_run[state_id]['comment'],
|
||||
'Success!')
|
||||
self.assertTrue(state_run[state_id]['result'])
|
||||
|
||||
def test_issue_56131(self):
|
||||
module_path = os.path.join(RUNTIME_VARS.CODE_DIR, 'pip.py')
|
||||
if six.PY3:
|
||||
modulec_path = os.path.join(RUNTIME_VARS.CODE_DIR, '__pycache__', 'pip.pyc')
|
||||
else:
|
||||
modulec_path = os.path.join(RUNTIME_VARS.CODE_DIR, 'pip.pyc')
|
||||
unzip_path = os.path.join(RUNTIME_VARS.TMP, 'issue-56131.txt')
|
||||
|
||||
def clean_paths(paths):
|
||||
for path in paths:
|
||||
try:
|
||||
os.remove(path)
|
||||
except OSError:
|
||||
log.warn("Path not found: %s", path)
|
||||
|
||||
with salt.utils.files.fopen(module_path, 'w') as fp:
|
||||
fp.write('raise ImportError("No module named pip")')
|
||||
self.addCleanup(clean_paths, [unzip_path, module_path, modulec_path])
|
||||
assert not os.path.exists(unzip_path)
|
||||
state_run = self.run_function('state.sls', mods='issue-56131', pillar={'unzip_to': RUNTIME_VARS.TMP}, timeout=30)
|
||||
assert state_run is not False
|
||||
assert os.path.exists(unzip_path)
|
||||
|
|
|
@ -506,6 +506,22 @@ class WinLgpoTest(ModuleCase):
|
|||
'Not Configured',
|
||||
[r'; Source file: c:\\windows\\system32\\grouppolicy\\machine\\registry.pol[\s]*; PARSING COMPLETED.'])
|
||||
|
||||
@destructiveTest
|
||||
def test_set_computer_policy_GuestAccountStatus(self):
|
||||
'''
|
||||
Test setting/unsetting/changing GuestAccountStatus
|
||||
'''
|
||||
# disable GuestAccountStatus
|
||||
self._testSeceditPolicy(
|
||||
'GuestAccountStatus',
|
||||
'Disabled',
|
||||
[r'^EnableGuestAccount = 0'])
|
||||
# enable GuestAccountStatus
|
||||
self._testSeceditPolicy(
|
||||
'GuestAccountStatus',
|
||||
'Enabled',
|
||||
[r'^EnableGuestAccount = 1'])
|
||||
|
||||
@destructiveTest
|
||||
def test_set_computer_policy_PasswordComplexity(self):
|
||||
'''
|
||||
|
|
|
@ -9,7 +9,12 @@ import os
|
|||
import random
|
||||
import time
|
||||
|
||||
import dateutil.parser as dateutil_parser
|
||||
try:
|
||||
import dateutil.parser as dateutil_parser
|
||||
HAS_DATEUTIL_PARSER = True
|
||||
except ImportError:
|
||||
HAS_DATEUTIL_PARSER = False
|
||||
|
||||
import datetime
|
||||
|
||||
# Import Salt Testing libs
|
||||
|
@ -43,6 +48,7 @@ DEFAULT_CONFIG['pki_dir'] = os.path.join(ROOT_DIR, 'pki')
|
|||
DEFAULT_CONFIG['cachedir'] = os.path.join(ROOT_DIR, 'cache')
|
||||
|
||||
|
||||
@skipIf(HAS_DATEUTIL_PARSER is False, 'The \'dateutil.parser\' library is not available')
|
||||
class SchedulerEvalTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
'''
|
||||
Validate the pkg module
|
||||
|
@ -920,6 +926,7 @@ class SchedulerEvalTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
}
|
||||
run_time1 = dateutil_parser.parse('11/29/2017 4:00pm')
|
||||
run_time2 = run_time1 + datetime.timedelta(seconds=splay)
|
||||
run_time3 = run_time2 + datetime.timedelta(seconds=1)
|
||||
|
||||
# Add the job to the scheduler
|
||||
self.schedule.opts.update(job)
|
||||
|
@ -940,6 +947,13 @@ class SchedulerEvalTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
ret = self.schedule.job_status(job_name)
|
||||
self.assertEqual(ret['_last_run'], run_time2)
|
||||
|
||||
# Evaluate at expected runtime3, should not run
|
||||
# _next_fire_time should be None
|
||||
self.schedule.eval(now=run_time3)
|
||||
ret = self.schedule.job_status(job_name)
|
||||
self.assertEqual(ret['_last_run'], run_time2)
|
||||
self.assertEqual(ret['_next_fire_time'], None)
|
||||
|
||||
def test_eval_when_splay_in_past(self):
|
||||
'''
|
||||
verify that scheduled job runs
|
||||
|
|
|
@ -405,8 +405,12 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
pprint.pformat(ret)
|
||||
)
|
||||
)
|
||||
import salt.modules.virtualenv_mod
|
||||
msg = 'New python executable'
|
||||
if salt.modules.virtualenv_mod.virtualenv_ver(venv_dir) >= (20, 0, 2):
|
||||
msg = 'created virtual environment'
|
||||
self.assertIn(
|
||||
'New python executable',
|
||||
msg,
|
||||
ret['stdout'],
|
||||
msg='Expected STDOUT did not match. Full return dictionary:\n{}'.format(
|
||||
pprint.pformat(ret)
|
||||
|
|
|
@ -81,9 +81,10 @@ class VirtualenvTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
' - requirements: salt://issue-2594-requirements.txt',
|
||||
]
|
||||
|
||||
reqs = ['pep8==1.3.3', 'zope.interface==4.7.1']
|
||||
# Let's populate the requirements file, just pep-8 for now
|
||||
with salt.utils.files.fopen(requirements_file_path, 'a') as fhw:
|
||||
fhw.write('pep8==1.3.3\n')
|
||||
fhw.write(reqs[0] + '\n')
|
||||
|
||||
# Let's run our state!!!
|
||||
try:
|
||||
|
@ -94,7 +95,7 @@ class VirtualenvTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
self.assertSaltTrueReturn(ret)
|
||||
self.assertInSaltComment('Created new virtualenv', ret)
|
||||
self.assertSaltStateChangesEqual(
|
||||
ret, ['pep8==1.3.3'], keys=('packages', 'new')
|
||||
ret, [reqs[0]], keys=('packages', 'new')
|
||||
)
|
||||
except AssertionError:
|
||||
# Always clean up the tests temp files
|
||||
|
@ -106,12 +107,12 @@ class VirtualenvTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
|
||||
# Let's make sure, it really got installed
|
||||
ret = self.run_function('pip.freeze', bin_env=venv_path)
|
||||
self.assertIn('pep8==1.3.3', ret)
|
||||
self.assertNotIn('zope.interface==4.0.1', ret)
|
||||
self.assertIn(reqs[0], ret)
|
||||
self.assertNotIn(reqs[1], ret)
|
||||
|
||||
# Now let's update the requirements file, which is now cached.
|
||||
with salt.utils.files.fopen(requirements_file_path, 'w') as fhw:
|
||||
fhw.write('zope.interface==4.0.1\n')
|
||||
fhw.write(reqs[1] + '\n')
|
||||
|
||||
# Let's run our state!!!
|
||||
try:
|
||||
|
@ -122,7 +123,7 @@ class VirtualenvTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
self.assertSaltTrueReturn(ret)
|
||||
self.assertInSaltComment('virtualenv exists', ret)
|
||||
self.assertSaltStateChangesEqual(
|
||||
ret, ['zope.interface==4.0.1'], keys=('packages', 'new')
|
||||
ret, [reqs[1]], keys=('packages', 'new')
|
||||
)
|
||||
except AssertionError:
|
||||
# Always clean up the tests temp files
|
||||
|
@ -134,8 +135,8 @@ class VirtualenvTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
|
||||
# Let's make sure, it really got installed
|
||||
ret = self.run_function('pip.freeze', bin_env=venv_path)
|
||||
self.assertIn('pep8==1.3.3', ret)
|
||||
self.assertIn('zope.interface==4.0.1', ret)
|
||||
self.assertIn(reqs[0], ret)
|
||||
self.assertIn(reqs[1], ret)
|
||||
|
||||
# If we reached this point no assertion failed, so, cleanup!
|
||||
if os.path.exists(venv_path):
|
||||
|
|
|
@ -175,9 +175,12 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixin
|
|||
arg_str = '--config-dir {0} {1}'.format(self.config_dir, arg_str)
|
||||
return self.run_script('salt-cp', arg_str, with_retcode=with_retcode, catch_stderr=catch_stderr)
|
||||
|
||||
def run_call(self, arg_str, with_retcode=False, catch_stderr=False, local=False, timeout=15):
|
||||
def run_call(self, arg_str, with_retcode=False, catch_stderr=False,
|
||||
local=False, timeout=15, config_dir=None):
|
||||
if not config_dir:
|
||||
config_dir = self.config_dir
|
||||
arg_str = '{0} --config-dir {1} {2}'.format('--local' if local else '',
|
||||
self.config_dir, arg_str)
|
||||
config_dir, arg_str)
|
||||
|
||||
return self.run_script('salt-call',
|
||||
arg_str,
|
||||
|
@ -582,12 +585,14 @@ class ShellCase(ShellTestCase, AdaptedConfigurationTestCaseMixin, ScriptPathMixi
|
|||
timeout=timeout)
|
||||
|
||||
def run_call(self, arg_str, with_retcode=False, catch_stderr=False, # pylint: disable=W0221
|
||||
local=False, timeout=RUN_TIMEOUT):
|
||||
local=False, timeout=RUN_TIMEOUT, config_dir=None):
|
||||
'''
|
||||
Execute salt-call.
|
||||
'''
|
||||
if not config_dir:
|
||||
config_dir = self.config_dir
|
||||
arg_str = '{0} --config-dir {1} {2}'.format('--local' if local else '',
|
||||
self.config_dir, arg_str)
|
||||
config_dir, arg_str)
|
||||
ret = self.run_script('salt-call',
|
||||
arg_str,
|
||||
with_retcode=with_retcode,
|
||||
|
@ -772,8 +777,6 @@ class ModuleCase(TestCase, SaltClientTestCaseMixin):
|
|||
'ssh.recv_known_host_entries',
|
||||
'time.sleep'
|
||||
)
|
||||
if minion_tgt == 'sub_minion':
|
||||
known_to_return_none += ('mine.update',)
|
||||
if 'f_arg' in kwargs:
|
||||
kwargs['arg'] = kwargs.pop('f_arg')
|
||||
if 'f_timeout' in kwargs:
|
||||
|
|
|
@ -1485,3 +1485,19 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
|
||||
self.assertIn('osfullname', os_grains)
|
||||
self.assertEqual(os_grains.get('osfullname'), 'FreeBSD')
|
||||
|
||||
def test_saltversioninfo(self):
|
||||
'''
|
||||
test saltversioninfo core grain.
|
||||
'''
|
||||
ret = core.saltversioninfo()
|
||||
info = ret['saltversioninfo']
|
||||
assert isinstance(ret, dict)
|
||||
assert isinstance(info, list)
|
||||
try:
|
||||
assert len(info) == 1
|
||||
except AssertionError:
|
||||
# We have a minor version we need to test
|
||||
assert len(info) == 2
|
||||
assert all([x is not None for x in info])
|
||||
assert all([isinstance(x, int) for x in info])
|
||||
|
|
|
@ -43,6 +43,9 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
Test cases for salt.modules.mine
|
||||
'''
|
||||
def setUp(self):
|
||||
self.kernel_ret = 'Linux!'
|
||||
self.foo_ret = 'baz'
|
||||
self.ip_ret = '2001:db8::1:3'
|
||||
self.cache = FakeCache()
|
||||
|
||||
def setup_loader_modules(self):
|
||||
|
@ -94,15 +97,16 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Tests sending an item to the mine in the minion's local cache,
|
||||
and then immediately fetching it again (since tests are executed unordered).
|
||||
Also verify that the stored mine cache has the correct structure (with ACL).
|
||||
Also verify that the stored mine cache does not use ACL data structure
|
||||
without allow_tgt passed.
|
||||
'''
|
||||
with patch.dict(mine.__opts__, {
|
||||
'file_client': 'local',
|
||||
'id': 'webserver',
|
||||
}), \
|
||||
patch.dict(mine.__salt__, {
|
||||
'network.ip_addrs': MagicMock(return_value='2001:db8::1:3'),
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'network.ip_addrs': MagicMock(return_value=self.ip_ret),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}):
|
||||
ret = mine.send('ip_addr', mine_function='network.ip_addrs')
|
||||
mine.send('foo.bar')
|
||||
|
@ -110,14 +114,8 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
self.assertEqual(
|
||||
self.cache.fetch('minions/webserver', 'mine_cache'),
|
||||
{
|
||||
'ip_addr': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:3',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'ip_addr': self.ip_ret,
|
||||
'foo.bar': self.foo_ret,
|
||||
}
|
||||
)
|
||||
with patch.dict(mine.__opts__, {
|
||||
|
@ -128,9 +126,9 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
ret_single_dict = mine.get('*', ['ip_addr'])
|
||||
ret_multi = mine.get('*', 'ip_addr,foo.bar')
|
||||
ret_multi2 = mine.get('*', ['ip_addr', 'foo.bar'])
|
||||
self.assertEqual(ret_single, {'webserver': '2001:db8::1:3'})
|
||||
self.assertEqual(ret_single_dict, {'ip_addr': {'webserver': '2001:db8::1:3'}})
|
||||
self.assertEqual(ret_multi, {'ip_addr': {'webserver': '2001:db8::1:3'}, 'foo.bar': {'webserver': 'baz'}})
|
||||
self.assertEqual(ret_single, {'webserver': self.ip_ret})
|
||||
self.assertEqual(ret_single_dict, {'ip_addr': {'webserver': self.ip_ret}})
|
||||
self.assertEqual(ret_multi, {'ip_addr': {'webserver': self.ip_ret}, 'foo.bar': {'webserver': self.foo_ret}})
|
||||
self.assertEqual(ret_multi, ret_multi2)
|
||||
|
||||
def test_send_get_acl_local(self):
|
||||
|
@ -138,15 +136,16 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
Tests sending an item to the mine in the minion's local cache,
|
||||
including ACL information (useless when only working locally, but hey),
|
||||
and then immediately fetching it again (since tests are executed unordered).
|
||||
Also verify that the stored mine cache has the correct structure (with ACL).
|
||||
Also verify that the stored mine cache has the correct structure (with ACL)
|
||||
when using allow_tgt and no ACL without allow_tgt.
|
||||
'''
|
||||
with patch.dict(mine.__opts__, {
|
||||
'file_client': 'local',
|
||||
'id': 'webserver',
|
||||
}), \
|
||||
patch.dict(mine.__salt__, {
|
||||
'network.ip_addrs': MagicMock(return_value='2001:db8::1:3'),
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'network.ip_addrs': MagicMock(return_value=self.ip_ret),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}):
|
||||
ret = mine.send('ip_addr', mine_function='network.ip_addrs', allow_tgt='web*', allow_tgt_type='glob')
|
||||
mine.send('foo.bar')
|
||||
|
@ -155,15 +154,12 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
self.cache.fetch('minions/webserver', 'mine_cache'),
|
||||
{
|
||||
'ip_addr': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:3',
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: self.ip_ret,
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
'allow_tgt': 'web*',
|
||||
'allow_tgt_type': 'glob',
|
||||
},
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'foo.bar': self.foo_ret,
|
||||
}
|
||||
)
|
||||
with patch.dict(mine.__opts__, {
|
||||
|
@ -171,7 +167,7 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'id': 'webserver',
|
||||
}):
|
||||
ret_single = mine.get('*', 'ip_addr')
|
||||
self.assertEqual(ret_single, {'webserver': '2001:db8::1:3'})
|
||||
self.assertEqual(ret_single, {'webserver': self.ip_ret})
|
||||
|
||||
def test_send_master(self):
|
||||
'''
|
||||
|
@ -180,7 +176,7 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
with patch.object(mine, '_mine_send', MagicMock(side_effect=lambda x, y: x)),\
|
||||
patch.dict(mine.__salt__, {
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}), \
|
||||
patch.dict(mine.__opts__, {
|
||||
'file_client': 'remote',
|
||||
|
@ -192,12 +188,7 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
{
|
||||
'id': 'foo',
|
||||
'cmd': '_mine',
|
||||
'data': {
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
},
|
||||
'data': {'foo.bar': self.foo_ret},
|
||||
'clear': False,
|
||||
}
|
||||
)
|
||||
|
@ -209,7 +200,7 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
with patch.object(mine, '_mine_send', MagicMock(side_effect=lambda x, y: x)),\
|
||||
patch.dict(mine.__salt__, {
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}), \
|
||||
patch.dict(mine.__opts__, {
|
||||
'file_client': 'remote',
|
||||
|
@ -223,7 +214,7 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'cmd': '_mine',
|
||||
'data': {
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: self.foo_ret,
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
'allow_tgt': 'roles:web',
|
||||
'allow_tgt_type': 'grains',
|
||||
|
@ -239,7 +230,7 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
mock_load = {
|
||||
'tgt_type': 'qux',
|
||||
'tgt': 'baz',
|
||||
'tgt': self.foo_ret,
|
||||
'cmd': '_mine_get',
|
||||
'fun': 'foo.bar',
|
||||
'id': 'foo'
|
||||
|
@ -292,9 +283,9 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
}), \
|
||||
patch.dict(mine.__salt__, {
|
||||
'config.merge': MagicMock(return_value=config_mine_functions),
|
||||
'grains.get': lambda x: 'Linux!',
|
||||
'network.ip_addrs': MagicMock(return_value='2001:db8::1:3'),
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'grains.get': lambda x: self.kernel_ret,
|
||||
'network.ip_addrs': MagicMock(return_value=self.ip_ret),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}):
|
||||
ret = mine.update()
|
||||
self.assertEqual(ret, 'FakeCache:StoreSuccess!')
|
||||
|
@ -302,22 +293,16 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
self.assertEqual(
|
||||
self.cache.fetch('minions/webserver', 'mine_cache'),
|
||||
{
|
||||
'ip_addr': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:3',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'network.ip_addrs': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:3',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'ip_addr': self.ip_ret,
|
||||
'network.ip_addrs': self.ip_ret,
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: self.foo_ret,
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
'allow_tgt': 'G@roles:webserver',
|
||||
'allow_tgt_type': 'compound',
|
||||
},
|
||||
'kernel': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'Linux!',
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: self.kernel_ret,
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
'allow_tgt': 'web*',
|
||||
},
|
||||
|
@ -343,8 +328,8 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
patch.dict(mine.__salt__, {
|
||||
'config.merge': MagicMock(return_value={}),
|
||||
'grains.get': lambda x: 'Linux!!',
|
||||
'network.ip_addrs': MagicMock(return_value='2001:db8::1:4'),
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'network.ip_addrs': MagicMock(return_value=self.ip_ret),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}):
|
||||
ret = mine.update(mine_functions=manual_mine_functions)
|
||||
self.assertEqual(ret, 'FakeCache:StoreSuccess!')
|
||||
|
@ -352,16 +337,10 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
self.assertEqual(
|
||||
self.cache.fetch('minions/webserver', 'mine_cache'),
|
||||
{
|
||||
'ip_addr': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:4',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'network.ip_addrs': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:4',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'ip_addr': self.ip_ret,
|
||||
'network.ip_addrs': self.ip_ret,
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: self.foo_ret,
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
'allow_tgt': 'G@roles:webserver',
|
||||
'allow_tgt_type': 'compound',
|
||||
|
@ -388,22 +367,10 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'id': 'webserver',
|
||||
'cmd': '_mine',
|
||||
'data': {
|
||||
'ip_addr': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:3',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'network.ip_addrs': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: '2001:db8::1:3',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'foo.bar': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'baz',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'kernel': {
|
||||
salt.utils.mine.MINE_ITEM_ACL_DATA: 'Linux!',
|
||||
salt.utils.mine.MINE_ITEM_ACL_ID: salt.utils.mine.MINE_ITEM_ACL_VERSION,
|
||||
},
|
||||
'ip_addr': self.ip_ret,
|
||||
'network.ip_addrs': self.ip_ret,
|
||||
'foo.bar': self.foo_ret,
|
||||
'kernel': self.kernel_ret,
|
||||
},
|
||||
'clear': False,
|
||||
}
|
||||
|
@ -415,9 +382,9 @@ class MineTestCase(TestCase, LoaderModuleMockMixin):
|
|||
}), \
|
||||
patch.dict(mine.__salt__, {
|
||||
'config.merge': MagicMock(return_value=config_mine_functions),
|
||||
'grains.get': lambda x: 'Linux!',
|
||||
'network.ip_addrs': MagicMock(return_value='2001:db8::1:3'),
|
||||
'foo.bar': MagicMock(return_value='baz'),
|
||||
'grains.get': lambda x: self.kernel_ret,
|
||||
'network.ip_addrs': MagicMock(return_value=self.ip_ret),
|
||||
'foo.bar': MagicMock(return_value=self.foo_ret),
|
||||
}):
|
||||
# Verify the correct load
|
||||
self.assertEqual(
|
||||
|
|
|
@ -127,23 +127,28 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
|
|||
)
|
||||
|
||||
with patch.object(mysql, 'version', return_value='8.0.11'):
|
||||
self._test_call(mysql.user_exists,
|
||||
{'sql': ('SELECT User,Host FROM mysql.user WHERE '
|
||||
'User = %(user)s AND Host = %(host)s'),
|
||||
'sql_args': {'host': '%',
|
||||
'user': 'mytestuser'
|
||||
}
|
||||
},
|
||||
user='mytestuser',
|
||||
host='%',
|
||||
password='BLUECOW'
|
||||
)
|
||||
with patch.object(mysql, '__get_auth_plugin', MagicMock(return_value='mysql_native_password')):
|
||||
self._test_call(mysql.user_exists,
|
||||
{'sql': ('SELECT User,Host FROM mysql.user WHERE '
|
||||
'User = %(user)s AND Host = %(host)s AND '
|
||||
'Password = %(password)s'),
|
||||
'sql_args': {'host': '%',
|
||||
'password': '*1A01CF8FBE6425398935FB90359AD8B817399102',
|
||||
'user': 'mytestuser'
|
||||
}
|
||||
},
|
||||
user='mytestuser',
|
||||
host='%',
|
||||
password='BLUECOW'
|
||||
)
|
||||
|
||||
with patch.object(mysql, 'version', return_value='10.2.21-MariaDB'):
|
||||
self._test_call(mysql.user_exists,
|
||||
{'sql': ('SELECT User,Host FROM mysql.user WHERE '
|
||||
'User = %(user)s AND Host = %(host)s'),
|
||||
'User = %(user)s AND Host = %(host)s AND '
|
||||
'Password = PASSWORD(%(password)s)'),
|
||||
'sql_args': {'host': 'localhost',
|
||||
'password': 'BLUECOW',
|
||||
'user': 'mytestuser'
|
||||
}
|
||||
},
|
||||
|
@ -175,16 +180,59 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Test the creation of a MySQL user in mysql exec module
|
||||
'''
|
||||
self._test_call(mysql.user_create,
|
||||
{'sql': 'CREATE USER %(user)s@%(host)s IDENTIFIED BY %(password)s',
|
||||
'sql_args': {'password': 'BLUECOW',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser',
|
||||
password='BLUECOW'
|
||||
)
|
||||
with patch.object(mysql, 'version', return_value='8.0.10'):
|
||||
with patch.object(mysql, '__get_auth_plugin', MagicMock(return_value='mysql_native_password')):
|
||||
self._test_call(mysql.user_create,
|
||||
{'sql': 'CREATE USER %(user)s@%(host)s IDENTIFIED BY %(password)s',
|
||||
'sql_args': {'password': 'BLUECOW',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser',
|
||||
password='BLUECOW'
|
||||
)
|
||||
|
||||
with patch.object(mysql, 'version', return_value='8.0.11'):
|
||||
with patch.object(mysql, '__get_auth_plugin', MagicMock(return_value='mysql_native_password')):
|
||||
self._test_call(mysql.user_create,
|
||||
{'sql': 'CREATE USER %(user)s@%(host)s IDENTIFIED WITH %(auth_plugin)s BY %(password)s',
|
||||
'sql_args': {'password': 'BLUECOW',
|
||||
'auth_plugin': 'mysql_native_password',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser',
|
||||
password='BLUECOW'
|
||||
)
|
||||
|
||||
# Test creating a user with passwordless=True and unix_socket=True
|
||||
with patch.object(mysql, 'version', return_value='8.0.10'):
|
||||
with patch.object(mysql, 'plugin_status', MagicMock(return_value='ACTIVE')):
|
||||
self._test_call(mysql.user_create,
|
||||
{'sql': 'CREATE USER %(user)s@%(host)s IDENTIFIED WITH auth_socket',
|
||||
'sql_args': {'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser',
|
||||
allow_passwordless=True,
|
||||
unix_socket=True,
|
||||
)
|
||||
|
||||
with patch.object(mysql, 'version', return_value='10.2.21-MariaDB'):
|
||||
with patch.object(mysql, 'plugin_status', MagicMock(return_value='ACTIVE')):
|
||||
self._test_call(mysql.user_create,
|
||||
{'sql': 'CREATE USER %(user)s@%(host)s IDENTIFIED VIA unix_socket',
|
||||
'sql_args': {'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser',
|
||||
allow_passwordless=True,
|
||||
unix_socket=True,
|
||||
)
|
||||
|
||||
def test_user_chpass(self):
|
||||
'''
|
||||
|
@ -193,49 +241,52 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
|
|||
connect_mock = MagicMock()
|
||||
with patch.object(mysql, '_connect', connect_mock):
|
||||
with patch.object(mysql, 'version', return_value='8.0.10'):
|
||||
with patch.dict(mysql.__salt__, {'config.option': MagicMock()}):
|
||||
mysql.user_chpass('testuser', password='BLUECOW')
|
||||
calls = (
|
||||
call().cursor().execute(
|
||||
'UPDATE mysql.user SET Password=PASSWORD(%(password)s) WHERE User=%(user)s AND Host = %(host)s;',
|
||||
{'password': 'BLUECOW',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
),
|
||||
call().cursor().execute('FLUSH PRIVILEGES;'),
|
||||
)
|
||||
connect_mock.assert_has_calls(calls, any_order=True)
|
||||
with patch.object(mysql, 'user_exists', MagicMock(return_value=True)):
|
||||
with patch.dict(mysql.__salt__, {'config.option': MagicMock()}):
|
||||
mysql.user_chpass('testuser', password='BLUECOW')
|
||||
calls = (
|
||||
call().cursor().execute(
|
||||
'UPDATE mysql.user SET Password=PASSWORD(%(password)s) WHERE User=%(user)s AND Host = %(host)s;',
|
||||
{'password': 'BLUECOW',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
),
|
||||
call().cursor().execute('FLUSH PRIVILEGES;'),
|
||||
)
|
||||
connect_mock.assert_has_calls(calls, any_order=True)
|
||||
|
||||
connect_mock = MagicMock()
|
||||
with patch.object(mysql, '_connect', connect_mock):
|
||||
with patch.object(mysql, 'version', return_value='8.0.11'):
|
||||
with patch.dict(mysql.__salt__, {'config.option': MagicMock()}):
|
||||
mysql.user_chpass('testuser', password='BLUECOW')
|
||||
calls = (
|
||||
call().cursor().execute(
|
||||
"ALTER USER %(user)s@%(host)s IDENTIFIED BY %(password)s;",
|
||||
{'password': 'BLUECOW',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
),
|
||||
call().cursor().execute('FLUSH PRIVILEGES;'),
|
||||
)
|
||||
connect_mock.assert_has_calls(calls, any_order=True)
|
||||
with patch.object(mysql, 'user_exists', MagicMock(return_value=True)):
|
||||
with patch.dict(mysql.__salt__, {'config.option': MagicMock()}):
|
||||
mysql.user_chpass('testuser', password='BLUECOW')
|
||||
calls = (
|
||||
call().cursor().execute(
|
||||
"ALTER USER %(user)s@%(host)s IDENTIFIED BY %(password)s;",
|
||||
{'password': 'BLUECOW',
|
||||
'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
),
|
||||
call().cursor().execute('FLUSH PRIVILEGES;'),
|
||||
)
|
||||
connect_mock.assert_has_calls(calls, any_order=True)
|
||||
|
||||
def test_user_remove(self):
|
||||
'''
|
||||
Test the removal of a MySQL user in the mysql exec module
|
||||
'''
|
||||
self._test_call(mysql.user_remove,
|
||||
{'sql': 'DROP USER %(user)s@%(host)s',
|
||||
'sql_args': {'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser'
|
||||
)
|
||||
with patch.object(mysql, 'user_exists', MagicMock(return_value=True)):
|
||||
self._test_call(mysql.user_remove,
|
||||
{'sql': 'DROP USER %(user)s@%(host)s',
|
||||
'sql_args': {'user': 'testuser',
|
||||
'host': 'localhost',
|
||||
}
|
||||
},
|
||||
'testuser'
|
||||
)
|
||||
|
||||
def test_db_check(self):
|
||||
'''
|
||||
|
@ -458,6 +509,36 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
|
|||
expected = 'MySQL Error 9999: Something Went Wrong'
|
||||
self.assertEqual(mysql.__context__['mysql.error'], expected)
|
||||
|
||||
def test_plugin_add(self):
|
||||
'''
|
||||
Test adding/installing a MySQL / MariaDB plugin
|
||||
'''
|
||||
with patch.object(mysql, 'plugin_status', MagicMock(return_value='')):
|
||||
self._test_call(mysql.plugin_add,
|
||||
'INSTALL PLUGIN auth_socket SONAME "auth_socket.so"',
|
||||
'auth_socket',
|
||||
)
|
||||
|
||||
def test_plugin_remove(self):
|
||||
'''
|
||||
Test removing/uninstalling a MySQL / MariaDB plugin
|
||||
'''
|
||||
with patch.object(mysql, 'plugin_status', MagicMock(return_value='ACTIVE')):
|
||||
self._test_call(mysql.plugin_remove,
|
||||
'UNINSTALL PLUGIN auth_socket',
|
||||
'auth_socket',
|
||||
)
|
||||
|
||||
def test_plugin_status(self):
|
||||
'''
|
||||
Test checking the status of a MySQL / MariaDB plugin
|
||||
'''
|
||||
self._test_call(mysql.plugin_status,
|
||||
{'sql': 'SELECT PLUGIN_STATUS FROM INFORMATION_SCHEMA.PLUGINS WHERE PLUGIN_NAME = %(name)s',
|
||||
'sql_args': {'name': 'auth_socket'}
|
||||
},
|
||||
'auth_socket')
|
||||
|
||||
def _test_call(self, function, expected_sql, *args, **kwargs):
|
||||
connect_mock = MagicMock()
|
||||
with patch.object(mysql, '_connect', connect_mock):
|
||||
|
|
|
@ -9,6 +9,10 @@ import salt.modules.pdbedit as pdbedit
|
|||
# Import Salt Testing Libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase
|
||||
from tests.support.mock import (
|
||||
MagicMock,
|
||||
patch,
|
||||
)
|
||||
|
||||
|
||||
class PdbeditTestCase(TestCase, LoaderModuleMockMixin):
|
||||
|
@ -19,6 +23,44 @@ class PdbeditTestCase(TestCase, LoaderModuleMockMixin):
|
|||
def setup_loader_modules(self):
|
||||
return {pdbedit: {}}
|
||||
|
||||
def test_version(self):
|
||||
'''
|
||||
Test salt.modules.pdbedit.__virtual__'s handling of pdbedit versions
|
||||
'''
|
||||
mock_bad_ver = MagicMock(return_value='Ver 1.1a')
|
||||
mock_old_ver = MagicMock(return_value='Version 1.0.0')
|
||||
mock_exa_ver = MagicMock(return_value='Version 4.8.0')
|
||||
mock_new_ver = MagicMock(return_value='Version 4.9.2')
|
||||
|
||||
# NOTE: no pdbedit installed
|
||||
with patch('salt.utils.path.which', MagicMock(return_value=None)):
|
||||
ret = pdbedit.__virtual__()
|
||||
self.assertEqual(ret, (False, 'pdbedit command is not available'))
|
||||
|
||||
# NOTE: pdbedit is not returning a valid version
|
||||
with patch('salt.utils.path.which', MagicMock(return_value='/opt/local/bin/pdbedit')), \
|
||||
patch('salt.modules.cmdmod.run', mock_bad_ver):
|
||||
ret = pdbedit.__virtual__()
|
||||
self.assertEqual(ret, (False, 'pdbedit -V returned an unknown version format'))
|
||||
|
||||
# NOTE: pdbedit is too old
|
||||
with patch('salt.utils.path.which', MagicMock(return_value='/opt/local/bin/pdbedit')), \
|
||||
patch('salt.modules.cmdmod.run', mock_old_ver):
|
||||
ret = pdbedit.__virtual__()
|
||||
self.assertEqual(ret, (False, 'pdbedit is to old, 4.8.0 or newer is required'))
|
||||
|
||||
# NOTE: pdbedit is exactly 4.8.0
|
||||
with patch('salt.utils.path.which', MagicMock(return_value='/opt/local/bin/pdbedit')), \
|
||||
patch('salt.modules.cmdmod.run', mock_exa_ver):
|
||||
ret = pdbedit.__virtual__()
|
||||
self.assertEqual(ret, 'pdbedit')
|
||||
|
||||
# NOTE: pdbedit is newer than 4.8.0
|
||||
with patch('salt.utils.path.which', MagicMock(return_value='/opt/local/bin/pdbedit')), \
|
||||
patch('salt.modules.cmdmod.run', mock_new_ver):
|
||||
ret = pdbedit.__virtual__()
|
||||
self.assertEqual(ret, 'pdbedit')
|
||||
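The four cases above pin down how __virtual__ gates on the pdbedit version string; a rough, self-contained sketch of that gate (not the module's actual parser, names are illustrative) might be:

import re

def pdbedit_version(ver_output):
    # e.g. 'Version 4.9.2' -> (4, 9, 2); None for unrecognised formats like 'Ver 1.1a'
    match = re.search(r'Version (\d+)\.(\d+)\.(\d+)', ver_output)
    if not match:
        return None
    return tuple(int(part) for part in match.groups())

# 4.8.0 or newer is accepted, anything older (or unparseable) is rejected
assert pdbedit_version('Ver 1.1a') is None
assert pdbedit_version('Version 1.0.0') < (4, 8, 0)
assert pdbedit_version('Version 4.8.0') >= (4, 8, 0)
assert pdbedit_version('Version 4.9.2') >= (4, 8, 0)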
|
||||
def test_generate_nt_hash(self):
|
||||
'''
|
||||
Test salt.modules.pdbedit.generate_nt_hash
|
||||
|
|
|
@ -14,12 +14,29 @@ from tests.support.mock import MagicMock, patch
|
|||
import salt.utils.platform
|
||||
import salt.modules.pip as pip
|
||||
from salt.exceptions import CommandExecutionError
|
||||
import salt.utils.platform
|
||||
|
||||
|
||||
class PipTestCase(TestCase, LoaderModuleMockMixin):
|
||||
def setup_loader_modules(self):
|
||||
return {pip: {'__salt__': {'cmd.which_bin': lambda _: 'pip'}}}
|
||||
|
||||
def test__pip_bin_env(self):
|
||||
ret = pip._pip_bin_env(None, 'C:/Users/ch44d/Documents/salt/tests/pip.exe')
|
||||
if salt.utils.platform.is_windows():
|
||||
self.assertEqual(ret, 'C:/Users/ch44d/Documents/salt/tests')
|
||||
else:
|
||||
self.assertEqual(ret, None)
|
||||
|
||||
def test__pip_bin_env_no_change(self):
|
||||
cwd = 'C:/Users/ch44d/Desktop'
|
||||
ret = pip._pip_bin_env(cwd, 'C:/Users/ch44d/Documents/salt/tests/pip.exe')
|
||||
self.assertEqual(ret, cwd)
|
||||
|
||||
def test__pip_bin_env_no_bin_env(self):
|
||||
ret = pip._pip_bin_env(None, None)
|
||||
self.assertEqual(None, None)
|
||||
|
||||
def test_fix4361(self):
|
||||
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
|
||||
with patch.dict(pip.__salt__, {'cmd.run_all': mock}):
|
||||
|
|
|
@ -350,3 +350,31 @@ class VirtualenvTestCase(TestCase, LoaderModuleMockMixin):
|
|||
runas=None,
|
||||
python_shell=False
|
||||
)
|
||||
|
||||
def test_virtualenv_ver(self):
|
||||
'''
|
||||
test virtualenv_ver when there is no ImportError
|
||||
'''
|
||||
ret = virtualenv_mod.virtualenv_ver(venv_bin='pyvenv')
|
||||
assert ret == (1, 9, 1)
|
||||
|
||||
def test_virtualenv_ver_importerror(self):
|
||||
'''
|
||||
test virtualenv_ver when there is an ImportError
|
||||
'''
|
||||
with ForceImportErrorOn('virtualenv'):
|
||||
mock_ver = MagicMock(return_value={'retcode': 0, 'stdout': '1.9.1'})
|
||||
with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock_ver}):
|
||||
ret = virtualenv_mod.virtualenv_ver(venv_bin='pyenv')
|
||||
assert ret == (1, 9, 1)
|
||||
|
||||
def test_virtualenv_ver_importerror_cmd_error(self):
|
||||
'''
|
||||
test virtualenv_ver when there is an ImportError
|
||||
and virtualenv --version does not return anything
|
||||
'''
|
||||
with ForceImportErrorOn('virtualenv'):
|
||||
mock_ver = MagicMock(return_value={'retcode': 0, 'stdout': ''})
|
||||
with patch.dict(virtualenv_mod.__salt__, {'cmd.run_all': mock_ver}):
|
||||
with self.assertRaises(CommandExecutionError):
|
||||
virtualenv_mod.virtualenv_ver(venv_bin='pyenv')
|
||||
|
|
|
@ -10,11 +10,7 @@ import types
|
|||
# Import Salt Testing Libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import (
|
||||
MagicMock,
|
||||
patch,
|
||||
Mock,
|
||||
)
|
||||
from tests.support.mock import MagicMock, patch, Mock
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.modules.win_dns_client as win_dns_client
|
||||
|
@ -68,7 +64,6 @@ class WinDnsClientTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'''
|
||||
Test cases for salt.modules.win_dns_client
|
||||
'''
|
||||
|
||||
def setup_loader_modules(self):
|
||||
# wmi and pythoncom modules are platform specific...
|
||||
mock_pythoncom = types.ModuleType(
|
||||
|
@ -89,8 +84,7 @@ class WinDnsClientTestCase(TestCase, LoaderModuleMockMixin):
|
|||
Test if it return a list of the configured DNS servers
|
||||
of the specified interface.
|
||||
'''
|
||||
with patch('salt.utils', Mockwinapi), \
|
||||
patch('salt.utils.winapi.Com', MagicMock()), \
|
||||
with patch('salt.utils.winapi.Com', MagicMock()), \
|
||||
patch.object(self.WMI, 'Win32_NetworkAdapter',
|
||||
return_value=[Mockwmi()]), \
|
||||
patch.object(self.WMI, 'Win32_NetworkAdapterConfiguration',
|
||||
|
@ -159,3 +153,22 @@ class WinDnsClientTestCase(TestCase, LoaderModuleMockMixin):
|
|||
return_value=[Mockwmi()]), \
|
||||
patch.object(wmi, 'WMI', Mock(return_value=self.WMI)):
|
||||
self.assertTrue(win_dns_client.get_dns_config())
|
||||
|
||||
@patch('salt.utils.platform.is_windows')
|
||||
def test___virtual__non_windows(self, mock):
|
||||
mock.return_value = False
|
||||
result = win_dns_client.__virtual__()
|
||||
expected = (False, 'Module win_dns_client: module only works on '
|
||||
'Windows systems')
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
@patch.object(win_dns_client, 'HAS_LIBS', False)
|
||||
def test___virtual__missing_libs(self):
|
||||
result = win_dns_client.__virtual__()
|
||||
expected = (False, 'Module win_dns_client: missing required libraries')
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
def test___virtual__(self):
|
||||
result = win_dns_client.__virtual__()
|
||||
expected = 'win_dns_client'
|
||||
self.assertEqual(result, expected)
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
import glob
|
||||
import os
|
||||
|
||||
# Import Salt Testing Libs
|
||||
|
@ -15,31 +16,28 @@ from tests.support.unit import TestCase, skipIf
|
|||
|
||||
# Import Salt Libs
|
||||
import salt.config
|
||||
import salt.modules.cmdmod
|
||||
import salt.modules.file
|
||||
import salt.modules.win_file as win_file
|
||||
import salt.loader
|
||||
import salt.modules.win_lgpo as win_lgpo
|
||||
import salt.states.win_lgpo
|
||||
import salt.utils.files
|
||||
import salt.utils.platform
|
||||
import salt.utils.win_dacl
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
import salt.utils.win_reg
|
||||
import salt.utils.stringutils
|
||||
|
||||
# Import 3rd Party Libs
|
||||
import salt.ext.six as six
|
||||
|
||||
# We're going to actually use the loader, without grains (slow)
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
utils = salt.loader.utils(opts)
|
||||
modules = salt.loader.minion_mods(opts, utils=utils)
|
||||
|
||||
LOADER_DICTS = {
|
||||
win_lgpo: {
|
||||
'__salt__': {
|
||||
'file.file_exists': salt.modules.file.file_exists,
|
||||
'file.makedirs': win_file.makedirs_,
|
||||
'file.write': salt.modules.file.write,
|
||||
'file.remove': win_file.remove,
|
||||
'cmd.run': salt.modules.cmdmod.run},
|
||||
'__opts__': salt.config.DEFAULT_MINION_OPTS.copy(),
|
||||
'__utils__': {
|
||||
'reg.read_value': salt.utils.win_reg.read_value,
|
||||
'auditpol.get_auditpol_dump':
|
||||
salt.utils.win_lgpo_auditpol.get_auditpol_dump}},
|
||||
win_file: {
|
||||
'__utils__': {
|
||||
'dacl.set_perms': salt.utils.win_dacl.set_perms}}}
|
||||
'__opts__': opts,
|
||||
'__salt__': modules,
|
||||
'__utils__': utils,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class WinLGPOTestCase(TestCase):
|
||||
|
@ -60,6 +58,88 @@ class WinLGPOTestCase(TestCase):
|
|||
expected = '300000 or 5 minutes (recommended)'
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
def test__regexSearchKeyValueCombo_enabled(self):
|
||||
'''
|
||||
Make sure _regexSearchKeyValueCombo returns the matching policy data when the value is explicitly enabled
|
||||
'''
|
||||
policy_data = b'[\x00s\x00o\x00f\x00t\x00w\x00a\x00r\x00e\x00\\\x00p' \
|
||||
b'\x00o\x00l\x00i\x00c\x00i\x00e\x00s\x00\\\x00m\x00i' \
|
||||
b'\x00c\x00r\x00o\x00s\x00o\x00f\x00t\x00\\\x00w\x00i' \
|
||||
b'\x00n\x00d\x00o\x00w\x00s\x00\\\x00w\x00i\x00n\x00d' \
|
||||
b'\x00o\x00w\x00s\x00 \x00e\x00r\x00r\x00o\x00r\x00 ' \
|
||||
b'\x00r\x00e\x00p\x00o\x00r\x00t\x00i\x00n\x00g\x00\\' \
|
||||
b'\x00c\x00o\x00n\x00s\x00e\x00n\x00t\x00\x00\x00;\x00D' \
|
||||
b'\x00e\x00f\x00a\x00u\x00l\x00t\x00C\x00o\x00n\x00s' \
|
||||
b'\x00e\x00n\x00t\x00\x00\x00;\x00\x01\x00\x00\x00;\x00' \
|
||||
b'\x04\x00\x00\x00;\x00\x02\x00\x00\x00]\x00'
|
||||
policy_regpath = b'\x00s\x00o\x00f\x00t\x00w\x00a\x00r\x00e\x00\\\x00p' \
|
||||
b'\x00o\x00l\x00i\x00c\x00i\x00e\x00s\x00\\\x00m\x00i' \
|
||||
b'\x00c\x00r\x00o\x00s\x00o\x00f\x00t\x00\\\x00w\x00i' \
|
||||
b'\x00n\x00d\x00o\x00w\x00s\x00\\\x00w\x00i\x00n\x00d' \
|
||||
b'\x00o\x00w\x00s\x00 \x00e\x00r\x00r\x00o\x00r\x00 ' \
|
||||
b'\x00r\x00e\x00p\x00o\x00r\x00t\x00i\x00n\x00g\x00\\' \
|
||||
b'\x00c\x00o\x00n\x00s\x00e\x00n\x00t\x00\x00'
|
||||
policy_regkey = b'\x00D\x00e\x00f\x00a\x00u\x00l\x00t\x00C\x00o\x00n' \
|
||||
b'\x00s\x00e\x00n\x00t\x00\x00'
|
||||
test = win_lgpo._regexSearchKeyValueCombo(
|
||||
policy_data=policy_data,
|
||||
policy_regpath=policy_regpath,
|
||||
policy_regkey=policy_regkey
|
||||
)
|
||||
self.assertEqual(test, policy_data)
|
||||
|
||||
def test__regexSearchKeyValueCombo_not_configured(self):
|
||||
'''
|
||||
Make sure _regexSearchKeyValueCombo returns None when the policy is not configured (empty policy data)
|
||||
'''
|
||||
policy_data = b''
|
||||
policy_regpath = b'\x00s\x00o\x00f\x00t\x00w\x00a\x00r\x00e\x00\\\x00p' \
|
||||
b'\x00o\x00l\x00i\x00c\x00i\x00e\x00s\x00\\\x00m\x00i' \
|
||||
b'\x00c\x00r\x00o\x00s\x00o\x00f\x00t\x00\\\x00w\x00i' \
|
||||
b'\x00n\x00d\x00o\x00w\x00s\x00\\\x00w\x00i\x00n\x00d' \
|
||||
b'\x00o\x00w\x00s\x00 \x00e\x00r\x00r\x00o\x00r\x00 ' \
|
||||
b'\x00r\x00e\x00p\x00o\x00r\x00t\x00i\x00n\x00g\x00\\' \
|
||||
b'\x00c\x00o\x00n\x00s\x00e\x00n\x00t\x00\x00'
|
||||
policy_regkey = b'\x00D\x00e\x00f\x00a\x00u\x00l\x00t\x00C\x00o\x00n' \
|
||||
b'\x00s\x00e\x00n\x00t\x00\x00'
|
||||
test = win_lgpo._regexSearchKeyValueCombo(
|
||||
policy_data=policy_data,
|
||||
policy_regpath=policy_regpath,
|
||||
policy_regkey=policy_regkey
|
||||
)
|
||||
self.assertIsNone(test)
|
||||
|
||||
def test__regexSearchKeyValueCombo_disabled(self):
|
||||
'''
|
||||
Make sure _regexSearchKeyValueCombo still returns the matching policy data when the value is disabled (the **del. prefix form)
|
||||
'''
|
||||
policy_data = b'[\x00s\x00o\x00f\x00t\x00w\x00a\x00r\x00e\x00\\\x00p' \
|
||||
b'\x00o\x00l\x00i\x00c\x00i\x00e\x00s\x00\\\x00m\x00i' \
|
||||
b'\x00c\x00r\x00o\x00s\x00o\x00f\x00t\x00\\\x00w\x00i' \
|
||||
b'\x00n\x00d\x00o\x00w\x00s\x00\\\x00w\x00i\x00n\x00d' \
|
||||
b'\x00o\x00w\x00s\x00 \x00e\x00r\x00r\x00o\x00r\x00 ' \
|
||||
b'\x00r\x00e\x00p\x00o\x00r\x00t\x00i\x00n\x00g\x00\\' \
|
||||
b'\x00c\x00o\x00n\x00s\x00e\x00n\x00t\x00\x00\x00;\x00*' \
|
||||
b'\x00*\x00d\x00e\x00l\x00.\x00D\x00e\x00f\x00a\x00u' \
|
||||
b'\x00l\x00t\x00C\x00o\x00n\x00s\x00e\x00n\x00t\x00\x00' \
|
||||
b'\x00;\x00\x01\x00\x00\x00;\x00\x04\x00\x00\x00;\x00 ' \
|
||||
b'\x00\x00\x00]\x00'
|
||||
policy_regpath = b'\x00s\x00o\x00f\x00t\x00w\x00a\x00r\x00e\x00\\\x00p' \
|
||||
b'\x00o\x00l\x00i\x00c\x00i\x00e\x00s\x00\\\x00m\x00i' \
|
||||
b'\x00c\x00r\x00o\x00s\x00o\x00f\x00t\x00\\\x00w\x00i' \
|
||||
b'\x00n\x00d\x00o\x00w\x00s\x00\\\x00w\x00i\x00n\x00d' \
|
||||
b'\x00o\x00w\x00s\x00 \x00e\x00r\x00r\x00o\x00r\x00 ' \
|
||||
b'\x00r\x00e\x00p\x00o\x00r\x00t\x00i\x00n\x00g\x00\\' \
|
||||
b'\x00c\x00o\x00n\x00s\x00e\x00n\x00t\x00\x00'
|
||||
policy_regkey = b'\x00D\x00e\x00f\x00a\x00u\x00l\x00t\x00C\x00o\x00n' \
|
||||
b'\x00s\x00e\x00n\x00t\x00\x00'
|
||||
test = win_lgpo._regexSearchKeyValueCombo(
|
||||
policy_data=policy_data,
|
||||
policy_regpath=policy_regpath,
|
||||
policy_regkey=policy_regkey
|
||||
)
|
||||
self.assertEqual(test, policy_data)
|
||||
|
||||
def test__encode_string(self):
|
||||
'''
|
||||
``_encode_string`` should return a null terminated ``utf-16-le`` encoded
|
||||
|
@ -254,6 +334,39 @@ class WinLGPOGetPolicyADMXTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'Allow Telemetry': 'Not Configured'}}}}}
|
||||
self.assertDictEqual(result, expected)
|
||||
|
||||
@destructiveTest
|
||||
def test__load_policy_definitions(self):
|
||||
'''
|
||||
Test that unexpected files in the PolicyDefinitions directory won't
|
||||
cause the _load_policy_definitions function to explode
|
||||
https://gitlab.com/saltstack/enterprise/lock/issues/3826
|
||||
'''
|
||||
# The PolicyDefinitions directory should only contain ADMX files. We
|
||||
# want to make sure the `_load_policy_definitions` function skips non
|
||||
# ADMX files in this directory.
|
||||
# Create a bogus ADML file in PolicyDefinitions directory
|
||||
bogus_fle = os.path.join(
|
||||
'c:\\Windows\\PolicyDefinitions',
|
||||
'_bogus.adml')
|
||||
cache_dir = os.path.join(
|
||||
win_lgpo.__opts__['cachedir'],
|
||||
'lgpo',
|
||||
'policy_defs')
|
||||
try:
|
||||
with salt.utils.files.fopen(bogus_fle, 'w+') as fh:
|
||||
fh.write('<junk></junk>')
|
||||
# This function doesn't return anything (None), it just loads
|
||||
# the XPath structures into __context__. We're just making sure it
|
||||
# doesn't stack trace here
|
||||
self.assertIsNone(win_lgpo._load_policy_definitions())
|
||||
finally:
|
||||
# Remove source file
|
||||
os.remove(bogus_fle)
|
||||
# Remove cached file
|
||||
search_string = '{0}\\_bogus*.adml'.format(cache_dir)
|
||||
for file_name in glob.glob(search_string):
|
||||
os.remove(file_name)
|
||||
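As a sketch of the behaviour this test relies on, skipping non-ADMX files can be as simple as an extension check while walking PolicyDefinitions (the helper name and glob pattern here are assumptions, not the module's real code):

import glob
import os

def iter_admx_files(policy_def_path='c:\\Windows\\PolicyDefinitions'):
    # Only .admx files are policy definitions; stray files such as the
    # _bogus.adml created above are silently skipped instead of raising
    for path in glob.glob(os.path.join(policy_def_path, '*')):
        if os.path.splitext(path)[1].lower() == '.admx':
            yield path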
|
||||
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
|
||||
class WinLGPOGetPolicyFromPolicyInfoTestCase(TestCase, LoaderModuleMockMixin):
|
||||
|
@ -605,13 +718,16 @@ class WinLGPOGetPointAndPrintENTestCase(TestCase, LoaderModuleMockMixin):
|
|||
policy_class=policy_class,
|
||||
adml_language='en-US')
|
||||
if success:
|
||||
return salt.modules.win_lgpo._get_policy_adm_setting(
|
||||
results = salt.modules.win_lgpo._get_policy_adm_setting(
|
||||
admx_policy=policy_obj,
|
||||
policy_class=policy_class,
|
||||
adml_language='en-US',
|
||||
return_full_policy_names=return_full_policy_names,
|
||||
hierarchical_return=hierarchical_return
|
||||
)
|
||||
if six.PY2:
|
||||
results = salt.states.win_lgpo._convert_to_unicode(results)
|
||||
return results
|
||||
return 'Policy Not Found'
|
||||
|
||||
def test_point_and_print_enabled(self):
|
||||
|
@ -631,7 +747,7 @@ class WinLGPOGetPointAndPrintENTestCase(TestCase, LoaderModuleMockMixin):
|
|||
True,
|
||||
'PointAndPrint_TrustedServers_Chk':
|
||||
True,
|
||||
u'PointAndPrint_TrustedServers_Edit':
|
||||
'PointAndPrint_TrustedServers_Edit':
|
||||
'fakeserver1;fakeserver2'}}
|
||||
self.assertDictEqual(result, expected)
|
||||
|
||||
|
@ -655,7 +771,7 @@ class WinLGPOGetPointAndPrintENTestCase(TestCase, LoaderModuleMockMixin):
|
|||
True,
|
||||
'PointAndPrint_TrustedServers_Chk':
|
||||
True,
|
||||
u'PointAndPrint_TrustedServers_Edit':
|
||||
'PointAndPrint_TrustedServers_Edit':
|
||||
'fakeserver1;fakeserver2'}}}}}
|
||||
self.assertDictEqual(result, expected)
|
||||
|
||||
|
@ -674,8 +790,8 @@ class WinLGPOGetPointAndPrintENTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'Show warning and elevation prompt',
|
||||
'Users can only point and print to machines in their forest':
|
||||
True,
|
||||
u'Users can only point and print to these servers': True,
|
||||
u'When updating drivers for an existing connection':
|
||||
'Users can only point and print to these servers': True,
|
||||
'When updating drivers for an existing connection':
|
||||
'Show warning only'}}
|
||||
self.assertDictEqual(result, expected)
|
||||
|
||||
|
@ -699,8 +815,36 @@ class WinLGPOGetPointAndPrintENTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'Users can only point and print to machines in '
|
||||
'their forest':
|
||||
True,
|
||||
u'Users can only point and print to these servers':
|
||||
'Users can only point and print to these servers':
|
||||
True,
|
||||
u'When updating drivers for an existing connection':
|
||||
'When updating drivers for an existing connection':
|
||||
'Show warning only'}}}}}
|
||||
self.assertDictEqual(result, expected)
|
||||
|
||||
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
|
||||
class WinLGPOGetPolicyFromPolicyResources(TestCase, LoaderModuleMockMixin):
|
||||
'''
|
||||
Test functions related to policy info gathered from ADMX/ADML files
|
||||
'''
|
||||
adml_data = None
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return LOADER_DICTS
|
||||
|
||||
def setUp(self):
|
||||
if self.adml_data is None:
|
||||
self.adml_data = win_lgpo._get_policy_resources('en-US')
|
||||
|
||||
def test__getAdmlPresentationRefId(self):
|
||||
ref_id = 'LetAppsAccessAccountInfo_Enum'
|
||||
expected = 'Default for all apps'
|
||||
result = win_lgpo._getAdmlPresentationRefId(self.adml_data, ref_id)
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
def test__getAdmlPresentationRefId_result_text_is_none(self):
|
||||
ref_id = 'LetAppsAccessAccountInfo_UserInControlOfTheseApps_List'
|
||||
expected = 'Put user in control of these specific apps (use Package ' \
|
||||
'Family Names)'
|
||||
result = win_lgpo._getAdmlPresentationRefId(self.adml_data, ref_id)
|
||||
self.assertEqual(result, expected)
|
||||
|
|
tests/unit/modules/test_win_task.py (new file, 60 lines)
|
@ -0,0 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.helpers import destructiveTest
|
||||
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.modules.win_task as win_task
|
||||
import salt.utils.platform
|
||||
|
||||
|
||||
@destructiveTest
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
|
||||
class WinTaskTestCase(TestCase):
|
||||
'''
|
||||
Test cases for salt.modules.win_task
|
||||
'''
|
||||
def test_repeat_interval(self):
|
||||
task_name = 'SaltTest1'
|
||||
try:
|
||||
ret = win_task.create_task(task_name,
|
||||
user_name='System',
|
||||
force=True,
|
||||
action_type='Execute',
|
||||
cmd='c:\\salt\\salt-call.bat',
|
||||
trigger_type='Daily',
|
||||
trigger_enabled=True,
|
||||
repeat_duration='30 minutes',
|
||||
repeat_interval='30 minutes')
|
||||
self.assertTrue(ret)
|
||||
|
||||
ret = win_task.info(task_name)
|
||||
self.assertEqual(ret['triggers'][0]['trigger_type'], 'Daily')
|
||||
finally:
|
||||
ret = win_task.delete_task(task_name)
|
||||
self.assertTrue(ret)
|
||||
|
||||
def test_repeat_interval_and_indefinitely(self):
|
||||
task_name = 'SaltTest2'
|
||||
try:
|
||||
ret = win_task.create_task(task_name,
|
||||
user_name='System',
|
||||
force=True,
|
||||
action_type='Execute',
|
||||
cmd='c:\\salt\\salt-call.bat',
|
||||
trigger_type='Daily',
|
||||
trigger_enabled=True,
|
||||
repeat_duration='Indefinitely',
|
||||
repeat_interval='30 minutes')
|
||||
self.assertTrue(ret)
|
||||
|
||||
ret = win_task.info(task_name)
|
||||
self.assertEqual(ret['triggers'][0]['trigger_type'], 'Daily')
|
||||
finally:
|
||||
ret = win_task.delete_task(task_name)
|
||||
self.assertTrue(ret)
|
|
@ -27,6 +27,7 @@ import salt.utils.files
|
|||
import salt.utils.path
|
||||
import salt.utils.platform
|
||||
import salt.modules.zcbuildout as buildout
|
||||
import salt.modules.virtualenv_mod
|
||||
import salt.modules.cmdmod as cmd
|
||||
from salt.ext import six
|
||||
|
||||
|
@ -466,6 +467,9 @@ class BuildoutOnlineTestCase(Base):
|
|||
|
||||
@requires_network()
|
||||
def test_run_buildout(self):
|
||||
if salt.modules.virtualenv_mod.virtualenv_ver(self.ppy_st) >= (20, 0, 0):
|
||||
self.skipTest("Skiping until upstream resolved https://github.com/pypa/virtualenv/issues/1715")
|
||||
|
||||
b_dir = os.path.join(self.tdir, 'b')
|
||||
ret = buildout.bootstrap(b_dir, buildout_ver=2, python=self.py_st)
|
||||
self.assertTrue(ret['status'])
|
||||
|
@ -477,6 +481,9 @@ class BuildoutOnlineTestCase(Base):
|
|||
|
||||
@requires_network()
|
||||
def test_buildout(self):
|
||||
if salt.modules.virtualenv_mod.virtualenv_ver(self.ppy_st) >= (20, 0, 0):
|
||||
self.skipTest("Skiping until upstream resolved https://github.com/pypa/virtualenv/issues/1715")
|
||||
|
||||
b_dir = os.path.join(self.tdir, 'b')
|
||||
ret = buildout.buildout(b_dir, buildout_ver=2, python=self.py_st)
|
||||
self.assertTrue(ret['status'])
|
||||
|
|
|
@ -49,7 +49,7 @@ class RegTestCase(TestCase, LoaderModuleMockMixin):
|
|||
Test to set a registry entry.
|
||||
'''
|
||||
expected = {
|
||||
'comment': 'Added {0} to {0}'.format(self.name),
|
||||
'comment': 'Added {0} to {1}'.format(self.vname, self.name),
|
||||
'pchanges': {},
|
||||
'changes': {
|
||||
'reg': {
|
||||
|
@ -58,15 +58,65 @@ class RegTestCase(TestCase, LoaderModuleMockMixin):
|
|||
'Perms': {
|
||||
'Deny': None,
|
||||
'Grant': None},
|
||||
'Value': '0.15.3',
|
||||
'Value': self.vdata,
|
||||
'Key': self.name,
|
||||
'Owner': None,
|
||||
'Entry': 'version'}}},
|
||||
'Entry': self.vname}}},
|
||||
'name': self.name,
|
||||
'result': True}
|
||||
ret = reg.present(self.name, vname=self.vname, vdata=self.vdata)
|
||||
self.assertDictEqual(ret, expected)
|
||||
|
||||
@destructiveTest
|
||||
def test_present_string_dword(self):
|
||||
'''
|
||||
Test to set a registry entry.
|
||||
'''
|
||||
vname = 'dword_data'
|
||||
vdata = '00000001'
|
||||
vtype = 'REG_DWORD'
|
||||
expected_vdata = 1
|
||||
expected = {
|
||||
'comment': 'Added {0} to {1}'.format(vname, self.name),
|
||||
'pchanges': {},
|
||||
'changes': {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Inheritance': True,
|
||||
'Perms': {
|
||||
'Deny': None,
|
||||
'Grant': None},
|
||||
'Value': expected_vdata,
|
||||
'Key': self.name,
|
||||
'Owner': None,
|
||||
'Entry': vname}}},
|
||||
'name': self.name,
|
||||
'result': True}
|
||||
ret = reg.present(
|
||||
self.name, vname=vname, vdata=vdata, vtype=vtype)
|
||||
self.assertDictEqual(ret, expected)
|
||||
|
||||
@destructiveTest
|
||||
def test_present_string_dword_existing(self):
|
||||
'''
|
||||
Test to set a registry entry.
|
||||
'''
|
||||
vname = 'dword_data'
|
||||
vdata = '0000001'
|
||||
vtype = 'REG_DWORD'
|
||||
# Set it first
|
||||
reg.present(
|
||||
self.name, vname=vname, vdata=vdata, vtype=vtype)
|
||||
expected = {
|
||||
'comment': '{0} in {1} is already present'.format(vname, self.name),
|
||||
'pchanges': {},
|
||||
'changes': {},
|
||||
'name': self.name,
|
||||
'result': True}
|
||||
ret = reg.present(
|
||||
self.name, vname=vname, vdata=vdata, vtype=vtype)
|
||||
self.assertDictEqual(ret, expected)
|
||||
|
||||
def test_present_test_true(self):
|
||||
expected = {
|
||||
'comment': '',
|
||||
|
|
|
@ -7,14 +7,15 @@
|
|||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.helpers import destructiveTest
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.unit import TestCase
|
||||
from tests.support.mock import (
|
||||
MagicMock,
|
||||
patch,
|
||||
)
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
# Import Salt Libs
|
||||
import salt.utils.platform
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.states.service as service
|
||||
|
||||
|
||||
|
@ -251,3 +252,71 @@ class ServiceTestCase(TestCase, LoaderModuleMockMixin):
|
|||
ret[3])
|
||||
|
||||
self.assertDictEqual(service.mod_watch("salt", "stack"), ret[1])
|
||||
|
||||
|
||||
@destructiveTest
|
||||
@skipIf(salt.utils.platform.is_darwin(), "service.running is currently failing on OSX")
|
||||
class ServiceTestCaseFunctional(TestCase, LoaderModuleMockMixin):
|
||||
'''
|
||||
Validate the service state
|
||||
'''
|
||||
def setup_loader_modules(self):
|
||||
self.opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
self.opts['grains'] = salt.loader.grains(self.opts)
|
||||
self.utils = salt.loader.utils(self.opts)
|
||||
self.modules = salt.loader.minion_mods(self.opts, utils=self.utils)
|
||||
|
||||
self.service_name = 'cron'
|
||||
cmd_name = 'crontab'
|
||||
os_family = self.opts['grains']['os_family']
|
||||
os_release = self.opts['grains']['osrelease']
|
||||
if os_family == 'RedHat':
|
||||
self.service_name = 'crond'
|
||||
elif os_family == 'Arch':
|
||||
self.service_name = 'sshd'
|
||||
cmd_name = 'systemctl'
|
||||
elif os_family == 'MacOS':
|
||||
self.service_name = 'org.ntp.ntpd'
|
||||
if int(os_release.split('.')[1]) >= 13:
|
||||
self.service_name = 'com.openssh.sshd'
|
||||
elif os_family == 'Windows':
|
||||
self.service_name = 'Spooler'
|
||||
|
||||
if os_family != 'Windows' and salt.utils.path.which(cmd_name) is None:
|
||||
self.skipTest('{0} is not installed'.format(cmd_name))
|
||||
|
||||
return {
|
||||
service: {
|
||||
'__grains__': self.opts['grains'],
|
||||
'__opts__': self.opts,
|
||||
'__salt__': self.modules,
|
||||
'__utils__': self.utils,
|
||||
},
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
self.pre_srv_enabled = True if self.service_name in self.modules['service.get_enabled']() else False
|
||||
self.post_srv_disable = False
|
||||
if not self.pre_srv_enabled:
|
||||
self.modules['service.enable'](self.service_name)
|
||||
self.post_srv_disable = True
|
||||
|
||||
def tearDown(self):
|
||||
if self.post_srv_disable:
|
||||
self.modules['service.disable'](self.service_name)
|
||||
|
||||
def test_running_with_reload(self):
|
||||
with patch.dict(service.__opts__, {'test': False}):
|
||||
service.dead(self.service_name, enable=False)
|
||||
result = service.running(name=self.service_name, enable=True, reload=False)
|
||||
|
||||
expected = {
|
||||
'changes': {
|
||||
self.service_name: True
|
||||
},
|
||||
'comment': 'Service {0} has been enabled, and is '
|
||||
'running'.format(self.service_name),
|
||||
'name': self.service_name,
|
||||
'result': True
|
||||
}
|
||||
self.assertDictEqual(result, expected)
|
||||
|
|
|
@ -5,6 +5,7 @@ from __future__ import absolute_import, print_function, unicode_literals
|
|||
|
||||
# Import Salt Libs
|
||||
import salt.states.smartos as smartos
|
||||
from salt.utils.odict import OrderedDict
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
|
@ -34,3 +35,46 @@ class SmartOsTestCase(TestCase, LoaderModuleMockMixin):
|
|||
ret = smartos.config_present(name=name, value=value)
|
||||
assert not ret['result']
|
||||
assert ret['comment'] == 'Could not add property {0} with value "{1}" to config'.format(name, value)
|
||||
|
||||
def test_parse_vmconfig_vrrp(self):
|
||||
'''
|
||||
Test _parse_vmconfig's vrid -> mac converter
|
||||
|
||||
SmartOS will always use a mac based on the vrrp_vrid,
|
||||
so we will replace the provided mac with the one based
|
||||
on this value.
|
||||
|
||||
Doing so ensures that 'old' nics are removed and 'new'
|
||||
nics get added as these actions are keyed on the mac
|
||||
property.
|
||||
'''
|
||||
# NOTE: vmconfig is not a full vmadm payload,
|
||||
# this is not an issue given we are only testing
|
||||
# the vrrp_vrid to mac conversions
|
||||
ret = smartos._parse_vmconfig(
|
||||
OrderedDict([
|
||||
('nics', OrderedDict([
|
||||
('00:00:5e:00:01:01', OrderedDict([
|
||||
('vrrp_vrid', 1),
|
||||
('vrrp_primary_ip', '12.34.5.6'),
|
||||
])),
|
||||
('00:00:5e:00:01:24', OrderedDict([
|
||||
('vrrp_vrid', 240),
|
||||
('vrrp_primary_ip', '12.34.5.6'),
|
||||
])),
|
||||
('00:22:06:00:00:01', OrderedDict([
|
||||
('ips', ['12.34.5.6/24']),
|
||||
]))
|
||||
]))
|
||||
]),
|
||||
{'nics': 'mac'},
|
||||
)
|
||||
|
||||
# NOTE: nics.0 is a vrrp nic with correct mac (check mac == vrid based -> unchanged)
|
||||
assert ret['nics'][0]['mac'] == '00:00:5e:00:01:01'
|
||||
|
||||
# NOTE: nics.1 is a vrrp nic with incorrect mac (check mac == vrid based -> changed)
|
||||
assert ret['nics'][1]['mac'] == '00:00:5e:00:01:f0'
|
||||
|
||||
# NOTE: nics.2 was not a vrrp nic (check mac was not changed)
|
||||
assert ret['nics'][2]['mac'] == '00:22:06:00:00:01'
|
||||
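For reference, the two replaced macs asserted above follow the standard VRRP virtual-MAC convention (fixed prefix 00:00:5e:00:01 plus the VRID as one hex octet); a minimal sketch of that vrid -> mac mapping, not Salt's actual _parse_vmconfig code, is:

def vrid_to_mac(vrid):
    # the VRID must fit in a single octet
    if not 0 < vrid < 256:
        raise ValueError('vrrp_vrid must be between 1 and 255')
    return '00:00:5e:00:01:{0:02x}'.format(vrid)

assert vrid_to_mac(1) == '00:00:5e:00:01:01'
assert vrid_to_mac(240) == '00:00:5e:00:01:f0'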
|
|
|
@ -13,37 +13,26 @@ from tests.support.unit import TestCase, skipIf
|
|||
|
||||
# Import Salt Libs
|
||||
import salt.config
|
||||
import salt.modules.cmdmod
|
||||
import salt.modules.file
|
||||
import salt.modules.win_file as win_file
|
||||
import salt.modules.win_lgpo as win_lgpo_mod
|
||||
import salt.loader
|
||||
import salt.states.win_lgpo as win_lgpo
|
||||
import salt.utils.platform
|
||||
import salt.utils.win_dacl
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
import salt.utils.win_reg
|
||||
import salt.utils.stringutils
|
||||
|
||||
# Import 3rd Party Libs
|
||||
import salt.ext.six as six
|
||||
|
||||
# We're going to actually use the loader, without grains (slow)
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
utils = salt.loader.utils(opts)
|
||||
modules = salt.loader.minion_mods(opts, utils=utils)
|
||||
|
||||
LOADER_DICTS = {
|
||||
win_lgpo: {
|
||||
'__salt__': {
|
||||
'lgpo.get_policy': win_lgpo_mod.get_policy,
|
||||
'lgpo.get_policy_info': win_lgpo_mod.get_policy_info,
|
||||
'lgpo.set': win_lgpo_mod.set_}},
|
||||
win_lgpo_mod: {
|
||||
'__salt__': {
|
||||
'cmd.run': salt.modules.cmdmod.run,
|
||||
'file.file_exists': salt.modules.file.file_exists,
|
||||
'file.makedirs': win_file.makedirs_,
|
||||
'file.remove': win_file.remove,
|
||||
'file.write': salt.modules.file.write},
|
||||
'__opts__': salt.config.DEFAULT_MINION_OPTS.copy(),
|
||||
'__utils__': {
|
||||
'reg.read_value': salt.utils.win_reg.read_value,
|
||||
'auditpol.get_auditpol_dump':
|
||||
salt.utils.win_lgpo_auditpol.get_auditpol_dump}},
|
||||
win_file: {
|
||||
'__utils__': {
|
||||
'dacl.set_perms': salt.utils.win_dacl.set_perms}}}
|
||||
'__opts__': opts,
|
||||
'__salt__': modules,
|
||||
'__utils__': utils,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class WinLGPOComparePoliciesTestCase(TestCase):
|
||||
|
@ -193,6 +182,7 @@ class WinLGPOPolicyElementNames(TestCase, LoaderModuleMockMixin):
|
|||
with patch.dict(win_lgpo.__opts__, {'test': False}):
|
||||
result = win_lgpo.set_(name='test_state',
|
||||
computer_policy=computer_policy)
|
||||
result = win_lgpo._convert_to_unicode(result)
|
||||
expected = {
|
||||
'Point and Print Restrictions': {
|
||||
'Enter fully qualified server names separated by '
|
||||
|
@ -203,9 +193,9 @@ class WinLGPOPolicyElementNames(TestCase, LoaderModuleMockMixin):
|
|||
'Users can only point and print to machines in '
|
||||
'their forest':
|
||||
True,
|
||||
u'Users can only point and print to these servers':
|
||||
'Users can only point and print to these servers':
|
||||
True,
|
||||
u'When updating drivers for an existing connection':
|
||||
'When updating drivers for an existing connection':
|
||||
'Show warning only'}}
|
||||
self.assertDictEqual(
|
||||
result['changes']['new']['Computer Configuration'], expected)
|
||||
|
@ -231,6 +221,8 @@ class WinLGPOPolicyElementNames(TestCase, LoaderModuleMockMixin):
|
|||
with patch.dict(win_lgpo.__opts__, {'test': False}):
|
||||
result = win_lgpo.set_(name='test_state',
|
||||
computer_policy=computer_policy)
|
||||
if six.PY2:
|
||||
result = win_lgpo._convert_to_unicode(result)
|
||||
expected = {
|
||||
'Point and Print Restrictions': {
|
||||
'Enter fully qualified server names separated by '
|
||||
|
@ -241,9 +233,9 @@ class WinLGPOPolicyElementNames(TestCase, LoaderModuleMockMixin):
|
|||
'Users can only point and print to machines in '
|
||||
'their forest':
|
||||
True,
|
||||
u'Users can only point and print to these servers':
|
||||
'Users can only point and print to these servers':
|
||||
True,
|
||||
u'When updating drivers for an existing connection':
|
||||
'When updating drivers for an existing connection':
|
||||
'Show warning only'}}
|
||||
self.assertDictEqual(
|
||||
result['changes']['new']['Computer Configuration'], expected)
|
||||
|
@ -332,7 +324,7 @@ class WinLGPOPolicyElementNamesTestTrue(TestCase, LoaderModuleMockMixin):
|
|||
'comment': 'All specified policies are properly configured'}
|
||||
self.assertDictEqual(result['changes'], expected['changes'])
|
||||
self.assertTrue(result['result'])
|
||||
self.assertEqual(result['comment'], result['comment'])
|
||||
self.assertEqual(result['comment'], expected['comment'])
|
||||
|
||||
def test_old_element_naming_style(self):
|
||||
computer_policy = {
|
||||
|
@ -362,7 +354,7 @@ class WinLGPOPolicyElementNamesTestTrue(TestCase, LoaderModuleMockMixin):
|
|||
'All specified policies are properly configured'}
|
||||
self.assertDictEqual(result['changes'], expected['changes'])
|
||||
self.assertTrue(result['result'])
|
||||
self.assertEqual(result['comment'], result['comment'])
|
||||
self.assertEqual(result['comment'], expected['comment'])
|
||||
|
||||
def test_invalid_elements(self):
|
||||
computer_policy = {
|
||||
|
|
|
@ -8,11 +8,12 @@ import os
|
|||
from tests.support.helpers import requires_network
|
||||
from tests.support.runtests import RUNTIME_VARS
|
||||
from tests.support.unit import skipIf
|
||||
from tests.unit.modules.test_zcbuildout import Base, KNOWN_VIRTUALENV_BINARY_NAMES
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.path
|
||||
from tests.unit.modules.test_zcbuildout import Base, KNOWN_VIRTUALENV_BINARY_NAMES
|
||||
import salt.modules.zcbuildout as modbuildout
|
||||
import salt.modules.virtualenv_mod
|
||||
import salt.states.zcbuildout as buildout
|
||||
import salt.modules.cmdmod as cmd
|
||||
|
||||
|
@ -60,6 +61,8 @@ class BuildoutTestCase(Base):
|
|||
|
||||
@requires_network()
|
||||
def test_installed(self):
|
||||
if salt.modules.virtualenv_mod.virtualenv_ver(self.ppy_st) >= (20, 0, 0):
|
||||
self.skipTest("Skiping until upstream resolved https://github.com/pypa/virtualenv/issues/1715")
|
||||
b_dir = os.path.join(self.tdir, 'b')
|
||||
ret = buildout.installed(b_dir,
|
||||
python=self.py_st,
|
||||
|
|
|
@ -14,6 +14,7 @@ from tests.support.runtests import RUNTIME_VARS
|
|||
import tests.support.helpers
|
||||
|
||||
# Import Salt libs
|
||||
import salt
|
||||
import salt.ext.six
|
||||
import salt.modules.cmdmod
|
||||
import salt.utils.platform
|
||||
|
@ -95,3 +96,10 @@ class VendorTornadoTest(TestCase):
|
|||
log.error("Test found bad line: %s", line)
|
||||
valid_lines.append(line)
|
||||
assert valid_lines == [], len(valid_lines)
|
||||
|
||||
def test_regression_56063(self):
|
||||
importer = salt.TornadoImporter()
|
||||
try:
|
||||
importer.find_module('tornado')
|
||||
except TypeError:
|
||||
assert False, 'TornadoImporter raised type error when one argument passed'
|
||||
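The regression test above only checks that find_module tolerates being called with a single argument; a hypothetical importer signature that satisfies it (a sketch, not salt's actual TornadoImporter) would make the second parameter optional:

class TornadoImporterSketch(object):
    def find_module(self, module_name, path=None):
        # `path` is optional, so both find_module('tornado') and
        # find_module('tornado', path) are accepted without a TypeError
        if module_name.startswith('tornado'):
            return self
        return None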
|
|
|
@ -134,8 +134,9 @@ class BadTestModuleNamesTestCase(TestCase):
|
|||
'integration.logging.handlers.test_logstash_mod',
|
||||
'integration.master.test_event_return',
|
||||
'integration.minion.test_blackout',
|
||||
'integration.minion.test_pillar',
|
||||
'integration.minion.test_executor',
|
||||
'integration.minion.test_minion_cache',
|
||||
'integration.minion.test_pillar',
|
||||
'integration.minion.test_timeout',
|
||||
'integration.modules.test_decorators',
|
||||
'integration.modules.test_pkg',
|
||||
|
|
|
@ -37,8 +37,12 @@ class VersionTestCase(TestCase):
|
|||
('v2014.1.4.1', (2014, 1, 4, 1, '', 0, 0, None), None),
|
||||
('v2014.1.4.1rc3-n/a-abcdefff', (2014, 1, 4, 1, 'rc', 3, -1, 'abcdefff'), None),
|
||||
('v3.4.1.1', (3, 4, 1, 1, '', 0, 0, None), None),
|
||||
('v3000', (3000, None, None, 0, '', 0, 0, None), '3000'),
|
||||
('v3000rc1', (3000, None, None, 0, 'rc', 1, 0, None), '3000rc1'),
|
||||
('v3000', (3000, '', 0, 0, None), '3000'),
|
||||
('v3000.0', (3000, '', 0, 0, None), '3000'),
|
||||
('v4518.1', (4518, 1, '', 0, 0, None), '4518.1'),
|
||||
('v3000rc1', (3000, 'rc', 1, 0, None), '3000rc1'),
|
||||
('v3000rc1-n/a-abcdefff', (3000, 'rc', 1, -1, 'abcdefff'), None),
|
||||
('3000-n/a-1e7bc8f', (3000, '', 0, -1, '1e7bc8f'), None)
|
||||
|
||||
)
|
||||
|
||||
|
@ -76,6 +80,9 @@ class VersionTestCase(TestCase):
|
|||
# version scheme in the future
|
||||
# but still adding test for it
|
||||
('v3000', 'v3000.0rc1'),
|
||||
('v3000.1rc1', 'v3000.0rc1'),
|
||||
('v3000', 'v2019.2.1rc1'),
|
||||
('v3001rc1', 'v2019.2.1rc1'),
|
||||
)
|
||||
for higher_version, lower_version in examples:
|
||||
self.assertTrue(SaltStackVersion.parse(higher_version) > lower_version)
|
||||
|
@ -142,6 +149,27 @@ class VersionTestCase(TestCase):
|
|||
assert not ver.bugfix
|
||||
assert ver.string == '{0}.{1}'.format(maj_ver, min_ver)
|
||||
|
||||
def test_string_new_version_minor_as_string(self):
|
||||
'''
|
||||
Validate string property method
|
||||
using new versioning scheme alongside
|
||||
minor version
|
||||
'''
|
||||
maj_ver = '3000'
|
||||
min_ver = '1'
|
||||
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
|
||||
assert ver.minor == int(min_ver)
|
||||
assert not ver.bugfix
|
||||
assert ver.string == '{0}.{1}'.format(maj_ver, min_ver)
|
||||
|
||||
# This only seems to happen on a cloned repo without its tags
|
||||
maj_ver = '3000'
|
||||
min_ver = ''
|
||||
ver = SaltStackVersion(major=maj_ver, minor=min_ver)
|
||||
assert ver.minor is None, '{!r} is not {!r}'.format(ver.minor, min_ver) # pylint: disable=repr-flag-used-in-string
|
||||
assert not ver.bugfix
|
||||
assert ver.string == maj_ver
|
||||
|
||||
def test_string_old_version(self):
|
||||
'''
|
||||
Validate string property method
|
||||
|
@ -154,6 +182,45 @@ class VersionTestCase(TestCase):
|
|||
assert ver.bugfix == 0
|
||||
assert ver.string == '{0}.{1}.0'.format(maj_ver, min_ver)
|
||||
|
||||
def test_noc_info(self):
|
||||
'''
|
||||
Test noc_info property method
|
||||
'''
|
||||
expect = (
|
||||
('v2014.1.4.1rc3-n/a-abcdefff', (2014, 1, 4, 1, 'rc', 3, -1)),
|
||||
('v3.4.1.1', (3, 4, 1, 1, '', 0, 0)),
|
||||
('v3000', (3000, '', 0, 0)),
|
||||
('v3000.0', (3000, '', 0, 0)),
|
||||
('v4518.1', (4518, 1, '', 0, 0)),
|
||||
('v3000rc1', (3000, 'rc', 1, 0)),
|
||||
('v3000rc1-n/a-abcdefff', (3000, 'rc', 1, -1)),
|
||||
)
|
||||
|
||||
for vstr, noc_info in expect:
|
||||
saltstack_version = SaltStackVersion.parse(vstr)
|
||||
assert saltstack_version.noc_info, noc_info
|
||||
assert len(saltstack_version.noc_info) == len(noc_info)
|
||||
|
||||
def test_full_info(self):
|
||||
'''
|
||||
Test full_Info property method
|
||||
'''
|
||||
expect = (
|
||||
('v2014.1.4.1rc3-n/a-abcdefff', (2014, 1, 4, 1, 'rc', 3, -1, 'abcdefff')),
|
||||
('v3.4.1.1', (3, 4, 1, 1, '', 0, 0, None)),
|
||||
('v3000', (3000, '', 0, 0, None)),
|
||||
('v3000.0', (3000, '', 0, 0, None)),
|
||||
('v4518.1', (4518, 1, '', 0, 0, None)),
|
||||
('v3000rc1', (3000, 'rc', 1, 0, None)),
|
||||
('v3000rc1-n/a-abcdefff', (3000, 'rc', 1, -1, 'abcdefff')),
|
||||
|
||||
)
|
||||
|
||||
for vstr, full_info in expect:
|
||||
saltstack_version = SaltStackVersion.parse(vstr)
|
||||
assert saltstack_version.full_info, full_info
|
||||
assert len(saltstack_version.full_info) == len(full_info)
|
||||
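Comparing the two expectation tables above, noc_info is simply full_info with the trailing commit sha dropped; a one-line sketch of that relationship:

def noc_info_from_full(full_info):
    # strip the git sha ("no commit" info); everything else is identical
    return full_info[:-1]

assert noc_info_from_full((3000, 'rc', 1, -1, 'abcdefff')) == (3000, 'rc', 1, -1)
assert noc_info_from_full((2014, 1, 4, 1, 'rc', 3, -1, 'abcdefff')) == (2014, 1, 4, 1, 'rc', 3, -1)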
|
||||
def test_discover_version(self):
|
||||
'''
|
||||
Test call to __discover_version
|
||||
|
@ -180,3 +247,31 @@ class VersionTestCase(TestCase):
|
|||
with proc_mock, patch_os:
|
||||
ret = getattr(salt.version, '__discover_version')(salt_ver)
|
||||
assert ret == exp
|
||||
|
||||
def test_info_new_version(self):
|
||||
'''
|
||||
test info property method with new versioning scheme
|
||||
'''
|
||||
vers = ((3000, None, None),
|
||||
(3000, 1, None),
|
||||
(3001, 0, None))
|
||||
for maj_ver, min_ver, bug_fix in vers:
|
||||
ver = SaltStackVersion(major=maj_ver, minor=min_ver, bugfix=bug_fix)
|
||||
if min_ver:
|
||||
assert ver.info == (maj_ver, min_ver)
|
||||
else:
|
||||
assert ver.info == (maj_ver,)
|
||||
|
||||
def test_info_old_version(self):
|
||||
'''
|
||||
test info property method with old versioning scheme
|
||||
'''
|
||||
vers = ((2019, 2, 1),
|
||||
(2018, 3, 0),
|
||||
(2017, 7, None))
|
||||
for maj_ver, min_ver, bug_fix in vers:
|
||||
ver = SaltStackVersion(major=maj_ver, minor=min_ver, bugfix=bug_fix)
|
||||
if bug_fix is None:
|
||||
assert ver.info == (maj_ver, min_ver, 0, 0)
|
||||
else:
|
||||
assert ver.info == (maj_ver, min_ver, bug_fix, 0)
|
||||
|
|
|
@ -5,13 +5,16 @@ Unit tests for salt.utils.templates.py
|
|||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.templates
|
||||
import salt.utils.files
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.helpers import with_tempdir
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -181,3 +184,46 @@ class RenderTestCase(TestCase):
|
|||
ctx['var'] = 'OK'
|
||||
res = salt.utils.templates.render_cheetah_tmpl(tmpl, ctx)
|
||||
self.assertEqual(res.strip(), 'OK')
|
||||
|
||||
|
||||
class MockRender(object):
|
||||
def __call__(self, tplstr, context, tmplpath=None):
|
||||
self.tplstr = tplstr
|
||||
self.context = context
|
||||
self.tmplpath = tmplpath
|
||||
return tplstr
|
||||
|
||||
|
||||
class WrapRenderTestCase(TestCase):
|
||||
|
||||
@with_tempdir()
|
||||
def test_wrap_issue_56119_a(self, tempdir):
|
||||
slsfile = os.path.join(tempdir, 'foo')
|
||||
with salt.utils.files.fopen(slsfile, 'w') as fp:
|
||||
fp.write('{{ slspath }}')
|
||||
context = {'opts': {}, 'saltenv': 'base', 'sls': 'foo.bar'}
|
||||
render = MockRender()
|
||||
wrapped = salt.utils.templates.wrap_tmpl_func(render)
|
||||
res = wrapped(
|
||||
slsfile,
|
||||
context=context,
|
||||
tmplpath='/tmp/foo/bar/init.sls'
|
||||
)
|
||||
assert render.context['slspath'] == 'foo/bar', render.context['slspath']
|
||||
assert render.context['tpldir'] == 'foo/bar', render.context['tpldir']
|
||||
|
||||
@with_tempdir()
|
||||
def test_wrap_issue_56119_b(self, tempdir):
|
||||
slsfile = os.path.join(tempdir, 'foo')
|
||||
with salt.utils.files.fopen(slsfile, 'w') as fp:
|
||||
fp.write('{{ slspath }}')
|
||||
context = {'opts': {}, 'saltenv': 'base', 'sls': 'foo.bar.bang'}
|
||||
render = MockRender()
|
||||
wrapped = salt.utils.templates.wrap_tmpl_func(render)
|
||||
res = wrapped(
|
||||
slsfile,
|
||||
context=context,
|
||||
tmplpath='/tmp/foo/bar/bang.sls'
|
||||
)
|
||||
assert render.context['slspath'] == 'foo/bar', render.context['slspath']
|
||||
assert render.context['tpldir'] == 'foo/bar', render.context['tpldir']
|
||||
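The two cases above pin down the slspath/tpldir derivation for issue 56119: dotted sls names map to a path, and the last component is dropped unless the template is an init.sls. A standalone sketch of that rule (an assumption, not the exact wrap_tmpl_func logic):

import os

def derive_slspath(sls, tmplpath):
    parts = sls.split('.')
    if os.path.basename(tmplpath) != 'init.sls':
        # foo.bar.bang rendered from bang.sls lives in foo/bar
        parts = parts[:-1]
    return '/'.join(parts)

assert derive_slspath('foo.bar', '/tmp/foo/bar/init.sls') == 'foo/bar'
assert derive_slspath('foo.bar.bang', '/tmp/foo/bar/bang.sls') == 'foo/bar'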
|
|
|
@ -10,16 +10,6 @@ from tests.support.unit import TestCase, skipIf
|
|||
import salt.utils.platform
|
||||
import salt.utils.win_network as win_network
|
||||
|
||||
mock_base = MagicMock(return_value={
|
||||
'alias': 'Ethernet',
|
||||
'description': 'Dell GigabitEthernet',
|
||||
'id': '{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}',
|
||||
'receive_only': False,
|
||||
'status': 'Up',
|
||||
'type': 'Ethernet',
|
||||
'physical_address': '02:D5:F1:DD:31:E0'
|
||||
})
|
||||
|
||||
mock_ip_base = MagicMock(return_value={
|
||||
'dns_enabled': False,
|
||||
'dns_suffix': '',
|
||||
|
@ -75,11 +65,36 @@ mock_anycast = MagicMock(return_value={'ip_anycast': [],
|
|||
mock_wins = MagicMock(return_value={'ip_wins': []})
|
||||
|
||||
|
||||
class PhysicalAddress(object):
|
||||
def __init__(self, address):
|
||||
self.address = address
|
||||
|
||||
def ToString(self):
|
||||
return str(self.address)
|
||||
|
||||
|
||||
class Interface(object):
|
||||
Name = 'Ethernet'
|
||||
'''
|
||||
Mocked interface object
|
||||
'''
|
||||
def __init__(self,
|
||||
i_address='02D5F1DD31E0',
|
||||
i_description='Dell GigabitEthernet',
|
||||
i_id='{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}',
|
||||
i_name='Ethernet',
|
||||
i_receive_only=False,
|
||||
i_status=1,
|
||||
i_type=6):
|
||||
self.PhysicalAddress = PhysicalAddress(i_address)
|
||||
self.Description = i_description
|
||||
self.Id = i_id
|
||||
self.Name = i_name
|
||||
self.NetworkInterfaceType = i_type
|
||||
self.IsReceiveOnly = i_receive_only
|
||||
self.OperationalStatus = i_status
|
||||
|
||||
|
||||
mock_int = MagicMock(return_value=[Interface()])
|
||||
def GetPhysicalAddress(self):
|
||||
return self.PhysicalAddress
|
||||
|
||||
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
|
||||
|
@ -130,8 +145,8 @@ class WinNetworkTestCase(TestCase):
|
|||
'status': 'Up',
|
||||
'type': 'Ethernet'}}
|
||||
|
||||
mock_int = MagicMock(return_value=[Interface()])
|
||||
with patch.object(win_network, '_get_network_interfaces', mock_int), \
|
||||
patch.object(win_network, '_get_base_properties', mock_base), \
|
||||
patch.object(win_network, '_get_ip_base_properties', mock_ip_base), \
|
||||
patch.object(win_network, '_get_ip_unicast_info', mock_unicast), \
|
||||
patch.object(win_network, '_get_ip_gateway_info', mock_gateway), \
|
||||
|
@ -157,8 +172,8 @@ class WinNetworkTestCase(TestCase):
|
|||
'inet6': [{'address': 'fe80::e8a4:1224:5548:2b81',
|
||||
'gateway': 'fe80::208:a2ff:fe0b:de70'}],
|
||||
'up': True}}
|
||||
mock_int = MagicMock(return_value=[Interface()])
|
||||
with patch.object(win_network, '_get_network_interfaces', mock_int), \
|
||||
patch.object(win_network, '_get_base_properties', mock_base), \
|
||||
patch.object(win_network, '_get_ip_base_properties', mock_ip_base), \
|
||||
patch.object(win_network, '_get_ip_unicast_info', mock_unicast), \
|
||||
patch.object(win_network, '_get_ip_gateway_info', mock_gateway), \
|
||||
|
@ -171,3 +186,55 @@ class WinNetworkTestCase(TestCase):
|
|||
results = win_network.get_interface_info()
|
||||
|
||||
self.assertDictEqual(expected, results)
|
||||
|
||||
def test__get_base_properties_tap_adapter(self):
|
||||
'''
|
||||
Adapter Type 53 is apparently an undocumented type corresponding to
|
||||
OpenVPN TAP Adapters and possibly other TAP Adapters. This test makes
|
||||
sure the win_network util will catch that.
|
||||
https://github.com/saltstack/salt/issues/56196
|
||||
https://github.com/saltstack/salt/issues/56275
|
||||
'''
|
||||
i_face = Interface(
|
||||
i_address='03DE4D0713FA',
|
||||
i_description='Windows TAP Adapter',
|
||||
i_id='{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}',
|
||||
i_name='Windows TAP Adapter',
|
||||
i_receive_only=False,
|
||||
i_status=1,
|
||||
i_type=53)
|
||||
expected = {
|
||||
'alias': 'Windows TAP Adapter',
|
||||
'description': 'Windows TAP Adapter',
|
||||
'id': '{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}',
|
||||
'receive_only': False,
|
||||
'physical_address': '03:DE:4D:07:13:FA',
|
||||
'status': 'Up',
|
||||
'type': 'TAPAdapter'}
|
||||
results = win_network._get_base_properties(i_face=i_face)
|
||||
self.assertDictEqual(expected, results)
|
||||
|
||||
def test__get_base_properties_undefined_adapter(self):
|
||||
'''
|
||||
The Adapter Type 53 may be an arbitrary number assigned by OpenVPN.
|
||||
This will test the ability to avoid stack tracing on an undefined
|
||||
adapter type. If one is encountered, just use the description.
|
||||
'''
|
||||
i_face = Interface(
|
||||
i_address='03DE4D0713FA',
|
||||
i_description='Undefined Adapter',
|
||||
i_id='{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}',
|
||||
i_name='Undefined',
|
||||
i_receive_only=False,
|
||||
i_status=1,
|
||||
i_type=50)
|
||||
expected = {
|
||||
'alias': 'Undefined',
|
||||
'description': 'Undefined Adapter',
|
||||
'id': '{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}',
|
||||
'receive_only': False,
|
||||
'physical_address': '03:DE:4D:07:13:FA',
|
||||
'status': 'Up',
|
||||
'type': 'Undefined Adapter'}
|
||||
results = win_network._get_base_properties(i_face=i_face)
|
||||
self.assertDictEqual(expected, results)
|
||||
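Both adapter tests above describe a lookup-with-fallback: known numeric interface types map to a friendly name, and anything unknown falls back to the adapter description. An illustrative sketch (the table and helper are assumptions, not win_network's real data):

from collections import namedtuple

FakeIface = namedtuple('FakeIface', ['NetworkInterfaceType', 'Description'])

ADAPTER_TYPES = {6: 'Ethernet', 53: 'TAPAdapter'}  # illustrative subset only

def adapter_type_name(i_face):
    # unknown types (e.g. 50 above) fall back to the description string
    return ADAPTER_TYPES.get(i_face.NetworkInterfaceType, i_face.Description)

assert adapter_type_name(FakeIface(53, 'Windows TAP Adapter')) == 'TAPAdapter'
assert adapter_type_name(FakeIface(50, 'Undefined Adapter')) == 'Undefined Adapter'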
|
|
|
@ -630,12 +630,17 @@ class WinFunctionsTestCase(TestCase):
|
|||
Should always return integer
|
||||
'''
|
||||
vdata = 1
|
||||
expected = 1
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype='REG_DWORD')
|
||||
self.assertTrue(isinstance(result, six.integer_types))
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
vdata = '1'
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype='REG_DWORD')
|
||||
self.assertTrue(isinstance(result, six.integer_types))
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
vdata = '0000001'
|
||||
result = win_reg.cast_vdata(vdata=vdata, vtype='REG_DWORD')
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
def test_cast_vdata_reg_expand_sz(self):
|
||||
'''
|
||||
|
|