Mirror of https://github.com/saltstack/salt.git (synced 2025-04-16 09:40:20 +00:00)

Merge 3006.x into master

Conflicts:
    * doc/ref/configuration/minion.rst

Commit c33b56281b: 70 changed files with 674 additions and 420 deletions
.github/workflows/test-action-macos.yml (vendored, 2 lines changed)

@@ -395,7 +395,7 @@ jobs:
       - name: Run Flaky Tests
         id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }}
         env:
           SKIP_REQUIREMENTS_INSTALL: "1"
           PRINT_TEST_SELECTION: "0"
.github/workflows/test-action.yml (vendored, 2 lines changed)

@@ -381,7 +381,7 @@ jobs:
       - name: Run Flaky Tests
         id: run-flaky-tests
-        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'false' }}
+        if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && steps.get-test-flags.outputs.flaky_jail_tests == 'true' }}
         run: |
           tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
             --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \
changelog/62477.fixed.md (new file, 1 line)
+Ensure NamedLoaderContext's have their value() used if passing to other modules

changelog/63589.fixed.md (new file, 1 line)
+add documentation note about reactor state ids.

changelog/64082.fixed.md (new file, 1 line)
+Fix dmsetup device names with hyphen being picked up.

changelog/64117.fixed.md (new file, 1 line)
+Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package

changelog/64118.fixed.md (new file, 1 line)
+Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg`

changelog/64126.fixed.md (new file, 1 line)
+lgpo_reg.set_value now returns ``True`` on success instead of ``None``

changelog/64170.fixed.md (new file, 2 lines)
+Fixed issue in salt-cloud so that multiple masters specified in the cloud
+are written to the minion config properly
doc/ref/configuration/minion.rst (the file listed in the merge conflicts):

@@ -2438,7 +2438,7 @@ enabled and can be disabled by changing this value to ``False``.
 ``saltenv`` will take its value. If both are used, ``environment`` will be
 ignored and ``saltenv`` will be used.

-The default fileserver environment to use when copy files and applying states.
+The default fileserver environment to use when copying files and applying states.

 .. code-block:: yaml
Reactor documentation (the file name was lost in this capture; both hunks add
the same note, once in the runner-reaction section and once in the
wheel-reaction section):

@@ -212,6 +212,10 @@ in :ref:`local reactions <reactor-local>`, but as noted above this is not very
 user-friendly. Therefore, the new config schema is recommended if the master
 is running a supported release.

+.. note::
+    State ids of reactors for runners and wheels should all be unique. They can
+    overwrite each other when added to the async queue causing lost reactions.
+
 The below two examples are equivalent:

 +-------------------------------------------------+-------------------------------------------------+

@@ -248,6 +252,10 @@ Like :ref:`runner reactions <reactor-runner>`, the old config schema called for
 wheel reactions to have arguments passed directly under the name of the
 :ref:`wheel function <all-salt.wheel>` (or in ``arg`` or ``kwarg`` parameters).

+.. note::
+    State ids of reactors for runners and wheels should all be unique. They can
+    overwrite each other when added to the async queue causing lost reactions.
+
 The below two examples are equivalent:

 +-----------------------------------+---------------------------------+
Debian packaging install lists (per changelog/64117.fixed.md, the proxy config
and salt-proxy@.service move out of one install list and into the salt-minion
list):

@@ -1,7 +1,5 @@
-pkg/common/salt-proxy@.service /lib/systemd/system
 conf/roster /etc/salt
 conf/cloud /etc/salt
-conf/proxy /etc/salt
 pkg/common/fish-completions/salt-cp.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-call.fish /usr/share/fish/vendor_completions.d
 pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d

@@ -1,2 +1,4 @@
 conf/minion /etc/salt
+conf/proxy /etc/salt
 pkg/common/salt-minion.service /lib/systemd/system
+pkg/common/salt-proxy@.service /lib/systemd/system
requirements/static/ci/common.in (identified by the "# via -r" references in the
lockfile hunks below):

@@ -36,7 +36,7 @@ python-etcd>0.4.2
 pyvmomi
 requests
 rfc3987
-sqlparse>=0.4.2
+sqlparse>=0.4.4
 strict_rfc3339>=0.7
 toml
 vcert~=0.7.0; sys_platform != 'win32'
Generated CI lockfiles (26 hunks in this capture, one per lockfile compiled from
requirements/static/ci/common.in; the individual file names were lost in
extraction). Every hunk applies the same pin bump, for example:

@@ -838,7 +838,7 @@ smbprotocol==1.10.1
     # via pypsexec
 smmap==4.0.0
     # via gitdb
-sqlparse==0.4.2
+sqlparse==0.4.4
     # via -r requirements/static/ci/common.in
 strict-rfc3339==0.7
     # via -r requirements/static/ci/common.in
@@ -9,6 +9,7 @@ import tarfile
 import tempfile
 from contextlib import closing

+import salt.fileclient
 import salt.utils.files
 import salt.utils.json
 import salt.utils.url

@@ -28,65 +29,62 @@ def update_master_cache(states, saltenv="base"):
This hunk re-indents the body of update_master_cache; the original capture shows
it side by side. Instead of lazily caching a file client in __context__:

-    if "cp.fileclient_{}".format(id(__opts__)) not in __context__:
-        __context__[
-            "cp.fileclient_{}".format(id(__opts__))
-        ] = salt.fileclient.get_file_client(__opts__)
...
-            copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir(
-                qualified_name, gendir, saltenv
-            )

the function now opens the client for the duration of the cache update and
calls it directly:

+    with salt.fileclient.get_file_client(__opts__) as cp_fileclient:
...
+            copy_result = cp_fileclient.get_dir(qualified_name, gendir, saltenv)

The rest of the hunk is the existing logic (writing the cp.list_states output,
the per-state low data, and the copy results into gendir, including the
state.file.sls fallback that retries get_dir on the parent directory) shifted
one indentation level into the with block.
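The same pattern recurs in several files further down (the docker module, the
ansiblegate state, the template utilities): open the file client with a with
block for just the work that needs it, rather than stashing a long-lived client
in __context__. A minimal sketch of the idiom, assuming it runs inside a loaded
execution module where __opts__ is injected; the salt:// URL and the destination
path are purely illustrative:

    import salt.fileclient

    def _refresh_cache(saltenv="base"):
        # The client is created on entry and closed on exit, so nothing is
        # left cached in __context__ between invocations.
        with salt.fileclient.get_file_client(__opts__) as client:
            # Copy an illustrative state directory down from the fileserver.
            return client.get_dir("salt://mystate", "/tmp/gendir", saltenv)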
@@ -834,7 +834,7 @@ def show_highstate(**kwargs):
     opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
     with salt.client.ssh.state.SSHHighState(
         opts,
-        __pillar__,
+        __pillar__.value(),
         __salt__,
         __context__["fileclient"],
         context=__context__.value(),
@@ -849,7 +849,6 @@ class Client:
             kwargs.pop("env")

         kwargs["saltenv"] = saltenv
-        url_data = urllib.parse.urlparse(url)
         sfn = self.cache_file(url, saltenv, cachedir=cachedir)
         if not sfn or not os.path.exists(sfn):
             return ""

@@ -1165,13 +1164,8 @@ class RemoteClient(Client):

         if not salt.utils.platform.is_windows():
             hash_server, stat_server = self.hash_and_stat_file(path, saltenv)
-            try:
-                mode_server = stat_server[0]
-            except (IndexError, TypeError):
-                mode_server = None
         else:
             hash_server = self.hash_file(path, saltenv)
-            mode_server = None

         # Check if file exists on server, before creating files and
         # directories

@@ -1214,13 +1208,8 @@ class RemoteClient(Client):
         if dest2check and os.path.isfile(dest2check):
             if not salt.utils.platform.is_windows():
                 hash_local, stat_local = self.hash_and_stat_file(dest2check, saltenv)
-                try:
-                    mode_local = stat_local[0]
-                except (IndexError, TypeError):
-                    mode_local = None
             else:
                 hash_local = self.hash_file(dest2check, saltenv)
-                mode_local = None

             if hash_local == hash_server:
                 return dest2check
@@ -32,7 +32,7 @@ def loader_context(loader):
 class NamedLoaderContext(collections.abc.MutableMapping):
     """
     A NamedLoaderContext object is injected by the loader providing access to
-    Salt's 'magic dunders' (__salt__, __utils__, ect).
+    Salt's 'magic dunders' (__salt__, __utils__, etc).
     """

     def __init__(self, name, loader_context, default=None):
@@ -217,7 +217,7 @@ def _gather_pillar(pillarenv, pillar_override):
     """
     pillar = salt.pillar.get_pillar(
         __opts__,
-        __grains__,
+        __grains__.value(),
         __opts__["id"],
         __opts__["saltenv"],
         pillar_override=pillar_override,

@@ -41,7 +41,7 @@ def _gather_pillar(pillarenv, pillar_override):
     """
     pillar = salt.pillar.get_pillar(
         __opts__,
-        __grains__,
+        __grains__.value(),
         __opts__["id"],
         __opts__["saltenv"],
         pillar_override=pillar_override,
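Both hunks apply the fix recorded in changelog/62477.fixed.md: the injected
dunders (__grains__, __pillar__ and friends) are NamedLoaderContext wrappers,
and when the whole object is handed to code outside the loader it should be
resolved with value() first. A small sketch of the distinction, assumed to run
inside a loaded execution module where the dunders are injected:

    import salt.pillar

    # Item access works on the wrapper directly...
    os_family = __grains__["os_family"]

    # ...but APIs outside the loader expect the plain data, so unwrap it first:
    pillar = salt.pillar.get_pillar(
        __opts__,
        __grains__.value(),   # plain dict snapshot instead of the loader wrapper
        __opts__["id"],
        __opts__["saltenv"],
    )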
@@ -113,7 +113,7 @@ def active():
     ret = {}
     # TODO: This command should be extended to collect more information, such as UUID.
     devices = __salt__["cmd.run_stdout"]("dmsetup ls --target crypt")
-    out_regex = re.compile(r"(?P<devname>\w+)\W+\((?P<major>\d+), (?P<minor>\d+)\)")
+    out_regex = re.compile(r"(?P<devname>\S+)\s+\((?P<major>\d+), (?P<minor>\d+)\)")

     log.debug(devices)
     for line in devices.split("\n"):
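A quick way to see why the pattern changed (this is the fix behind
changelog/64082.fixed.md): \w+ stops at a hyphen, so hyphenated device-mapper
names lose their prefix, while \S+ keeps the whole name. The device name below
is only an illustration:

    import re

    # Hypothetical output line from "dmsetup ls --target crypt".
    line = "luks-home  (253, 0)"

    old = re.compile(r"(?P<devname>\w+)\W+\((?P<major>\d+), (?P<minor>\d+)\)")
    new = re.compile(r"(?P<devname>\S+)\s+\((?P<major>\d+), (?P<minor>\d+)\)")

    print(old.search(line).group("devname"))  # "home"      -- the "luks-" prefix is lost
    print(new.search(line).group("devname"))  # "luks-home" -- full hyphenated name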
@@ -6644,14 +6644,6 @@ def script_retcode(
     )["retcode"]


-def _mk_fileclient():
-    """
-    Create a file client and add it to the context.
-    """
-    if "cp.fileclient" not in __context__:
-        __context__["cp.fileclient"] = salt.fileclient.get_file_client(__opts__)
-
-
 def _generate_tmp_path():
     return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6]))

@@ -6665,11 +6657,10 @@ def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""
     # reuse it from salt.ssh, however this function should
     # be somewhere else
     refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs)
-    _mk_fileclient()
-    trans_tar = salt.client.ssh.state.prep_trans_tar(
-        __context__["cp.fileclient"], chunks, refs, pillar, name
-    )
-    return trans_tar
+    with salt.fileclient.get_file_client(__opts__) as fileclient:
+        return salt.client.ssh.state.prep_trans_tar(
+            fileclient, chunks, refs, pillar, name
+        )


 def _compile_state(sls_opts, mods=None):
@@ -137,9 +137,10 @@ def write_reg_pol(data, policy_class="Machine"):

     Raises:
         SaltInvocationError: Invalid policy class
+        CommandExecutionError: On failure

     Returns:
-        None
+        bool: True if successful

     CLI Example:

Five small docstring hunks (@@ -175,7 +176,6 @@ in get_value, @@ -228,7 +228,6 @@ in
get_key, @@ -278,7 +277,6 @@ in set_value, @@ -401,7 +399,6 @@ in disable_value
and @@ -478,7 +476,6 @@ in delete_value) each drop a stray blank line from the
function docstring.

@@ -305,14 +303,14 @@ def set_value(
         Default is ``Machine``

-    Returns:
-        bool: ``True`` if successful, otherwise ``False``
-
     Raises:
         SaltInvocationError: Invalid policy_class
         SaltInvocationError: Invalid v_type
         SaltInvocationError: v_data doesn't match v_type

+    Returns:
+        bool: ``True`` if successful, otherwise ``False``
+
     CLI Example:

     .. code-block:: bash

@@ -385,7 +383,7 @@ def set_value(

     write_reg_pol(pol_data)

-    salt.utils.win_reg.set_value(
+    return salt.utils.win_reg.set_value(
         hive=hive,
         key=key,
         vname=v_name,

@@ -415,13 +412,14 @@ def disable_value(key, v_name, policy_class="machine"):
         Default is ``Machine``

+    Raises:
+        SaltInvocationError: Invalid policy_class
+        CommandExecutionError: On failure
+
     Returns:
         bool: ``True`` if successful, otherwise ``False``
         None: If already disabled

-    Raises:
-        SaltInvocationError: Invalid policy_class
-
     CLI Example:

     .. code-block:: bash

@@ -468,7 +466,7 @@ def disable_value(key, v_name, policy_class="machine"):

     write_reg_pol(pol_data)

-    salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
+    return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)


 def delete_value(key, v_name, policy_class="Machine"):

@@ -492,13 +489,14 @@ def delete_value(key, v_name, policy_class="Machine"):
         Default is ``Machine``

+    Raises:
+        SaltInvocationError: Invalid policy_class
+        CommandExecutionError: On failure
+
     Returns:
         bool: ``True`` if successful, otherwise ``False``
         None: Key/value not present

-    Raises:
-        SaltInvocationError: Invalid policy_class
-
     CLI Example:

     .. code-block:: bash

@@ -538,7 +536,7 @@ def delete_value(key, v_name, policy_class="Machine"):

     write_reg_pol(pol_data)

-    salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
+    return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)


 # This is for testing different settings and verifying that we are writing the
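With these hunks (and changelog/64126.fixed.md), callers can rely on the boolean
return instead of treating ``None`` as success. A hedged sketch of how custom
module code on a Windows minion might use it; the registry key and value name
are purely illustrative, and the exact keyword set of lgpo_reg.set_value is
assumed from the docstrings above:

    from salt.exceptions import CommandExecutionError

    def _apply_policy():
        # Runs inside a custom execution module where __salt__ is injected.
        ok = __salt__["lgpo_reg.set_value"](
            key="SOFTWARE\\MyOrg\\Policies",   # illustrative key
            v_name="ExampleSetting",           # illustrative value name
            v_data=1,
            v_type="REG_DWORD",
        )
        if not ok:
            raise CommandExecutionError("Registry.pol value was not written")
        return ok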
@@ -1298,7 +1298,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose):
     successful_verbose[short_path_name] = []


-def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
+def _get_source_sum(source_hash, file_path, saltenv, verify_ssl=True):
     """
     Extract the hash sum, whether it is in a remote hash file, or just a string.
     """

@@ -1315,7 +1315,7 @@ def _get_source_sum(source_hash, file_path, saltenv, **kwargs):
     # The source_hash is a file on a server
     try:
         cached_hash_file = __salt__["cp.cache_file"](
-            source_hash, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
+            source_hash, saltenv=saltenv, verify_ssl=verify_ssl
         )
     except MinionError as exc:
         log.exception("Failed to cache %s", source_hash, exc_info=exc)

Five hunks in install() and remove() (@@ -1671,7 +1671,7 @@, @@ -1686,7 +1686,7 @@,
@@ -1730,7 +1732,7 @@, @@ -2126,7 +2133,7 @@ and @@ -2150,7 +2157,7 @@) make the same
one-line change to existing cp.cache_file calls (for cache_file, installer and
uninstaller respectively), passing the environment as a keyword:

-            saltenv,
+            saltenv=saltenv,

@@ -1706,7 +1706,9 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
     # It's not cached. Cache it, mate.
     try:
         cached_pkg = __salt__["cp.cache_file"](
-            installer, saltenv, verify_ssl=kwargs.get("verify_ssl", True)
+            installer,
+            saltenv=saltenv,
+            verify_ssl=kwargs.get("verify_ssl", True),
         )
     except MinionError as exc:
         msg = "Failed to cache {}".format(installer)

@@ -1754,7 +1756,12 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
     # Compare the hash sums
     source_hash = pkginfo[version_num].get("source_hash", False)
     if source_hash:
-        source_sum = _get_source_sum(source_hash, cached_pkg, saltenv, **kwargs)
+        source_sum = _get_source_sum(
+            source_hash,
+            cached_pkg,
+            saltenv=saltenv,
+            verify_ssl=kwargs.get("verify_ssl", True),
+        )
         log.debug(
             "pkg.install: Source %s hash: %s",
             source_sum["hash_type"],
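These hunks implement changelog/64118.fixed.md: cp.cache_file is given an
explicit saltenv= keyword (and verify_ssl) instead of a positional environment
or a wholesale **kwargs forward. A hedged sketch of the calling convention,
assuming it runs inside a module where __salt__ is injected; the helper name and
URL are illustrative:

    def _cache_support_file(url, saltenv="base", **kwargs):
        # Forward only the keyword arguments cp.cache_file actually understands,
        # rather than passing **kwargs straight through.
        return __salt__["cp.cache_file"](
            url,
            saltenv=saltenv,
            verify_ssl=kwargs.get("verify_ssl", True),
        )

    # e.g. _cache_support_file("https://repo.example.com/pkg.sha256", saltenv="base")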
@@ -9,7 +9,6 @@ import logging
 import os
 import sys
 import traceback
-import uuid

 import salt.channel.client
 import salt.ext.tornado.gen

@@ -1341,6 +1340,11 @@ class Pillar:
         if self._closing:
             return
         self._closing = True
+        if self.client:
+            try:
+                self.client.destroy()
+            except AttributeError:
+                pass

     # pylint: disable=W1701
     def __del__(self):
@@ -32,12 +32,10 @@ state: drops the "# Import salt modules" comment (and an
adjacent blank line) above the imports:

 import logging
 import os
 import sys

-# Import salt modules
 import salt.fileclient
 import salt.utils.decorators.path
 from salt.utils.decorators import depends

@@ -108,13 +106,6 @@ def __virtual__():
     return __virtualname__


-def _client():
-    """
-    Get a fileclient
-    """
-    return salt.fileclient.get_file_client(__opts__)
-
-
 def _changes(plays):
     """
     Find changes in ansible return data

@@ -171,7 +162,7 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs=
     }
     if git_repo:
         if not isinstance(rundir, str) or not os.path.isdir(rundir):
-            with _client() as client:
+            with salt.fileclient.get_file_client(__opts__) as client:
                 rundir = client._extrn_path(git_repo, "base")
             log.trace("rundir set to %s", rundir)
         if not isinstance(git_kwargs, dict):
@@ -760,7 +760,9 @@ def _find_install_targets(
                 err = "Unable to cache {0}: {1}"
                 try:
                     cached_path = __salt__["cp.cache_file"](
-                        version_string, saltenv=kwargs["saltenv"], **kwargs
+                        version_string,
+                        saltenv=kwargs["saltenv"],
+                        verify_ssl=kwargs.get("verify_ssl", True),
                     )
                 except CommandExecutionError as exc:
                     problems.append(err.format(version_string, exc))
@@ -19,6 +19,13 @@ If your service states are running into trouble with init system detection,
 please see the :ref:`Overriding Virtual Module Providers <module-provider-override>`
 section of Salt's module documentation to work around possible errors.

+For services managed by systemd, the systemd_service module includes a built-in
+feature to reload the daemon when unit files are changed or extended. This
+feature is used automatically by the service state and the systemd_service
+module when running on a systemd minion, so there is no need to set up your own
+methods of reloading the daemon. If you need to manually reload the daemon for
+some reason, you can use the :func:`systemd_service.systemctl_reload <salt.modules.systemd_service.systemctl_reload>` function provided by Salt.
+
 .. note::
     The current status of a service is determined by the return code of the init/rc
     script status command. A status return code of 0 it is considered running. Any
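The new paragraph points at systemd_service.systemctl_reload for the rare case
where a manual reload is still wanted. A short, hedged example of invoking it
from custom module code, assuming a systemd minion where systemd_service is
loaded under the service virtual name:

    # Ask systemd to reload its unit files by hand; normally the service state
    # does this automatically when unit files change.
    __salt__["service.systemctl_reload"]()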
@@ -72,23 +72,6 @@ def __virtual__():
     return __virtualname__


-def _format_changes(changes, key, v_name):
-    """
-    Reformat the changes dictionary to group new and old together.
-    """
-    new_changes = {"new": {}, "old": {}}
-    for item in changes:
-        if changes[item]["new"]:
-            new_changes["new"][item] = changes[item]["new"]
-            new_changes["new"]["key"] = key
-            new_changes["new"]["name"] = v_name
-        if changes[item]["old"]:
-            new_changes["old"][item] = changes[item]["old"]
-            new_changes["old"]["key"] = key
-            new_changes["old"]["name"] = v_name
-    return new_changes
-
-
 def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine"):
     r"""
     Ensure a registry setting is present in the Registry.pol file.

@@ -170,12 +153,16 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine")
         key=key, v_name=name, policy_class=policy_class
     )

-    changes = salt.utils.data.compare_dicts(old, new)
+    if str(new["data"]) == v_data and new["type"] == v_type:
+        ret["comment"] = "Registry.pol value has been set"
+        ret["result"] = True
+    else:
+        ret["comment"] = "Failed to set Registry.pol value"
+
+    changes = salt.utils.data.recursive_diff(old, new)

     if changes:
-        ret["comment"] = "Registry.pol value has been set"
-        ret["changes"] = _format_changes(changes, key, name)
-        ret["result"] = True
+        ret["changes"] = changes

     return ret

@@ -238,12 +225,16 @@ def value_disabled(name, key, policy_class="Machine"):
         key=key, v_name=name, policy_class=policy_class
     )

-    changes = salt.utils.data.compare_dicts(old, new)
+    if "**del." in str(new["data"]) and new["type"] == "REG_SZ":
+        ret["comment"] = "Registry.pol value disabled"
+        ret["result"] = True
+    else:
+        ret["comment"] = "Failed to disable Registry.pol value"
+
+    changes = salt.utils.data.recursive_diff(old, new)

     if changes:
-        ret["comment"] = "Registry.pol value enabled"
-        ret["changes"] = _format_changes(changes, key, name)
-        ret["result"] = True
+        ret["changes"] = changes

     return ret

@@ -306,14 +297,17 @@ def value_absent(name, key, policy_class="Machine"):
         key=key, v_name=name, policy_class=policy_class
     )

-    if new is None:
+    if not new:
+        ret["comment"] = "Registry.pol value deleted"
+        ret["result"] = True
+        # We're setting this here in case new is None
         new = {}
+    else:
+        ret["comment"] = "Failed to delete Registry.pol value"

-    changes = salt.utils.data.compare_dicts(old, new)
+    changes = salt.utils.data.recursive_diff(old, new)

     if changes:
-        ret["comment"] = "Registry.pol value deleted"
-        ret["changes"] = _format_changes(changes, key, name)
-        ret["result"] = True
+        ret["changes"] = changes

     return ret
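salt.utils.data.recursive_diff already splits its result into old and new parts,
which is why the _format_changes helper above could be dropped. A small sketch
with illustrative value dictionaries of the shape lgpo_reg.get_value returns:

    import salt.utils.data

    old = {"data": 0, "type": "REG_DWORD"}   # illustrative previous value
    new = {"data": 1, "type": "REG_DWORD"}   # illustrative value after set_value

    changes = salt.utils.data.recursive_diff(old, new)
    # Only the differing keys survive, grouped under "old" and "new";
    # this is the structure the state now stores in ret["changes"].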
@@ -131,7 +131,7 @@ class SyncWrapper:
             result = io_loop.run_sync(lambda: getattr(self.obj, key)(*args, **kwargs))
             results.append(True)
             results.append(result)
-        except Exception as exc:  # pylint: disable=broad-except
+        except Exception:  # pylint: disable=broad-except
             results.append(False)
             results.append(sys.exc_info())
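The unused exc binding is dropped; the active exception is still available
through sys.exc_info(), which is what SyncWrapper already appends to results. A
tiny stdlib-only illustration:

    import sys

    try:
        raise ValueError("boom")
    except Exception:  # broad on purpose, mirroring SyncWrapper
        etype, evalue, _tb = sys.exc_info()  # still captures the active exception
        print(etype.__name__, evalue)        # -> ValueError boom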
@@ -1202,6 +1202,16 @@ def wait_for_passwd(
         time.sleep(trysleep)


+def _format_master_param(master):
+    """
+    If the master is a list, we need to convert it to a comma delimited string
+    Otherwise, we just return master
+    """
+    if isinstance(master, list):
+        return ",".join(master)
+    return master
+
+
 def deploy_windows(
     host,
     port=445,

@@ -1337,17 +1347,18 @@ def deploy_windows(
             conn=smb_conn,
         )

+        cmd = "c:\\salttemp\\{}".format(installer)
+        args = [
+            "/S",
+            "/master={}".format(_format_master_param(master)),
+            "/minion-name={}".format(name),
+        ]
+
         if use_winrm:
-            winrm_cmd(
-                winrm_session,
-                "c:\\salttemp\\{}".format(installer),
-                ["/S", "/master={}".format(master), "/minion-name={}".format(name)],
-            )
+            winrm_cmd(winrm_session, cmd, args)
         else:
-            cmd = "c:\\salttemp\\{}".format(installer)
-            args = "/S /master={} /minion-name={}".format(master, name)
             stdout, stderr, ret_code = run_psexec_command(
-                cmd, args, host, username, password
+                cmd, " ".join(args), host, username, password
             )

         if ret_code != 0:
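Together with changelog/64170.fixed.md, the new helper lets the Windows deploy
path accept either a single master or a list; the installer's /master= argument
gets a comma delimited string either way. Expected behaviour once the helper is
in place (hostnames are illustrative):

    from salt.utils.cloud import _format_master_param

    _format_master_param("salt.example.com")
    # -> "salt.example.com"
    _format_master_param(["salt1.example.com", "salt2.example.com"])
    # -> "salt1.example.com,salt2.example.com"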
@@ -58,19 +58,6 @@ class SaltCacheLoader(BaseLoader):
     and only loaded once per loader instance.
     """

-    _cached_pillar_client = None
-    _cached_client = None
-
-    @classmethod
-    def shutdown(cls):
-        for attr in ("_cached_client", "_cached_pillar_client"):
-            client = getattr(cls, attr, None)
-            if client is not None:
-                # PillarClient and LocalClient objects do not have a destroy method
-                if hasattr(client, "destroy"):
-                    client.destroy()
-                setattr(cls, attr, None)
-
     def __init__(
         self,
         opts,

@@ -93,8 +80,7 @@ class SaltCacheLoader(BaseLoader):
         log.debug("Jinja search path: %s", self.searchpath)
         self.cached = []
         self._file_client = _file_client
-        # Instantiate the fileclient
-        self.file_client()
+        self._close_file_client = _file_client is None

     def file_client(self):
         """

@@ -108,18 +94,10 @@ class SaltCacheLoader(BaseLoader):
             or not hasattr(self._file_client, "opts")
             or self._file_client.opts["file_roots"] != self.opts["file_roots"]
         ):
-            attr = "_cached_pillar_client" if self.pillar_rend else "_cached_client"
-            cached_client = getattr(self, attr, None)
-            if (
-                cached_client is None
-                or not hasattr(cached_client, "opts")
-                or cached_client.opts["file_roots"] != self.opts["file_roots"]
-            ):
-                cached_client = salt.fileclient.get_file_client(
-                    self.opts, self.pillar_rend
-                )
-                setattr(SaltCacheLoader, attr, cached_client)
-            self._file_client = cached_client
+            self._file_client = salt.fileclient.get_file_client(
+                self.opts, self.pillar_rend
+            )
+            self._close_file_client = True
         return self._file_client

     def cache_file(self, template):

@@ -221,6 +199,27 @@ class SaltCacheLoader(BaseLoader):
             # there is no template file within searchpaths
             raise TemplateNotFound(template)

+    def destroy(self):
+        if self._close_file_client is False:
+            return
+        if self._file_client is None:
+            return
+        file_client = self._file_client
+        self._file_client = None
+
+        try:
+            file_client.destroy()
+        except AttributeError:
+            # PillarClient and LocalClient objects do not have a destroy method
+            pass
+
+    def __enter__(self):
+        self.file_client()
+        return self
+
+    def __exit__(self, *args):
+        self.destroy()
+

 class PrintableDict(OrderedDict):
     """
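With destroy(), __enter__ and __exit__ in place, code that builds its own loader
can scope the file client instead of relying on the removed class-level cache. A
hedged usage sketch; the config path is illustrative, and a loader constructed
with an explicit _file_client leaves that client untouched on exit:

    import salt.config
    import salt.utils.jinja

    opts = salt.config.minion_config("/etc/salt/minion")  # illustrative config path

    with salt.utils.jinja.SaltCacheLoader(opts, saltenv="base") as loader:
        client = loader.file_client()  # created lazily now, not in __init__
        # ... render templates that fetch files through `client` ...
    # __exit__ -> destroy(): the client the loader created is closed here.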
@@ -97,3 +97,10 @@ if HAS_MAKO:
             self.cache[fpath] = self.file_client().get_file(
                 fpath, "", True, self.saltenv
             )
+
+        def destroy(self):
+            if self.client:
+                try:
+                    self.client.destroy()
+                except AttributeError:
+                    pass
@@ -362,163 +362,169 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
     elif tmplstr.endswith("\n"):
         newline = "\n"

-    if not saltenv:
-        if tmplpath:
-            loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
-    else:
-        loader = salt.utils.jinja.SaltCacheLoader(
-            opts,
-            saltenv,
-            pillar_rend=context.get("_pillar_rend", False),
-            _file_client=file_client,
-        )
-
-    env_args = {"extensions": [], "loader": loader}
-
-    if hasattr(jinja2.ext, "with_"):
-        env_args["extensions"].append("jinja2.ext.with_")
-    if hasattr(jinja2.ext, "do"):
-        env_args["extensions"].append("jinja2.ext.do")
-    if hasattr(jinja2.ext, "loopcontrols"):
-        env_args["extensions"].append("jinja2.ext.loopcontrols")
-    env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
-
-    opt_jinja_env = opts.get("jinja_env", {})
-    opt_jinja_sls_env = opts.get("jinja_sls_env", {})
-
-    opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
-    opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
-
-    # Pass through trim_blocks and lstrip_blocks Jinja parameters
-    # trim_blocks removes newlines around Jinja blocks
-    # lstrip_blocks strips tabs and spaces from the beginning of
-    # line to the start of a block.
-    if opts.get("jinja_trim_blocks", False):
-        log.debug("Jinja2 trim_blocks is enabled")
-        log.warning(
-            "jinja_trim_blocks is deprecated and will be removed in a future release,"
-            " please use jinja_env and/or jinja_sls_env instead"
-        )
-        opt_jinja_env["trim_blocks"] = True
-        opt_jinja_sls_env["trim_blocks"] = True
-    if opts.get("jinja_lstrip_blocks", False):
-        log.debug("Jinja2 lstrip_blocks is enabled")
-        log.warning(
-            "jinja_lstrip_blocks is deprecated and will be removed in a future release,"
-            " please use jinja_env and/or jinja_sls_env instead"
-        )
-        opt_jinja_env["lstrip_blocks"] = True
-        opt_jinja_sls_env["lstrip_blocks"] = True
-
-    def opt_jinja_env_helper(opts, optname):
-        for k, v in opts.items():
-            k = k.lower()
-            if hasattr(jinja2.defaults, k.upper()):
-                log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
-                env_args[k] = v
-            else:
-                log.warning("Jinja2 environment %s is not recognized", k)
-
-    if "sls" in context and context["sls"] != "":
-        opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
-    else:
-        opt_jinja_env_helper(opt_jinja_env, "jinja_env")
-
-    if opts.get("allow_undefined", False):
-        jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
-    else:
-        jinja_env = jinja2.sandbox.SandboxedEnvironment(
-            undefined=jinja2.StrictUndefined, **env_args
-        )
-
-    indent_filter = jinja_env.filters.get("indent")
-    jinja_env.tests.update(JinjaTest.salt_jinja_tests)
-    jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
-    if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
-        # Use the existing indent filter on Jinja versions where it's not broken
-        jinja_env.filters["indent"] = indent_filter
-    jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
-
-    # globals
-    jinja_env.globals["odict"] = OrderedDict
-    jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
-
-    jinja_env.tests["list"] = salt.utils.data.is_list
-
-    decoded_context = {}
-    for key, value in context.items():
-        if not isinstance(value, str):
-            if isinstance(value, NamedLoaderContext):
-                decoded_context[key] = value.value()
-            else:
-                decoded_context[key] = value
-            continue
-
-        try:
-            decoded_context[key] = salt.utils.stringutils.to_unicode(
-                value, encoding=SLS_ENCODING
-            )
-        except UnicodeDecodeError as ex:
-            log.debug(
-                "Failed to decode using default encoding (%s), trying system encoding",
-                SLS_ENCODING,
-            )
-            decoded_context[key] = salt.utils.data.decode(value)
-
-    jinja_env.globals.update(decoded_context)
     try:
-        template = jinja_env.from_string(tmplstr)
-        output = template.render(**decoded_context)
-    except jinja2.exceptions.UndefinedError as exc:
-        trace = traceback.extract_tb(sys.exc_info()[2])
-        line, out = _get_jinja_error(trace, context=decoded_context)
-        if not line:
-            tmplstr = ""
-        raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
-    except (
-        jinja2.exceptions.TemplateRuntimeError,
-        jinja2.exceptions.TemplateSyntaxError,
-        jinja2.exceptions.SecurityError,
-    ) as exc:
-        trace = traceback.extract_tb(sys.exc_info()[2])
-        line, out = _get_jinja_error(trace, context=decoded_context)
-        if not line:
-            tmplstr = ""
-        raise SaltRenderError(
-            "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
-        )
-    except (SaltInvocationError, CommandExecutionError) as exc:
-        trace = traceback.extract_tb(sys.exc_info()[2])
-        line, out = _get_jinja_error(trace, context=decoded_context)
-        if not line:
-            tmplstr = ""
-        raise SaltRenderError(
-            "Problem running salt function in Jinja template: {}{}".format(exc, out),
-            line,
-            tmplstr,
-        )
-    except Exception as exc:  # pylint: disable=broad-except
-        tracestr = traceback.format_exc()
-        trace = traceback.extract_tb(sys.exc_info()[2])
-        line, out = _get_jinja_error(trace, context=decoded_context)
-        if not line:
-            tmplstr = ""
-        else:
-            tmplstr += "\n{}".format(tracestr)
-        log.debug("Jinja Error")
-        log.debug("Exception:", exc_info=True)
-        log.debug("Out: %s", out)
-        log.debug("Line: %s", line)
-        log.debug("TmplStr: %s", tmplstr)
-        log.debug("TraceStr: %s", tracestr)
-
-        raise SaltRenderError(
-            "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
-        )
+        if not saltenv:
+            if tmplpath:
+                loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
+        else:
+            loader = salt.utils.jinja.SaltCacheLoader(
+                opts,
+                saltenv,
+                pillar_rend=context.get("_pillar_rend", False),
+                _file_client=file_client,
+            )
+
+        env_args = {"extensions": [], "loader": loader}
+
+        if hasattr(jinja2.ext, "with_"):
+            env_args["extensions"].append("jinja2.ext.with_")
+        if hasattr(jinja2.ext, "do"):
+            env_args["extensions"].append("jinja2.ext.do")
+        if hasattr(jinja2.ext, "loopcontrols"):
+            env_args["extensions"].append("jinja2.ext.loopcontrols")
+        env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
+
+        opt_jinja_env = opts.get("jinja_env", {})
+        opt_jinja_sls_env = opts.get("jinja_sls_env", {})
+
+        opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
+        opt_jinja_sls_env = (
+            opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
+        )
+
+        # Pass through trim_blocks and lstrip_blocks Jinja parameters
+        # trim_blocks removes newlines around Jinja blocks
+        # lstrip_blocks strips tabs and spaces from the beginning of
+        # line to the start of a block.
+        if opts.get("jinja_trim_blocks", False):
+            log.debug("Jinja2 trim_blocks is enabled")
+            log.warning(
+                "jinja_trim_blocks is deprecated and will be removed in a future release,"
+                " please use jinja_env and/or jinja_sls_env instead"
+            )
+            opt_jinja_env["trim_blocks"] = True
+            opt_jinja_sls_env["trim_blocks"] = True
+        if opts.get("jinja_lstrip_blocks", False):
+            log.debug("Jinja2 lstrip_blocks is enabled")
+            log.warning(
+                "jinja_lstrip_blocks is deprecated and will be removed in a future release,"
+                " please use jinja_env and/or jinja_sls_env instead"
+            )
+            opt_jinja_env["lstrip_blocks"] = True
+            opt_jinja_sls_env["lstrip_blocks"] = True
+
+        def opt_jinja_env_helper(opts, optname):
+            for k, v in opts.items():
+                k = k.lower()
+                if hasattr(jinja2.defaults, k.upper()):
+                    log.debug(
+                        "Jinja2 environment %s was set to %s by %s", k, v, optname
+                    )
+                    env_args[k] = v
+                else:
+                    log.warning("Jinja2 environment %s is not recognized", k)
+
+        if "sls" in context and context["sls"] != "":
+            opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
+        else:
+            opt_jinja_env_helper(opt_jinja_env, "jinja_env")
+
+        if opts.get("allow_undefined", False):
+            jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args)
+        else:
+            jinja_env = jinja2.sandbox.SandboxedEnvironment(
+                undefined=jinja2.StrictUndefined, **env_args
+            )
+
+        indent_filter = jinja_env.filters.get("indent")
+        jinja_env.tests.update(JinjaTest.salt_jinja_tests)
+        jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
+        if salt.utils.jinja.JINJA_VERSION >= Version("2.11"):
+            # Use the existing indent filter on Jinja versions where it's not broken
+            jinja_env.filters["indent"] = indent_filter
+        jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
+
+        # globals
+        jinja_env.globals["odict"] = OrderedDict
+        jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
+
+        jinja_env.tests["list"] = salt.utils.data.is_list
+
+        decoded_context = {}
+        for key, value in context.items():
+            if not isinstance(value, str):
+                if isinstance(value, NamedLoaderContext):
+                    decoded_context[key] = value.value()
+                else:
+                    decoded_context[key] = value
+                continue
+
+            try:
+                decoded_context[key] = salt.utils.stringutils.to_unicode(
+                    value, encoding=SLS_ENCODING
+                )
+            except UnicodeDecodeError:
+                log.debug(
+                    "Failed to decode using default encoding (%s), trying system encoding",
+                    SLS_ENCODING,
+                )
+                decoded_context[key] = salt.utils.data.decode(value)
+
+        jinja_env.globals.update(decoded_context)
+        try:
+            template = jinja_env.from_string(tmplstr)
+            output = template.render(**decoded_context)
+        except jinja2.exceptions.UndefinedError as exc:
+            trace = traceback.extract_tb(sys.exc_info()[2])
+            line, out = _get_jinja_error(trace, context=decoded_context)
+            if not line:
+                tmplstr = ""
+            raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr)
+        except (
+            jinja2.exceptions.TemplateRuntimeError,
+            jinja2.exceptions.TemplateSyntaxError,
+            jinja2.exceptions.SecurityError,
+        ) as exc:
+            trace = traceback.extract_tb(sys.exc_info()[2])
+            line, out = _get_jinja_error(trace, context=decoded_context)
+            if not line:
+                tmplstr = ""
+            raise SaltRenderError(
+                "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
+            )
+        except (SaltInvocationError, CommandExecutionError) as exc:
+            trace = traceback.extract_tb(sys.exc_info()[2])
+            line, out = _get_jinja_error(trace, context=decoded_context)
+            if not line:
+                tmplstr = ""
+            raise SaltRenderError(
+                "Problem running salt function in Jinja template: {}{}".format(
+                    exc, out
+                ),
+                line,
+                tmplstr,
+            )
+        except Exception as exc:  # pylint: disable=broad-except
+            tracestr = traceback.format_exc()
+            trace = traceback.extract_tb(sys.exc_info()[2])
+            line, out = _get_jinja_error(trace, context=decoded_context)
+            if not line:
+                tmplstr = ""
+            else:
+                tmplstr += "\n{}".format(tracestr)
+            log.debug("Jinja Error")
+            log.debug("Exception:", exc_info=True)
+            log.debug("Out: %s", out)
+            log.debug("Line: %s", line)
+            log.debug("TmplStr: %s", tmplstr)
+            log.debug("TraceStr: %s", tracestr)
+
+            raise SaltRenderError(
+                "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
+            )
     finally:
-        if loader and hasattr(loader, "_file_client"):
-            if hasattr(loader._file_client, "destroy"):
-                loader._file_client.destroy()
+        if loader and isinstance(loader, salt.utils.jinja.SaltCacheLoader):
+            loader.destroy()

     # Workaround a bug in Jinja that removes the final newline
     # (https://github.com/mitsuhiko/jinja2/issues/75)
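For reference, the opt_jinja_env_helper block retained above copies recognized options from the minion's jinja_env and jinja_sls_env settings into the Jinja environment arguments, accepting only keys whose upper-cased names exist in jinja2.defaults. A hedged illustration of opts values that would pass that check (the specific option choices are examples, not defaults from this commit):

    import jinja2.defaults

    # Illustrative opts fragment; every key is validated against jinja2.defaults
    # (TRIM_BLOCKS, LSTRIP_BLOCKS, KEEP_TRAILING_NEWLINE, ...), and unrecognized
    # keys are only logged as warnings and skipped.
    opts_fragment = {
        "jinja_env": {"keep_trailing_newline": True},
        "jinja_sls_env": {"trim_blocks": True, "lstrip_blocks": True},
    }

    for key in list(opts_fragment["jinja_env"]) + list(opts_fragment["jinja_sls_env"]):
        assert hasattr(jinja2.defaults, key.upper())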
@@ -569,9 +575,8 @@ def render_mako_tmpl(tmplstr, context, tmplpath=None):
    except Exception:  # pylint: disable=broad-except
        raise SaltRenderError(mako.exceptions.text_error_template().render())
    finally:
-        if lookup and hasattr(lookup, "_file_client"):
-            if hasattr(lookup._file_client, "destroy"):
-                lookup._file_client.destroy()
+        if lookup and isinstance(lookup, SaltMakoTemplateLookup):
+            lookup.destroy()


 def render_wempy_tmpl(tmplstr, context, tmplpath=None):
@@ -67,13 +67,11 @@ def search_reg_pol(search_string, policy_data):
     gpt.ini

     Args:
-
         search_string (str): The string to search for

         policy_data (str): The data to be searched

     Returns:
-
         bool: ``True`` if the regex search_string is found, otherwise ``False``
     """
     if policy_data:
@@ -91,7 +89,6 @@ def read_reg_pol_file(reg_pol_path):
     Helper function to read the content of the Registry.pol file

     Args:
-
         reg_pol_path (str): The path to the Registry.pol file

     Returns:
@@ -120,7 +117,6 @@ def write_reg_pol_data(
     to be processed

     Args:
-
         data_to_write (bytes): Data to write into the user/machine registry.pol
             file

@@ -132,6 +128,12 @@ def write_reg_pol_data(
         gpt_extension_guid (str): ADMX registry extension guid for the class

         gpt_ini_path (str): The path to the gpt.ini file
+
+    Returns:
+        bool: True if successful
+
+    Raises:
+        CommandExecutionError: On failure
     """
     # Write Registry.pol file
     if not os.path.exists(policy_file_path):
@@ -254,6 +256,7 @@ def write_reg_pol_data(
            )
            log.exception(msg)
            raise CommandExecutionError(msg)
+    return True


 def reg_pol_to_dict(policy_data):
@@ -273,6 +276,12 @@ def reg_pol_to_dict(policy_data):
     # https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-gpreg/5c092c22-bf6b-4e7f-b180-b20743d368f5

     reg_pol_header = REG_POL_HEADER.encode("utf-16-le")
+
+    # If policy_data is None, that means the Registry.pol file is missing
+    # So, we'll create it
+    if policy_data is None:
+        policy_data = reg_pol_header
+
     if not policy_data.startswith(reg_pol_header):
         msg = "LGPO_REG Util: Invalid Header. Registry.pol may be corrupt"
         raise CommandExecutionError(msg)
37 tests/pytests/integration/master/test_payload.py Normal file
@@ -0,0 +1,37 @@
+"""
+Tests for payload
+"""
+import pytest
+
+
+@pytest.mark.slow_test
+@pytest.mark.skip_if_not_root
+@pytest.mark.skip_on_windows
+@pytest.mark.skip_on_darwin
+def test_payload_no_exception(salt_cli, salt_master, salt_minion):
+    """
+    Test to confirm that no exception is thrown with the jinja file
+    when executed on the minion
+    """
+    test_set_hostname = """
+    {%- set host = pillar.get("hostname", "UNKNOWN") %}
+    {%- if host == 'UNKNOWN' %}
+    {{ raise("Unsupported UNKNOWN hostname") }}
+    {%- else %}
+    hostnamectl set-hostname {{ host }}
+    {%- endif %}
+    """
+    with salt_master.state_tree.base.temp_file("set_hostname.j2", test_set_hostname):
+
+        ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id)
+        assert ret.returncode == 0
+        assert ret.data is True
+
+        ret = salt_cli.run(
+            "cmd.script",
+            "salt://set_hostname.j2",
+            "template=jinja",
+            pillar={"hostname": "test"},
+            minion_tgt=salt_minion.id,
+        )
+        assert "AttributeError:" not in ret.stdout
40 tests/pytests/integration/states/test_include.py Normal file
@@ -0,0 +1,40 @@
+"""
+Integration tests for the jinja includes in states
+"""
+import logging
+
+import pytest
+
+log = logging.getLogger(__name__)
+
+
+@pytest.mark.slow_test
+def test_issue_64111(salt_master, salt_minion, salt_call_cli):
+    # This needs to be an integration test. A functional test does not trigger
+    # the issue fixed.
+
+    macros_jinja = """
+    {% macro a_jinja_macro(arg) -%}
+    {{ arg }}
+    {%- endmacro %}
+    """
+
+    init_sls = """
+    include:
+      - common.file1
+    """
+
+    file1_sls = """
+    {% from 'common/macros.jinja' import a_jinja_macro with context %}
+
+    a state id:
+      cmd.run:
+        - name: echo {{ a_jinja_macro("hello world") }}
+    """
+    tf = salt_master.state_tree.base.temp_file
+
+    with tf("common/macros.jinja", macros_jinja):
+        with tf("common/init.sls", init_sls):
+            with tf("common/file1.sls", file1_sls):
+                ret = salt_call_cli.run("state.apply", "common")
+                assert ret.returncode == 0
38 tests/pytests/unit/modules/test_cryptdev.py Normal file
@@ -0,0 +1,38 @@
+import pytest
+
+import salt.modules.cryptdev as cryptdev
+from tests.support.mock import MagicMock, patch
+
+
+@pytest.fixture
+def configure_loader_modules(minion_opts):
+    return {cryptdev: {"__opts__": minion_opts}}
+
+
+def test_active(caplog):
+    with patch.dict(
+        cryptdev.__salt__,
+        {"cmd.run_stdout": MagicMock(return_value="my-device (253, 1)\n")},
+    ):
+        assert cryptdev.active() == {
+            "my-device": {
+                "devname": "my-device",
+                "major": "253",
+                "minor": "1",
+            }
+        }
+
+    # debien output when no devices setup.
+    with patch.dict(cryptdev.__salt__, {"cmd.run_stdout": MagicMock(return_value="")}):
+        caplog.clear()
+        assert cryptdev.active() == {}
+        assert "dmsetup output does not match expected format" in caplog.text
+
+    # centos output of dmsetup when no devices setup.
+    with patch.dict(
+        cryptdev.__salt__,
+        {"cmd.run_stdout": MagicMock(return_value="No devices found")},
+    ):
+        caplog.clear()
+        assert cryptdev.active() == {}
+        assert "dmsetup output does not match expected format" in caplog.text
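The new unit test above feeds three representative dmsetup outputs to cryptdev.active(), including a device name containing a hyphen. A hedged sketch of the kind of parsing those assertions imply, written independently of the actual module code (the regex and helper name below are illustrative only):

    import re

    # Hypothetical pattern: a device name (which may contain hyphens), followed
    # by "(major, minor)". Lines such as "No devices found" simply do not match.
    DMSETUP_LINE = re.compile(r"^\s*(?P<devname>\S+)\s+\((?P<major>\d+),\s*(?P<minor>\d+)\)\s*$")

    def parse_dmsetup(output):
        """Return a dict keyed by device name, mirroring what the test expects."""
        devices = {}
        for line in output.splitlines():
            match = DMSETUP_LINE.match(line)
            if not match:
                continue
            devices[match.group("devname")] = {
                "devname": match.group("devname"),
                "major": match.group("major"),
                "minor": match.group("minor"),
            }
        return devices

    assert parse_dmsetup("my-device (253, 1)\n") == {
        "my-device": {"devname": "my-device", "major": "253", "minor": "1"}
    }
    assert parse_dmsetup("") == {}
    assert parse_dmsetup("No devices found") == {}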
@@ -152,12 +152,16 @@ def test_get_key_invalid_policy_class():


 def test_set_value(empty_reg_pol):
-    expected = {"data": 1, "type": "REG_DWORD"}
     key = "SOFTWARE\\MyKey"
     v_name = "MyValue"
-    lgpo_reg.set_value(key=key, v_name=v_name, v_data="1")
+    # Test command return
+    result = lgpo_reg.set_value(key=key, v_name=v_name, v_data="1")
+    assert result is True
+    # Test value actually set in Registry.pol
+    expected = {"data": 1, "type": "REG_DWORD"}
     result = lgpo_reg.get_value(key=key, v_name=v_name)
     assert result == expected
+    # Test that the registry value has been set
     expected = {
         "hive": "HKLM",
         "key": key,
@@ -249,14 +253,18 @@ def test_set_value_invalid_reg_dword():


 def test_disable_value(reg_pol):
+    key = "SOFTWARE\\MyKey1"
+    # Test that the command completed successfully
+    result = lgpo_reg.disable_value(key=key, v_name="MyValue1")
+    assert result is True
+    # Test that the value was actually set in Registry.pol
     expected = {
         "**del.MyValue1": {"data": " ", "type": "REG_SZ"},
         "**del.MyValue2": {"data": " ", "type": "REG_SZ"},
     }
-    key = "SOFTWARE\\MyKey1"
-    lgpo_reg.disable_value(key=key, v_name="MyValue1")
     result = lgpo_reg.get_key(key=key)
     assert result == expected
+    # Test that the registry value has been removed
     result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue1")
     assert result is False

@@ -283,16 +291,20 @@ def test_disable_value_invalid_policy_class():


 def test_delete_value_existing(reg_pol):
+    key = "SOFTWARE\\MyKey1"
+    # Test that the command completes successfully
+    result = lgpo_reg.delete_value(key=key, v_name="MyValue1")
+    assert result is True
+    # Test that the value is actually removed from Registry.pol
     expected = {
         "**del.MyValue2": {
             "data": " ",
             "type": "REG_SZ",
         },
     }
-    key = "SOFTWARE\\MyKey1"
-    lgpo_reg.delete_value(key=key, v_name="MyValue1")
     result = lgpo_reg.get_key(key=key)
     assert result == expected
+    # Test that the registry entry has been removed
     result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue2")
     assert result is False

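These test updates pin down the new boolean return values of lgpo_reg.set_value, disable_value, and delete_value, which previously returned None on success. A hedged sketch of how a caller could rely on that, reusing the same keys and values as the tests (module loading is simplified; on a real minion these functions are reached through __salt__ or salt-call):

    # Sketch only: "lgpo_reg" stands in for the loaded Windows execution module.
    def apply_my_value(lgpo_reg):
        changed = lgpo_reg.set_value(key="SOFTWARE\\MyKey", v_name="MyValue", v_data="1")
        assert changed is True  # was None before this change
        # expected result here: {"data": 1, "type": "REG_DWORD"}
        return lgpo_reg.get_value(key="SOFTWARE\\MyKey", v_name="MyValue")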
@@ -262,7 +262,7 @@ def test_pkg_install_verify_ssl_false():
         result = win_pkg.install(name="nsis", version="3.02", verify_ssl=False)
         mock_cp.assert_called_once_with(
             "http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe",
-            "base",
+            saltenv="base",
             verify_ssl=False,
         )
         assert expected == result
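The updated assertion reflects cp.cache_file now being called with saltenv passed explicitly as a keyword rather than positionally. A hedged sketch of that call shape, wrapped in a helper because __salt__ only exists inside a loaded module or state (the helper name is illustrative):

    # Sketch only: cp_cache_file would be __salt__["cp.cache_file"] inside an
    # execution or state module; the URL matches the installer used in the test above.
    def cache_nsis_installer(cp_cache_file):
        return cp_cache_file(
            "http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe",
            saltenv="base",
            verify_ssl=False,
        )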
@@ -3,6 +3,7 @@ import logging
 import pytest

 import salt.modules.beacons as beaconmod
+import salt.modules.cp as cp
 import salt.modules.pkg_resource as pkg_resource
 import salt.modules.yumpkg as yumpkg
 import salt.states.beacon as beaconstate
@@ -15,19 +16,28 @@ log = logging.getLogger(__name__)


 @pytest.fixture
-def configure_loader_modules():
+def configure_loader_modules(minion_opts):
     return {
+        cp: {
+            "__opts__": minion_opts,
+        },
         pkg: {
             "__env__": "base",
             "__salt__": {},
             "__grains__": {"os": "CentOS", "os_family": "RedHat"},
-            "__opts__": {"test": False, "cachedir": ""},
+            "__opts__": minion_opts,
             "__instance_id__": "",
             "__low__": {},
             "__utils__": {"state.gen_tag": state_utils.gen_tag},
         },
-        beaconstate: {"__salt__": {}, "__opts__": {}},
-        beaconmod: {"__salt__": {}, "__opts__": {}},
+        beaconstate: {
+            "__salt__": {},
+            "__opts__": minion_opts,
+        },
+        beaconmod: {
+            "__salt__": {},
+            "__opts__": minion_opts,
+        },
         pkg_resource: {
             "__salt__": {},
             "__grains__": {"os": "CentOS", "os_family": "RedHat"},
@@ -35,7 +45,7 @@ def configure_loader_modules():
         yumpkg: {
             "__salt__": {},
             "__grains__": {"osarch": "x86_64", "osmajorrelease": 7},
-            "__opts__": {},
+            "__opts__": minion_opts,
         },
     }

@@ -565,6 +575,32 @@ def test_installed_with_changes_test_true(list_pkgs):
     assert ret["changes"] == expected


+def test_installed_with_sources(list_pkgs, tmp_path):
+    """
+    Test pkg.installed with passing `sources`
+    """
+
+    list_pkgs = MagicMock(return_value=list_pkgs)
+    pkg_source = tmp_path / "pkga-package-0.3.0.deb"
+
+    with patch.dict(
+        pkg.__salt__,
+        {
+            "cp.cache_file": cp.cache_file,
+            "pkg.list_pkgs": list_pkgs,
+            "pkg_resource.pack_sources": pkg_resource.pack_sources,
+            "lowpkg.bin_pkg_info": MagicMock(),
+        },
+    ), patch("salt.fileclient.get_file_client", return_value=MagicMock()):
+        try:
+            ret = pkg.installed("install-pkgd", sources=[{"pkga": str(pkg_source)}])
+            assert ret["result"] is False
+        except TypeError as exc:
+            if "got multiple values for keyword argument 'saltenv'" in str(exc):
+                pytest.fail(f"TypeError should have not been raised: {exc}")
+            raise exc from None
+
+
 @pytest.mark.parametrize("action", ["removed", "purged"])
 def test_removed_purged_with_changes_test_true(list_pkgs, action):
     """
@@ -84,8 +84,6 @@ def test_value_present(empty_reg_pol):
     expected = {
         "changes": {
             "new": {
-                "name": "MyValue",
-                "key": "SOFTWARE\\MyKey",
                 "data": 1,
                 "type": "REG_DWORD",
             },
@@ -111,14 +109,10 @@ def test_value_present_existing_change(reg_pol):
     expected = {
         "changes": {
             "new": {
-                "name": "MyValue1",
-                "key": "SOFTWARE\\MyKey1",
                 "data": 2,
                 "type": "REG_DWORD",
             },
             "old": {
-                "name": "MyValue1",
-                "key": "SOFTWARE\\MyKey1",
                 "data": "squidward",
                 "type": "REG_SZ",
             },
@@ -183,14 +177,10 @@ def test_value_present_existing_disabled(reg_pol):
         "changes": {
             "new": {
                 "data": 2,
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue2",
                 "type": "REG_DWORD",
             },
             "old": {
                 "data": "**del.MyValue2",
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue2",
                 "type": "REG_SZ",
             },
         },
@@ -213,13 +203,11 @@ def test_value_disabled(empty_reg_pol):
         "changes": {
             "new": {
                 "data": "**del.MyValue1",
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue1",
                 "type": "REG_SZ",
             },
             "old": {},
         },
-        "comment": "Registry.pol value enabled",
+        "comment": "Registry.pol value disabled",
         "name": "MyValue1",
         "result": True,
     }
@@ -238,16 +226,12 @@ def test_value_disabled_existing_change(reg_pol):
         "changes": {
             "new": {
                 "data": "**del.MyValue1",
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue1",
             },
             "old": {
                 "data": "squidward",
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue1",
             },
         },
-        "comment": "Registry.pol value enabled",
+        "comment": "Registry.pol value disabled",
         "name": "MyValue1",
         "result": True,
     }
@@ -299,8 +283,6 @@ def test_value_absent(reg_pol):
             "new": {},
             "old": {
                 "data": "squidward",
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue1",
                 "type": "REG_SZ",
             },
         },
@@ -335,8 +317,6 @@ def test_value_absent_disabled(reg_pol):
             "new": {},
             "old": {
                 "data": "**del.MyValue2",
-                "key": "SOFTWARE\\MyKey1",
-                "name": "MyValue2",
                 "type": "REG_SZ",
             },
         },
@@ -15,7 +15,7 @@ import salt.utils.json  # pylint: disable=unused-import
 import salt.utils.stringutils  # pylint: disable=unused-import
 import salt.utils.yaml  # pylint: disable=unused-import
 from salt.utils.jinja import SaltCacheLoader
-from tests.support.mock import Mock, patch
+from tests.support.mock import Mock, call, patch


 @pytest.fixture
@@ -224,14 +224,45 @@ def test_file_client_kwarg(minion_opts, mock_file_client):
     assert loader._file_client is mock_file_client


-def test_cache_loader_shutdown(minion_opts, mock_file_client):
+def test_cache_loader_passed_file_client(minion_opts, mock_file_client):
     """
     The shudown method can be called without raising an exception when the
     file_client does not have a destroy method
     """
-    assert not hasattr(mock_file_client, "destroy")
-    mock_file_client.opts = minion_opts
-    loader = SaltCacheLoader(minion_opts, _file_client=mock_file_client)
-    assert loader._file_client is mock_file_client
-    # Shutdown method should not raise any exceptions
-    loader.shutdown()
+    # Test SaltCacheLoader creating and destroying the file client created
+    file_client = Mock()
+    with patch("salt.fileclient.get_file_client", return_value=file_client):
+        loader = SaltCacheLoader(minion_opts)
+        assert loader._file_client is None
+        with loader:
+            assert loader._file_client is file_client
+        assert loader._file_client is None
+        assert file_client.mock_calls == [call.destroy()]
+
+    # Test SaltCacheLoader reusing the file client passed
+    file_client = Mock()
+    file_client.opts = {"file_roots": minion_opts["file_roots"]}
+    with patch("salt.fileclient.get_file_client", return_value=Mock()):
+        loader = SaltCacheLoader(minion_opts, _file_client=file_client)
+        assert loader._file_client is file_client
+        with loader:
+            assert loader._file_client is file_client
+        assert loader._file_client is file_client
+        assert file_client.mock_calls == []
+
+    # Test SaltCacheLoader creating a client even though a file client was
+    # passed because the "file_roots" option is different, and, as such,
+    # the destroy method on the new file client is called, but not on the
+    # file client passed in.
+    file_client = Mock()
+    file_client.opts = {"file_roots": ""}
+    new_file_client = Mock()
+    with patch("salt.fileclient.get_file_client", return_value=new_file_client):
+        loader = SaltCacheLoader(minion_opts, _file_client=file_client)
+        assert loader._file_client is file_client
+        with loader:
+            assert loader._file_client is not file_client
+            assert loader._file_client is new_file_client
+        assert loader._file_client is None
+        assert file_client.mock_calls == []
+        assert new_file_client.mock_calls == [call.destroy()]
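Taken together, the assertions above describe the ownership rules for the loader's file client. A hedged summary sketch (function name and template are illustrative, not from this diff):

    from salt.utils.jinja import SaltCacheLoader

    def warm_template_cache(opts, file_client=None):
        # No client passed           -> the loader creates one and destroys it on exit.
        # Client with matching roots -> reused and left alive for the caller.
        # Client with other roots    -> the loader builds its own and destroys only that one.
        with SaltCacheLoader(opts, saltenv="base", _file_client=file_client) as loader:
            loader.cache_file("top.sls")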
@@ -605,3 +605,55 @@ def test_deploy_script_ssh_timeout():
     ssh_kwargs = root_cmd.call_args.kwargs
     assert "ssh_timeout" in ssh_kwargs
     assert ssh_kwargs["ssh_timeout"] == 34
+
+
+@pytest.mark.parametrize(
+    "master,expected",
+    [
+        (None, None),
+        ("single_master", "single_master"),
+        (["master1", "master2", "master3"], "master1,master2,master3"),
+    ],
+)
+def test__format_master_param(master, expected):
+    result = cloud._format_master_param(master)
+    assert result == expected
+
+
+@pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.")
+@pytest.mark.parametrize(
+    "master,expected",
+    [
+        (None, None),
+        ("single_master", "single_master"),
+        (["master1", "master2", "master3"], "master1,master2,master3"),
+    ],
+)
+def test_deploy_windows_master(master, expected):
+    """
+    Test deploy_windows with master parameter
+    """
+    mock_true = MagicMock(return_value=True)
+    mock_tuple = MagicMock(return_value=(0, 0, 0))
+    with patch("salt.utils.smb.get_conn", MagicMock()), patch(
+        "salt.utils.smb.mkdirs", MagicMock()
+    ), patch("salt.utils.smb.put_file", MagicMock()), patch(
+        "salt.utils.smb.delete_file", MagicMock()
+    ), patch(
+        "salt.utils.smb.delete_directory", MagicMock()
+    ), patch(
+        "time.sleep", MagicMock()
+    ), patch.object(
+        cloud, "wait_for_port", mock_true
+    ), patch.object(
+        cloud, "fire_event", MagicMock()
+    ), patch.object(
+        cloud, "wait_for_psexecsvc", mock_true
+    ), patch.object(
+        cloud, "run_psexec_command", mock_tuple
+    ) as mock:
+        cloud.deploy_windows(host="test", win_installer="install.exe", master=master)
+        expected_cmd = "c:\\salttemp\\install.exe"
+        expected_args = "/S /master={} /minion-name=None".format(expected)
+        assert mock.call_args_list[0].args[0] == expected_cmd
+        assert mock.call_args_list[0].args[1] == expected_args
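The parametrized cases above define how a list of masters is flattened into the /master= installer argument. A hedged re-statement of that behavior (the real helper is cloud._format_master_param as imported by the test module, presumably salt.utils.cloud; the function below only mirrors the expectations in the parametrize table and is not the actual implementation):

    def format_master_param(master):
        # None and single strings pass through; lists are joined with commas.
        if isinstance(master, list):
            return ",".join(master)
        return master

    assert format_master_param(None) is None
    assert format_master_param("single_master") == "single_master"
    assert format_master_param(["master1", "master2", "master3"]) == "master1,master2,master3"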