Mirror of https://github.com/saltstack/salt.git (synced 2025-04-16 09:40:20 +00:00)

Merge remote-tracking branch 'upstream/master' into 62934

Commit 1c26f2412c
111 changed files with 3686 additions and 3174 deletions
changelog/60365.fixed (new file, 1 line)
Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions

changelog/61805.fixed (new file, 1 line)
Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function.

changelog/62131.fixed (new file, 1 line)
Ignore some command return codes in openbsdrcctl_service to prevent spurious errors

changelog/62446.added (new file, 1 line)
Add ability to provide conditions which convert normal state actions to no-op when true

changelog/62480.added (new file, 1 line)
Added debug log messages displaying the command being run when installing packages on Windows

changelog/63025.fixed (new file, 1 line)
Fix btrfs.subvolume_snapshot command failing

changelog/63058.fixed (new file, 3 lines)
Fix mongo authentication for mongo ext_pillar and mongo returner

This fix also includes the ability to use the mongo connection string for mongo ext_pillar
@@ -600,6 +600,14 @@
# - require
# - require_in

+# If set, this parameter expects a dictionary of state module names as keys
+# and list of conditions which must be satisfied in order to run any functions
+# in that state module.
+#
+#global_state_conditions:
+#  "*": ["G@global_noop:false"]
+#  service: ["not G@virtual_subtype:chroot"]

##### File Directory Settings #####
##########################################
# The Salt Minion can redirect all file server operations to a local directory,
@@ -2472,6 +2472,21 @@ default configuration set up at install time.

    snapper_states_config: root

+``global_state_conditions``
+---------------------------
+
+Default: ``None``
+
+If set, this parameter expects a dictionary of state module names as keys and a
+list of conditions which must be satisfied in order to run any functions in that
+state module.
+
+.. code-block:: yaml
+
+    global_state_conditions:
+      "*": ["G@global_noop:false"]
+      service: ["not G@virtual_subtype:chroot"]
+
File Directory Settings
=======================
@@ -1,7 +1,7 @@
mock >= 3.0.0
# PyTest
-pytest >= 6.1.0; python_version < "3.6"
-pytest >= 7.0.1; python_version >= "3.6"
+pytest >= 7.0.1; python_version <= "3.6"
+pytest >= 7.2.0; python_version > "3.6"
pytest-salt-factories >= 1.0.0rc21; sys_platform == 'win32'
pytest-salt-factories[docker] >= 1.0.0rc21; sys_platform != 'win32'
pytest-tempdir >= 2019.10.12
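The two replacement pins rely on PEP 508 environment markers, so exactly one of them applies on any given interpreter: python_version <= "3.6" keeps pytest 7.0.1 for the older builds, while > "3.6" pulls pytest 7.2.0 everywhere else. A minimal sketch of how such markers evaluate, using the third-party packaging library (an assumption for illustration; the lockfile updates below were regenerated with pip-compile, not with this snippet):

# Sketch: how pip-style environment markers like the ones above are evaluated.
# Assumes the third-party "packaging" library is available.
from packaging.markers import Marker

for spec in ('python_version <= "3.6"', 'python_version > "3.6"'):
    # Marker.evaluate() checks the expression against the running interpreter,
    # which is why only one of the two pytest pins is ever installed.
    print(spec, "->", Marker(spec).evaluate())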
@ -412,6 +412,8 @@ docker==5.0.2
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -641,8 +643,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.8
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -707,7 +707,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==2.0.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -412,6 +412,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -641,8 +643,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -705,7 +705,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -410,6 +410,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -637,8 +639,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -702,7 +702,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -424,6 +424,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -653,8 +655,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -724,7 +724,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -729,7 +729,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==2.0.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.0.1 ; python_version <= "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -746,7 +746,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.0.1 ; python_version <= "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -420,6 +420,8 @@ docker==5.0.2
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -684,8 +686,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.8
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -751,7 +751,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==2.0.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -418,6 +418,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -676,8 +678,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -742,7 +742,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -432,6 +432,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -690,8 +692,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -762,7 +762,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -12,8 +12,6 @@ async-timeout==4.0.2
|
|||
# via aiohttp
|
||||
asynctest==0.13.0
|
||||
# via aiohttp
|
||||
atomicwrites==1.3.0
|
||||
# via pytest
|
||||
attrs==20.3.0
|
||||
# via
|
||||
# aiohttp
|
||||
|
@ -99,6 +97,8 @@ docker==2.7.0
|
|||
# via -r requirements/static/ci/common.in
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -223,8 +223,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -285,7 +283,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -418,6 +418,8 @@ docker==5.0.2
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -673,8 +675,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.8
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -740,7 +740,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==2.0.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -416,6 +416,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -666,8 +668,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -732,7 +732,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -430,6 +430,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -680,8 +682,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -752,7 +752,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -10,8 +10,6 @@ aiosignal==1.2.0
|
|||
# via aiohttp
|
||||
async-timeout==4.0.2
|
||||
# via aiohttp
|
||||
atomicwrites==1.3.0
|
||||
# via pytest
|
||||
attrs==20.3.0
|
||||
# via
|
||||
# aiohttp
|
||||
|
@ -95,6 +93,8 @@ docker==2.7.0
|
|||
# via -r requirements/static/ci/common.in
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -211,8 +211,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -273,7 +271,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.1.2 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -418,6 +418,8 @@ docker==5.0.2
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -673,8 +675,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.8
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -743,7 +743,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==2.0.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -418,6 +418,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -670,8 +672,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -738,7 +738,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -416,6 +416,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -666,8 +668,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -735,7 +735,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -432,6 +432,8 @@ docker==5.0.3
|
|||
# pytest-salt-factories
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -682,8 +684,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -757,7 +757,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.0.1 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@ -10,8 +10,6 @@ aiosignal==1.2.0
|
|||
# via aiohttp
|
||||
async-timeout==4.0.2
|
||||
# via aiohttp
|
||||
atomicwrites==1.3.0
|
||||
# via pytest
|
||||
attrs==20.3.0
|
||||
# via
|
||||
# aiohttp
|
||||
|
@ -95,6 +93,8 @@ docker==2.7.0
|
|||
# via -r requirements/static/ci/common.in
|
||||
etcd3-py==0.1.6 ; python_version >= "3.6"
|
||||
# via -r requirements/static/ci/common.in
|
||||
exceptiongroup==1.0.4
|
||||
# via pytest
|
||||
filelock==3.0.12
|
||||
# via virtualenv
|
||||
flaky==3.7.0
|
||||
|
@ -211,8 +211,6 @@ psutil==5.8.0
|
|||
# pytest-salt-factories
|
||||
# pytest-shell-utilities
|
||||
# pytest-system-statistics
|
||||
py==1.10.0
|
||||
# via pytest
|
||||
pyasn1-modules==0.2.4
|
||||
# via google-auth
|
||||
pyasn1==0.4.8
|
||||
|
@ -274,7 +272,7 @@ pytest-tempdir==2019.10.12
|
|||
# pytest-salt-factories
|
||||
pytest-timeout==1.4.2
|
||||
# via -r requirements/pytest.txt
|
||||
pytest==7.1.2 ; python_version >= "3.6"
|
||||
pytest==7.2.0 ; python_version > "3.6"
|
||||
# via
|
||||
# -r requirements/pytest.txt
|
||||
# pytest-custom-exit-code
|
||||
|
|
|
@@ -955,6 +955,7 @@ VALID_OPTS = immutabletypes.freeze(
        # client via the Salt API
        "netapi_allow_raw_shell": bool,
        "disabled_requisites": (str, list),
+        "global_state_conditions": (type(None), dict),
        # Feature flag config
        "features": dict,
        "fips_mode": bool,

@@ -1273,6 +1274,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze(
        "schedule": {},
        "ssh_merge_pillar": True,
        "disabled_requisites": [],
+        "global_state_conditions": None,
        "reactor_niceness": None,
        "fips_mode": False,
    }
@@ -428,10 +428,6 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils):
    _proxy_minion = ProxyMinion(proxyopts)
    _proxy_minion.proc_dir = salt.minion.get_proc_dir(proxyopts["cachedir"], uid=uid)

-    _proxy_minion.proxy = salt.loader.proxy(
-        proxyopts, utils=main_utils, context=proxy_context
-    )
-
    # And load the modules
    (
        _proxy_minion.functions,

@@ -461,6 +457,13 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils):
        context=proxy_context,
    )

+    # Create this after modules are synced to ensure
+    # any custom modules, e.g. custom proxy modules,
+    # are available.
+    _proxy_minion.proxy = salt.loader.proxy(
+        proxyopts, utils=main_utils, context=proxy_context
+    )
+
    _proxy_minion.functions.pack["__proxy__"] = _proxy_minion.proxy
    _proxy_minion.proxy.pack["__salt__"] = _proxy_minion.functions
    _proxy_minion.proxy.pack["__ret__"] = _proxy_minion.returners
@@ -1222,6 +1222,9 @@ def subvolume_snapshot(source, dest=None, name=None, read_only=False):
    cmd = ["btrfs", "subvolume", "snapshot"]
    if read_only:
        cmd.append("-r")
+
+    cmd.append(source)
+
    if dest and not name:
        cmd.append(dest)
    if dest and name:
@@ -838,6 +838,8 @@ def _run(
                stream_stderr=True,
            )
            ret["pid"] = proc.pid
+            stdout = ""
+            stderr = ""
            while proc.has_unread_data:
                try:
                    try:

@@ -848,12 +850,8 @@ def _run(
                        cstdout, cstderr = "", ""
                    if cstdout:
                        stdout += cstdout
-                    else:
-                        stdout = ""
                    if cstderr:
                        stderr += cstderr
-                    else:
-                        stderr = ""
                    if timeout and (time.time() > will_timeout):
                        ret["stderr"] = "SALT: Timeout after {}s\n{}".format(
                            timeout, stderr
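This is the fix behind changelog/60365.fixed: with use_vt=True, every read that returned no data used to reset the accumulated stdout/stderr to an empty string, so output collected earlier in the loop could vanish. A tiny standalone illustration of the before/after behaviour (the reads list stands in for chunks coming back from the VT; nothing here is a Salt API):

# Illustration only: why resetting the accumulator made use_vt output disappear.
reads = ["hello ", None, "world\n", None]  # chunks returned by successive VT reads


def collect(chunks, buggy=False):
    stdout = ""  # now initialised once before the read loop, as in the fix above
    for chunk in chunks:
        if chunk:
            stdout += chunk
        elif buggy:
            stdout = ""  # old behaviour: an empty read wiped everything gathered so far
    return stdout


assert collect(reads, buggy=True) == ""  # command output appears empty
assert collect(reads, buggy=False) == "hello world\n"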
@@ -57,7 +57,7 @@ def available(name):
        salt '*' service.available sshd
    """
    cmd = "{} get {}".format(_cmd(), name)
-    if __salt__["cmd.retcode"](cmd) == 2:
+    if __salt__["cmd.retcode"](cmd, ignore_retcode=True) == 2:
        return False
    return True


@@ -198,7 +198,7 @@ def status(name, sig=None):
        return bool(__salt__["status.pid"](sig))

    cmd = "{} check {}".format(_cmd(), name)
-    return not __salt__["cmd.retcode"](cmd)
+    return not __salt__["cmd.retcode"](cmd, ignore_retcode=True)


def enable(name, **kwargs):

@@ -255,7 +255,7 @@ def disabled(name):
        salt '*' service.disabled <service name>
    """
    cmd = "{} get {} status".format(_cmd(), name)
-    return not __salt__["cmd.retcode"](cmd) == 0
+    return not __salt__["cmd.retcode"](cmd, ignore_retcode=True) == 0


def enabled(name, **kwargs):

@@ -274,7 +274,7 @@ def enabled(name, **kwargs):
        salt '*' service.enabled <service name> flags=<flags>
    """
    cmd = "{} get {} status".format(_cmd(), name)
-    if not __salt__["cmd.retcode"](cmd):
+    if not __salt__["cmd.retcode"](cmd, ignore_retcode=True):
        # also consider a service disabled if the current flags are different
        # than the configured ones so we have a chance to update them
        flags = _get_flags(**kwargs)
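These openbsdrcctl_service changes correspond to changelog/62131.fixed: rcctl get/rcctl check exit non-zero for services that are merely disabled or stopped, and without ignore_retcode=True Salt's cmd.retcode treats any non-zero exit as a failure worth logging. A rough stub of the behaviour being avoided (the logging detail is an assumption about cmd.retcode, and fake_retcode is not a Salt function):

# Rough stub: what ignore_retcode changes about non-zero exits.
import logging

log = logging.getLogger(__name__)


def fake_retcode(cmd, ignore_retcode=False):
    retcode = 2  # e.g. what "rcctl get <svc>" returns for a disabled service
    if retcode != 0 and not ignore_retcode:
        # Without ignore_retcode an error line like this shows up on every
        # status/enabled/disabled check, which is the spurious noise removed above.
        log.error("Command '%s' failed with return code: %s", cmd, retcode)
    return retcode


service_available = fake_retcode("rcctl get sshd", ignore_retcode=True) != 2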
@@ -350,7 +350,7 @@ def zone_compare(timezone):
    if "Solaris" in __grains__["os_family"] or "AIX" in __grains__["os_family"]:
        return timezone == get_zone()

-    if "FreeBSD" in __grains__["os_family"]:
+    if "Arch" in __grains__["os_family"] or "FreeBSD" in __grains__["os_family"]:
        if not os.path.isfile(_get_localtime_path()):
            return timezone == get_zone()

@@ -1768,6 +1768,8 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):

        # Install the software
        # Check Use Scheduler Option
+        log.debug("PKG : cmd: %s /s /c %s", cmd_shell, arguments)
+        log.debug("PKG : pwd: %s", cache_path)
        if pkginfo[version_num].get("use_scheduler", False):
            # Create Scheduled Task
            __salt__["task.create_task"](

@@ -1834,6 +1836,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
                python_shell=False,
                redirect_stderr=True,
            )
            log.debug("PKG : retcode: %s", result["retcode"])
            if not result["retcode"]:
                ret[pkg_name] = {"install status": "success"}
                changed.append(pkg_name)
@@ -12,19 +12,39 @@ Salt Master Mongo Configuration
===============================

The module shares the same base mongo connection variables as
-:py:mod:`salt.returners.mongo_return`. These variables go in your master
+:py:mod:`salt.returners.mongo_future_return`. These variables go in your master
config file.

-* ``mongo.db`` - The mongo database to connect to. Defaults to ``'salt'``.
-* ``mongo.host`` - The mongo host to connect to. Supports replica sets by
-  specifying all hosts in the set, comma-delimited. Defaults to ``'salt'``.
-* ``mongo.port`` - The port that the mongo database is running on. Defaults
-  to ``27017``.
-* ``mongo.user`` - The username for connecting to mongo. Only required if
-  you are using mongo authentication. Defaults to ``''``.
-* ``mongo.password`` - The password for connecting to mongo. Only required
-  if you are using mongo authentication. Defaults to ``''``.
+.. code-block:: yaml
+
+    mongo.db: <database name>
+    mongo.host: <server ip address>
+    mongo.user: <MongoDB username>
+    mongo.password: <MongoDB user password>
+    mongo.port: 27017
+
+Or single URI:
+
+.. code-block:: yaml
+
+    mongo.uri: URI
+
+where uri is in the format:
+
+.. code-block:: text
+
+    mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]
+
+Example:
+
+.. code-block:: text
+
+    mongodb://db1.example.net:27017/mydatabase
+    mongodb://db1.example.net:27017,db2.example.net:2500/?replicaSet=test
+    mongodb://db1.example.net:27017,db2.example.net:2500/?replicaSet=test&connectTimeoutMS=300000
+
+More information on URI format can be found in
+https://docs.mongodb.com/manual/reference/connection-string/

Configuring the Mongo ext_pillar
================================
@@ -57,6 +77,8 @@ Module Documentation
import logging
import re

+import salt.exceptions
+
try:
    import pymongo


@@ -65,15 +87,6 @@ except ImportError:
    HAS_PYMONGO = False


-__opts__ = {
-    "mongo.db": "salt",
-    "mongo.host": "salt",
-    "mongo.password": "",
-    "mongo.port": 27017,
-    "mongo.user": "",
-}
-
-
def __virtual__():
    if not HAS_PYMONGO:
        return False

@@ -116,20 +129,33 @@ def ext_pillar(
        careful with other fields in the document as they must be string
        serializable. Defaults to ``None``.
    """
-    host = __opts__["mongo.host"]
-    port = __opts__["mongo.port"]
-    log.info("connecting to %s:%s for mongo ext_pillar", host, port)
-    conn = pymongo.MongoClient(host, port)
-
-    log.debug("using database '%s'", __opts__["mongo.db"])
-    mdb = conn[__opts__["mongo.db"]]
-
-    if user and password:
-        log.debug("authenticating as '%s'", user)
-        mdb.authenticate(user, password)
+    uri = __opts__.get("mongo.uri")
+    host = __opts__.get("mongo.host")
+    port = __opts__.get("mongo.port")
+    user = __opts__.get("mongo.user")
+    password = __opts__.get("mongo.password")
+    db = __opts__.get("mongo.db")
+
+    if uri:
+        if uri and host:
+            raise salt.exceptions.SaltConfigurationError(
+                "Mongo ext_pillar expects either uri or host configuration. Both were"
+                " provided"
+            )
+        pymongo.uri_parser.parse_uri(uri)
+        conn = pymongo.MongoClient(uri)
+        log.info("connecting to %s for mongo ext_pillar", uri)
+        mdb = conn.get_database()
+
+    else:
+        log.info("connecting to %s:%s for mongo ext_pillar", host, port)
+        conn = pymongo.MongoClient(
+            host=host, port=port, username=user, password=password
+        )
+
+        log.debug("using database '%s'", db)
+        mdb = conn[db]

    # Do the regex string replacement on the minion id
    if re_pattern:
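With the rework above, the ext_pillar either builds a client from the discrete mongo.* options or hands a single mongo.uri connection string straight to pymongo. A minimal sketch of that URI path outside of Salt, assuming pymongo is installed and the example URI points at a reachable server (the host and credentials are illustrative):

# Sketch: connecting with a MongoDB connection string, as mongo.uri now allows.
import pymongo

uri = "mongodb://pillar_user:secret@db1.example.net:27017/salt"  # illustrative only

# parse_uri raises pymongo.errors.InvalidURI early if the string is malformed
pymongo.uri_parser.parse_uri(uri)

client = pymongo.MongoClient(uri)
# get_database() with no argument returns the default database named in the URI
mdb = client.get_database()
print(mdb.name)  # -> "salt"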
@@ -178,17 +178,15 @@ def _get_conn(ret):
        mdb = conn.get_database()
    else:
        if PYMONGO_VERSION > _LooseVersion("2.3"):
-            conn = pymongo.MongoClient(host, port)
+            conn = pymongo.MongoClient(host, port, username=user, password=password)
        else:
            if uri:
                raise salt.exceptions.SaltConfigurationError(
                    "pymongo <= 2.3 does not support uri format"
                )
-            conn = pymongo.Connection(host, port)
+            conn = pymongo.Connection(host, port, username=user, password=password)

    mdb = conn[db_]
-    if user and password:
-        mdb.authenticate(user, password)

    if indexes:
        if PYMONGO_VERSION > _LooseVersion("2.3"):
@@ -1,5 +1,5 @@
"""
-This runner is used only for test purposes and servers no production purpose
+This runner is used only for test purposes and serves no production purpose
"""

import time

@@ -10,6 +10,12 @@ def arg(*args, **kwargs):
    Output the given args and kwargs

    Kwargs will be filtered for 'private' keynames.
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run test.arg foo bar=baz
    """
    kwargs = {k: v for k, v in kwargs.items() if not k.startswith("__")}


@@ -23,6 +29,12 @@ def arg(*args, **kwargs):
def raw_arg(*args, **kwargs):
    """
    Output the given args and kwargs
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run test.arg foo __bar=baz
    """
    ret = {
        "args": args,

@@ -34,6 +46,12 @@ def raw_arg(*args, **kwargs):
def metasyntactic(locality="us"):
    """
    Return common metasyntactic variables for the given locality
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run test.metasyntactic locality=uk
    """
    lookup = {
        "us": [

@@ -60,6 +78,12 @@ def metasyntactic(locality="us"):
def stdout_print():
    """
    Print 'foo' and return 'bar'
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run test.stdout_print
    """
    print("foo")
    return "bar"

@@ -68,6 +92,12 @@ def stdout_print():
def sleep(s_time=10):
    """
    Sleep t seconds, then return True
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run test.sleep s_time=5
    """
    print(s_time)
    time.sleep(s_time)

@@ -76,7 +106,13 @@ def sleep(s_time=10):

def stream():
    """
-    Return True
+    Fire a stream of 100 test events, then return True
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run test.stream
    """
    ret = True
    for i in range(1, 100):
@@ -788,6 +788,44 @@ class State:
        self.instance_id = str(id(self))
        self.inject_globals = {}
        self.mocked = mocked
+        self.global_state_conditions = None
+
+    def _match_global_state_conditions(self, full, state, name):
+        """
+        Return ``None`` if global state conditions are met. Otherwise, pass a
+        return dictionary which effectively creates a no-op outcome.
+
+        This operation is "explicit allow", in that ANY state and condition
+        combination which matches will allow the state to be run.
+        """
+        matches = []
+        ret = None
+        ret_dict = {
+            "name": name,
+            "comment": "Failed to meet global state conditions. State not called.",
+            "changes": {},
+            "result": None,
+        }
+
+        if not isinstance(self.global_state_conditions, dict):
+            self.global_state_conditions = (
+                self.functions["config.option"]("global_state_conditions") or {}
+            )
+
+        for state_match, conditions in self.global_state_conditions.items():
+            if state_match in ["*", full, state]:
+                if isinstance(conditions, str):
+                    conditions = [conditions]
+                if isinstance(conditions, list):
+                    matches.extend(
+                        self.functions["match.compound"](condition)
+                        for condition in conditions
+                    )
+
+        if matches and not any(matches):
+            ret = ret_dict
+
+        return ret

    def _gather_pillar(self):
        """

@@ -2318,7 +2356,15 @@ class State:
            ret = mock_ret(cdata)
        else:
            # Execute the state function
-            if not low.get("__prereq__") and low.get("parallel"):
+            ret = self._match_global_state_conditions(
+                cdata["full"], low["state"], low["name"]
+            )
+            if ret:
+                log.info(
+                    "Failed to meet global state conditions. State '%s' not called.",
+                    low["name"],
+                )
+            elif not low.get("__prereq__") and low.get("parallel"):
                # run the state call in parallel, but only if not in a prereq
                ret = self.call_parallel(cdata, low)
            else:
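To see the "explicit allow" rule from _match_global_state_conditions in isolation, here is a small standalone sketch. The match_compound helper below is a crude stand-in for Salt's real match.compound execution function and only understands the two grain expressions used in the examples above:

# Standalone sketch of the global_state_conditions matching logic above.
def match_compound(condition, grains):
    # Stand-in for Salt's match.compound: supports only "G@key:value",
    # optionally prefixed with "not ".
    negate = condition.startswith("not ")
    expr = condition[4:] if negate else condition
    key, _, value = (expr[2:] if expr.startswith("G@") else expr).partition(":")
    matched = str(grains.get(key)) == value
    return not matched if negate else matched


def state_allowed(conditions, full, state, grains):
    matches = []
    for state_match, conds in conditions.items():
        if state_match in ["*", full, state]:
            if isinstance(conds, str):
                conds = [conds]
            matches.extend(match_compound(c, grains) for c in conds)
    # Explicit allow: any matching condition lets the state run; a state with
    # no configured conditions also runs.
    return not matches or any(matches)


conditions = {"service": ["not G@virtual_subtype:chroot"]}
print(state_allowed(conditions, "service.running", "service", {"virtual_subtype": "chroot"}))  # False -> no-op
print(state_allowed(conditions, "file.managed", "file", {"virtual_subtype": "chroot"}))  # True -> runs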
@ -237,6 +237,10 @@ def pytest_configure(config):
|
|||
continue
|
||||
if dirname != TESTS_DIR:
|
||||
config.addinivalue_line("norecursedirs", str(CODE_DIR / dirname))
|
||||
config.addinivalue_line(
|
||||
"norecursedirs",
|
||||
str(TESTS_DIR / "unit" / "modules" / "inspectlib" / "tree_test"),
|
||||
)
|
||||
|
||||
# Expose the markers we use to pytest CLI
|
||||
config.addinivalue_line(
|
||||
|
|
|
@ -42,8 +42,8 @@ def minion_config_defaults():
|
|||
@pytest.fixture(scope="module")
|
||||
def minion_config_overrides():
|
||||
"""
|
||||
Functional test modules can provide this fixture to tweak the configuration overrides dictionary
|
||||
passed to the minion factory
|
||||
Functional test modules can provide this fixture to tweak the configuration
|
||||
overrides dictionary passed to the minion factory
|
||||
"""
|
||||
return {}
|
||||
|
||||
|
@ -60,7 +60,14 @@ def minion_opts(
|
|||
minion_config_overrides.update(
|
||||
{
|
||||
"file_client": "local",
|
||||
"file_roots": {"base": [str(state_tree)], "prod": [str(state_tree_prod)]},
|
||||
"file_roots": {
|
||||
"base": [
|
||||
str(state_tree),
|
||||
],
|
||||
"prod": [
|
||||
str(state_tree_prod),
|
||||
],
|
||||
},
|
||||
}
|
||||
)
|
||||
factory = salt_factories.salt_minion_daemon(
|
||||
|
|
|
@ -40,6 +40,8 @@ async def test_webhook_auth(http_client):
|
|||
assert exc.value.code == 401
|
||||
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
@pytest.mark.skip_if_not_root
|
||||
async def test_good_login(http_client, auth_creds, content_type_map, client_config):
|
||||
"""
|
||||
Test logging in
|
||||
|
@ -78,6 +80,8 @@ async def test_bad_login(http_client, content_type_map):
|
|||
assert exc.value.code == 401
|
||||
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
@pytest.mark.skip_if_not_root
|
||||
async def test_logout(http_client, auth_creds, content_type_map):
|
||||
response = await http_client.fetch(
|
||||
"/login",
|
||||
|
|
|
@ -288,6 +288,8 @@ def http_server(io_loop, app, netapi_port, content_type_map):
|
|||
yield server
|
||||
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
@pytest.mark.skip_if_not_root
|
||||
async def test_perms(http_client, auth_creds, external_auth):
|
||||
response = await http_client.fetch(
|
||||
"/login",
|
||||
|
|
|
@ -6,6 +6,11 @@ import salt.utils.json
|
|||
import salt.utils.yaml
|
||||
from tests.support.mock import patch
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app(app):
|
||||
|
|
|
@ -7,6 +7,11 @@ import salt.utils.yaml
|
|||
from salt.ext.tornado.httpclient import HTTPError
|
||||
from salt.netapi.rest_tornado import saltnado
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app_urls():
|
||||
|
|
|
@ -9,6 +9,11 @@ import salt.utils.yaml
|
|||
from salt.ext.tornado.httpclient import HTTPError, HTTPRequest
|
||||
from salt.ext.tornado.websocket import websocket_connect
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.destructive_test,
|
||||
pytest.mark.skip_if_not_root,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app(client_config):
|
||||
|
|
|
@ -452,3 +452,137 @@ def test_invalid_connection(
|
|||
|
||||
assert not factory.is_running()
|
||||
assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON)
|
||||
@pytest.mark.parametrize(
|
||||
"parallel_startup",
|
||||
[True, False],
|
||||
ids=["parallel_startup=True", "parallel_startup=False"],
|
||||
)
|
||||
def test_custom_proxy_module(
|
||||
salt_master,
|
||||
salt_cli,
|
||||
proxy_minion_id,
|
||||
parallel_startup,
|
||||
integration_files_dir,
|
||||
):
|
||||
"""
|
||||
Ensure the salt-proxy control proxy starts and
|
||||
is able to respond to test.ping, additionally ensure that
|
||||
the proxies being controlled also respond to test.ping.
|
||||
|
||||
Finally ensure correct exit status when salt-proxy exits correctly.
|
||||
|
||||
Skip on Windows because daemonization not supported
|
||||
"""
|
||||
|
||||
config_defaults = {
|
||||
"metaproxy": "deltaproxy",
|
||||
}
|
||||
proxy_one = "custom_dummy_proxy_one"
|
||||
proxy_two = "custom_dummy_proxy_two"
|
||||
|
||||
top_file = """
|
||||
base:
|
||||
{control}:
|
||||
- controlproxy
|
||||
{one}:
|
||||
- {one}
|
||||
{two}:
|
||||
- {two}
|
||||
""".format(
|
||||
control=proxy_minion_id,
|
||||
one=proxy_one,
|
||||
two=proxy_two,
|
||||
)
|
||||
controlproxy_pillar_file = """
|
||||
proxy:
|
||||
proxytype: deltaproxy
|
||||
parallel_startup: {}
|
||||
ids:
|
||||
- {}
|
||||
- {}
|
||||
""".format(
|
||||
parallel_startup, proxy_one, proxy_two
|
||||
)
|
||||
|
||||
dummy_proxy_one_pillar_file = """
|
||||
proxy:
|
||||
proxytype: custom_dummy
|
||||
"""
|
||||
|
||||
dummy_proxy_two_pillar_file = """
|
||||
proxy:
|
||||
proxytype: custom_dummy
|
||||
"""
|
||||
|
||||
module_contents = """
|
||||
__proxyenabled__ = ["custom_dummy"]
|
||||
|
||||
def __virtual__():
|
||||
return True
|
||||
|
||||
def init(opts):
|
||||
return True
|
||||
|
||||
def ping():
|
||||
return True
|
||||
"""
|
||||
|
||||
top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file)
|
||||
controlproxy_tempfile = salt_master.pillar_tree.base.temp_file(
|
||||
"controlproxy.sls", controlproxy_pillar_file
|
||||
)
|
||||
dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file(
|
||||
"{}.sls".format(proxy_one),
|
||||
dummy_proxy_one_pillar_file,
|
||||
)
|
||||
dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file(
|
||||
"{}.sls".format(proxy_two),
|
||||
dummy_proxy_two_pillar_file,
|
||||
)
|
||||
|
||||
custom_proxy_module = salt_master.state_tree.base.temp_file(
|
||||
"_proxy/custom_dummy.py", module_contents
|
||||
)
|
||||
with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, dummy_proxy_two_tempfile, custom_proxy_module:
|
||||
factory = salt_master.salt_proxy_minion_daemon(
|
||||
proxy_minion_id,
|
||||
defaults=config_defaults,
|
||||
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
|
||||
start_timeout=240,
|
||||
)
|
||||
|
||||
for minion_id in (proxy_minion_id, proxy_one, proxy_two):
|
||||
factory.before_start(
|
||||
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
|
||||
)
|
||||
factory.after_terminate(
|
||||
pytest.helpers.remove_stale_minion_key, salt_master, minion_id
|
||||
)
|
||||
factory.after_terminate(
|
||||
pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id
|
||||
)
|
||||
|
||||
with factory.started():
|
||||
assert factory.is_running()
|
||||
|
||||
# Let's issue a ping the control proxy
|
||||
ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
|
||||
# Let's issue a ping to one of the controlled proxies
|
||||
ret = salt_cli.run("test.ping", minion_tgt=proxy_one)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
|
||||
# Let's issue a ping to one of the controlled proxies
|
||||
ret = salt_cli.run("test.ping", minion_tgt=proxy_two)
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
||||
|
||||
# Terminate the proxy minion
|
||||
ret = factory.terminate()
|
||||
assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret
|
||||
|
|
|
@@ -13,3 +13,12 @@ def test_exec_code_all(salt_call_cli, non_root_account):
        "cmd.exec_code_all", "bash", "echo good", runas=non_root_account.username
    )
    assert ret.returncode == 0
+
+
+def test_long_stdout(salt_cli, salt_minion):
+    echo_str = "salt" * 1000
+    ret = salt_cli.run(
+        "cmd.run", f"echo {echo_str}", use_vt=True, minion_tgt=salt_minion.id
+    )
+    assert ret.returncode == 0
+    assert len(ret.data.strip()) == len(echo_str)
122
tests/pytests/integration/ssh/test_log.py
Normal file
122
tests/pytests/integration/ssh/test_log.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
"""
|
||||
Integration tests for salt-ssh logging
|
||||
"""
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
from saltfactories.utils import random_string
|
||||
|
||||
from tests.support.helpers import Keys
|
||||
|
||||
pytest.importorskip("docker")
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.slow_test,
|
||||
pytest.mark.skip_if_binaries_missing("dockerd"),
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def ssh_auth():
|
||||
return random_string("sshpassword"), "app-admin"
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def ssh_keys(tmp_path_factory):
|
||||
"""
|
||||
Temporary ssh key fixture
|
||||
"""
|
||||
with Keys(tmp_path_factory) as keys:
|
||||
yield keys
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def ssh_docker_container(salt_factories, ssh_keys, ssh_auth):
|
||||
"""
|
||||
Temporary docker container with python 3.6 and ssh enabled
|
||||
"""
|
||||
ssh_pass, ssh_user = ssh_auth
|
||||
container = salt_factories.get_container(
|
||||
random_string("ssh-py_versions-"),
|
||||
"ghcr.io/saltstack/salt-ci-containers/ssh-minion:latest",
|
||||
container_run_kwargs={
|
||||
"ports": {
|
||||
"22/tcp": None,
|
||||
},
|
||||
"environment": {
|
||||
"SSH_USER": ssh_user,
|
||||
"SSH_AUTHORIZED_KEYS": ssh_keys.pub,
|
||||
"SSH_USER_PASSWORD": ssh_pass,
|
||||
},
|
||||
"cap_add": "IPC_LOCK",
|
||||
},
|
||||
pull_before_start=True,
|
||||
skip_on_pull_failure=True,
|
||||
skip_if_docker_client_not_connectable=True,
|
||||
)
|
||||
with container.started() as factory:
|
||||
factory.run(f"echo {ssh_pass} | passwd {ssh_user} --stdin")
|
||||
yield factory
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def ssh_port(ssh_docker_container):
|
||||
return ssh_docker_container.get_host_port_binding(22, protocol="tcp")
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def salt_ssh_roster_file(ssh_port, ssh_keys, salt_master, ssh_auth):
|
||||
"""
|
||||
Temporary roster for ssh docker container
|
||||
"""
|
||||
ssh_pass, ssh_user = ssh_auth
|
||||
roster = """
|
||||
pyvertest:
|
||||
host: localhost
|
||||
user: {}
|
||||
port: {}
|
||||
passwd: {}
|
||||
sudo: True
|
||||
sudo_user: root
|
||||
tty: True
|
||||
ssh_options:
|
||||
- StrictHostKeyChecking=no
|
||||
- UserKnownHostsFile=/dev/null
|
||||
""".format(
|
||||
ssh_user, ssh_port, ssh_pass
|
||||
)
|
||||
with pytest.helpers.temp_file(
|
||||
"py_versions_roster", roster, salt_master.config_dir
|
||||
) as roster_file:
|
||||
yield roster_file
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def salt_ssh_cli(salt_master, salt_ssh_roster_file, ssh_keys, ssh_docker_container):
|
||||
assert salt_master.is_running()
|
||||
assert ssh_docker_container.is_running()
|
||||
return salt_master.salt_ssh_cli(
|
||||
timeout=180,
|
||||
roster_file=salt_ssh_roster_file,
|
||||
target_host="localhost",
|
||||
base_script_args=["--ignore-host-keys"],
|
||||
ssh_user="app-admin",
|
||||
)
|
||||
|
||||
|
||||
def test_log_password(salt_ssh_cli, caplog, ssh_auth):
|
||||
"""
|
||||
Test to ensure password is not logged when
|
||||
using sudo and a password
|
||||
"""
|
||||
ssh_pass, _ = ssh_auth
|
||||
with caplog.at_level(logging.TRACE):
|
||||
ret = salt_ssh_cli.run("--log-level=trace", "test.ping", minion_tgt="pyvertest")
|
||||
if "kex_exchange_identification" in ret.stdout:
|
||||
pytest.skip("Container closed ssh connection, skipping for now")
|
||||
assert ssh_pass not in caplog.text
|
||||
assert ret.returncode == 0
|
||||
assert ret.data is True
|
17
tests/pytests/unit/cache/test_mysql_cache.py
vendored
17
tests/pytests/unit/cache/test_mysql_cache.py
vendored
|
@ -8,8 +8,6 @@ import logging
|
|||
import pytest
|
||||
|
||||
import salt.cache.mysql_cache as mysql_cache
|
||||
import salt.payload
|
||||
import salt.utils.files
|
||||
from salt.exceptions import SaltCacheError
|
||||
from tests.support.mock import MagicMock, call, patch
|
||||
|
||||
|
@ -27,13 +25,6 @@ def configure_loader_modules():
|
|||
return {mysql_cache: {}}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def master_config():
|
||||
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
|
||||
opts["__role"] = "master"
|
||||
return opts
|
||||
|
||||
|
||||
def test_run_query():
|
||||
"""
|
||||
Tests that a SaltCacheError is raised when there is a problem writing to the
|
||||
|
@ -45,7 +36,7 @@ def test_run_query():
|
|||
mock_connect.assert_has_calls((expected_calls,), True)
|
||||
|
||||
|
||||
def test_store(master_config):
|
||||
def test_store():
|
||||
"""
|
||||
Tests that the store function writes the data to the serializer for storage.
|
||||
"""
|
||||
|
@ -101,7 +92,7 @@ def test_store(master_config):
|
|||
assert expected in str(exc_info.value)
|
||||
|
||||
|
||||
def test_fetch(master_config):
|
||||
def test_fetch():
|
||||
"""
|
||||
Tests that the fetch function reads the data from the serializer for storage.
|
||||
"""
|
||||
|
@ -157,7 +148,7 @@ def test_flush():
|
|||
mock_run_query.assert_has_calls(expected_calls, True)
|
||||
|
||||
|
||||
def test_init_client(master_config):
|
||||
def test_init_client():
|
||||
"""
|
||||
Tests that the _init_client places the correct information in __context__
|
||||
"""
|
||||
|
@ -204,7 +195,7 @@ def test_init_client(master_config):
|
|||
)
|
||||
|
||||
|
||||
def test_create_table(master_config):
|
||||
def test_create_table():
|
||||
"""
|
||||
Tests that the _create_table
|
||||
"""
|
||||
|
|
|
@ -1,22 +1,20 @@
|
|||
import logging
|
||||
|
||||
import salt.client.netapi
|
||||
import salt.config
|
||||
from tests.support.mock import Mock, patch
|
||||
|
||||
|
||||
def test_run_log(caplog):
|
||||
def test_run_log(caplog, master_opts):
|
||||
"""
|
||||
test salt.client.netapi logs correct message
|
||||
"""
|
||||
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
|
||||
opts["rest_cherrypy"] = {"port": 8000}
|
||||
master_opts["rest_cherrypy"] = {"port": 8000}
|
||||
mock_process = Mock()
|
||||
mock_process.add_process.return_value = True
|
||||
patch_process = patch.object(salt.utils.process, "ProcessManager", mock_process)
|
||||
with caplog.at_level(logging.INFO):
|
||||
with patch_process:
|
||||
netapi = salt.client.netapi.NetapiClient(opts)
|
||||
netapi = salt.client.netapi.NetapiClient(master_opts)
|
||||
netapi.run()
|
||||
assert "Starting RunNetapi(salt.loaded.int.netapi.rest_cherrypy)" in caplog.text
|
||||
|
||||
|
|
|
@ -1,20 +1,16 @@
|
|||
import copy
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
from salt.cloud import Cloud
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def master_config():
|
||||
opts = copy.deepcopy(salt.config.DEFAULT_MASTER_OPTS)
|
||||
opts["parallel"] = False
|
||||
opts["providers"] = {
|
||||
def master_config(master_opts):
|
||||
master_opts["parallel"] = False
|
||||
master_opts["providers"] = {
|
||||
"test": {},
|
||||
}
|
||||
return opts
|
||||
return master_opts
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@ -125,5 +121,5 @@ def test_vm_config_merger():
|
|||
"size": "t2.micro",
|
||||
"name": "test_vm",
|
||||
}
|
||||
vm = salt.cloud.Cloud.vm_config("test_vm", main, provider, profile, {})
|
||||
vm = Cloud.vm_config("test_vm", main, provider, profile, {})
|
||||
assert expected == vm
|
||||
|
|
tests/pytests/unit/conftest.py (new file, 37 lines)

import pytest

import salt.config


@pytest.fixture
def minion_opts(tmp_path):
    """
    Default minion configuration with relative temporary paths to not require root permissions.
    """
    root_dir = tmp_path / "minion"
    opts = salt.config.DEFAULT_MINION_OPTS.copy()
    opts["__role"] = "minion"
    opts["root_dir"] = str(root_dir)
    for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
        dirpath = root_dir / name
        dirpath.mkdir(parents=True)
        opts[name] = str(dirpath)
    opts["log_file"] = "logs/minion.log"
    return opts


@pytest.fixture
def master_opts(tmp_path):
    """
    Default master configuration with relative temporary paths to not require root permissions.
    """
    root_dir = tmp_path / "master"
    opts = salt.config.DEFAULT_MASTER_OPTS.copy()
    opts["__role"] = "master"
    opts["root_dir"] = str(root_dir)
    for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"):
        dirpath = root_dir / name
        dirpath.mkdir(parents=True)
        opts[name] = str(dirpath)
    opts["log_file"] = "logs/master.log"
    return opts
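The unit-test diffs that follow switch to these fixtures instead of copying DEFAULT_MASTER_OPTS/DEFAULT_MINION_OPTS by hand. A small hypothetical test showing how they are consumed (the file name and assertions are illustrative, not part of this commit):

# Hypothetical tests/pytests/unit/test_example.py consuming the new fixtures.
import pathlib


def test_minion_opts_live_under_tmp_path(minion_opts, tmp_path):
    # Every writable path is rooted under pytest's tmp_path, so no root needed.
    assert minion_opts["__role"] == "minion"
    assert pathlib.Path(minion_opts["cachedir"]).is_dir()
    assert minion_opts["root_dir"].startswith(str(tmp_path))


def test_master_opts_role(master_opts):
    assert master_opts["__role"] == "master"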
@ -4,16 +4,14 @@ unit tests for the script engine
|
|||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.engines.script as script
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from tests.support.mock import patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MASTER_OPTS
|
||||
return {script: {"__opts__": opts}}
|
||||
def configure_loader_modules(master_opts):
|
||||
return {script: {"__opts__": master_opts}}
|
||||
|
||||
|
||||
def test__get_serializer():
|
||||
|
|
|
@ -92,13 +92,13 @@ def configure_loader_modules():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def slack_client():
|
||||
def slack_client(minion_opts):
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
app_token = "xapp-x-xxxxxxxxxxx-xxxxxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
|
||||
bot_token = "xoxb-xxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxx"
|
||||
trigger = "!"
|
||||
|
||||
with patch.dict(slack_engine.__opts__, mock_opts):
|
||||
with patch.dict(slack_engine.__opts__, minion_opts):
|
||||
with patch(
|
||||
"slack_bolt.App", MagicMock(autospec=True, return_value=MockSlackBoltApp())
|
||||
):
|
||||
|
|
|
@ -6,7 +6,6 @@ import logging
|
|||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.modules.dockermod as docker_mod
|
||||
import salt.utils.platform
|
||||
|
@ -21,9 +20,9 @@ pytest.importorskip(
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(
|
||||
salt.config.DEFAULT_MINION_OPTS.copy(),
|
||||
minion_opts,
|
||||
whitelist=[
|
||||
"args",
|
||||
"docker",
|
||||
|
|
340
tests/pytests/unit/modules/test_acme.py
Normal file
340
tests/pytests/unit/modules/test_acme.py
Normal file
|
@ -0,0 +1,340 @@
|
|||
"""
|
||||
:codeauthor: Herbert Buurman <herbert.buurman@ogd.nl>
|
||||
"""
|
||||
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import textwrap
|
||||
|
||||
import pytest
|
||||
|
||||
# Import Salt Module
|
||||
import salt.modules.acme as acme
|
||||
import salt.utils.dictupdate
|
||||
import salt.utils.platform
|
||||
from salt.exceptions import SaltInvocationError
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {acme: {}}
|
||||
|
||||
|
||||
def test_certs():
|
||||
"""
|
||||
Test listing certs
|
||||
"""
|
||||
with patch.dict(
|
||||
acme.__salt__,
|
||||
{ # pylint: disable=no-member
|
||||
"file.readdir": MagicMock(
|
||||
return_value=[".", "..", "README", "test_expired", "test_valid"]
|
||||
)
|
        },
    ), patch(
        "os.path.isdir",
        side_effect=lambda path: path
        in [
            os.path.join(acme.LE_LIVE, "test_expired"),
            os.path.join(acme.LE_LIVE, "test_valid"),
        ],
    ):
        assert acme.certs() == ["test_expired", "test_valid"]


def test_has():
    """
    Test checking if certificate (does not) exist.
    """
    with patch.dict(
        acme.__salt__, {"file.file_exists": MagicMock(return_value=True)}
    ):  # pylint: disable=no-member
        assert acme.has("test_expired")
    with patch.dict(
        acme.__salt__, {"file.file_exists": MagicMock(return_value=False)}
    ):  # pylint: disable=no-member
        assert not acme.has("test_invalid")


def test_needs_renewal():
    """
    Test if expired certs do indeed need renewal.
    """
    expired = (
        datetime.date.today() - datetime.timedelta(days=3) - datetime.date(1970, 1, 1)
    )
    valid = (
        datetime.date.today() + datetime.timedelta(days=3) - datetime.date(1970, 1, 1)
    )
    with patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "tls.cert_info": MagicMock(
                return_value={"not_after": expired.total_seconds()}
            )
        },
    ):
        assert acme.needs_renewal("test_expired")
    with patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "tls.cert_info": MagicMock(
                return_value={"not_after": valid.total_seconds()}
            )
        },
    ):
        assert not acme.needs_renewal("test_valid")
        # Test with integer window parameter
        assert acme.needs_renewal("test_valid", window=5)
        # Test with string-like window parameter
        assert acme.needs_renewal("test_valid", window="5")
        # Test with 'force' parameter
        assert acme.needs_renewal("test_valid", window="force")
        # Test with 'true' parameter
        assert acme.needs_renewal("test_valid", window=True)
        # Test with invalid window parameter
        pytest.raises(
            SaltInvocationError, acme.needs_renewal, "test_valid", window="foo"
        )


def test_expires():
    """
    Test if expires function functions properly.
    """
    test_value = datetime.datetime.today() - datetime.timedelta(days=3)
    test_stamp = test_value - datetime.datetime(1970, 1, 1)
    with patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "tls.cert_info": MagicMock(
                return_value={"not_after": test_stamp.total_seconds()}
            )
        },
    ):
        assert (
            acme.expires("test_expired")
            == datetime.datetime.fromtimestamp(test_stamp.total_seconds()).isoformat()
        )


def test_info():
    """
    Test certificate information retrieval.
    """
    certinfo_tls_result = {
        "not_after": 1559471377,
        "signature_algorithm": "sha256WithRSAEncryption",
        "extensions": {},
        "fingerprint": (
            "FB:A4:5F:71:D6:5D:6C:B6:1D:2C:FD:91:09:2C:1C:52:"
            "3C:EC:B6:4D:1A:95:65:37:04:D0:E2:5E:C7:64:0C:9C"
        ),
        "serial_number": 6461481982668892235,
        "issuer": {},
        "not_before": 1559557777,
        "subject": {},
    }
    certinfo_x509_result = {
        "Not After": "2019-06-02 10:29:37",
        "Subject Hash": "54:3B:6C:A4",
        "Serial Number": "59:AB:CB:A0:FB:90:E8:4B",
        "SHA1 Finger Print": (
            "F1:8D:F3:26:1B:D3:88:32:CD:B6:FA:3B:85:58:DA:C7:6F:62:BE:7E"
        ),
        "SHA-256 Finger Print": (
            "FB:A4:5F:71:D6:5D:6C:B6:1D:2C:FD:91:09:2C:1C:52:"
            "3C:EC:B6:4D:1A:95:65:37:04:D0:E2:5E:C7:64:0C:9C"
        ),
        "MD5 Finger Print": "95:B5:96:9B:42:A5:9E:20:78:FD:99:09:4B:21:1E:97",
        "Version": 3,
        "Key Size": 2048,
        "Public Key": (
            "-----BEGIN PUBLIC KEY-----\n"
            "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsVO2vwQPKU92PSBnuGid\n"
            "k8t6KWVE2jEBM10u7CgqQmD/JCnYflEHAo1nOsD7wxdhBrxhf5Qs+pEX1HOsh8VA\n"
            "HDTim0iE8nQVJ0Iuen2SrwaWMhwKmZTSJRYMgd46oCMi2RdlCvcgF2Hw6RTwF7FT\n"
            "hnksc4HBT91XddnP32N558tOT3YejafQNvClz5WcR+E0JzqGrV/+wfe3o+j/q5eK\n"
            "UowttWazeSMvuROtqj/fEk0rop4D14pgzZqWi30tjwhJNl6fSPFWBrLEHGNyDJ+O\n"
            "zfov0B2MRLJibH7GMkOCwsP2g1lVOReqcml+ju6zAKW8nHBTRg0iXB18Ifxef57Y\n"
            "AQIDAQAB\n"
            "-----END PUBLIC KEY-----\n"
        ),
        "Issuer": {},
        "Issuer Hash": "54:3B:6C:A4",
        "Not Before": "2019-06-03 10:29:37",
        "Subject": {},
    }

    with patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "file.file_exists": MagicMock(return_value=True),
            "tls.cert_info": MagicMock(return_value=certinfo_tls_result),
        },
    ):
        assert acme.info("test") == certinfo_tls_result
    with patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "file.file_exists": MagicMock(return_value=True),
            "x509.read_certificate": MagicMock(return_value=certinfo_x509_result),
        },
    ):
        assert acme.info("test") == certinfo_x509_result
    with patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "file.file_exists": MagicMock(return_value=True),
            "cmd.run": MagicMock(return_value="foo"),
        },
    ):
        assert acme.info("test") == {"text": "foo"}


def test_cert():
    """
    Test certificate retrieval/renewal
    """
    valid_timestamp = (
        datetime.datetime.now()
        + datetime.timedelta(days=30)
        - datetime.datetime(1970, 1, 1, 0, 0, 0, 0)
    ).total_seconds()
    expired_timestamp = (
        datetime.datetime.now()
        - datetime.timedelta(days=3)
        - datetime.datetime(1970, 1, 1, 0, 0, 0, 0)
    ).total_seconds()
    cmd_new_cert = {
        "stdout": textwrap.dedent(
            """
            IMPORTANT NOTES:
            - Congratulations! Your certificate and chain have been saved at:
            /etc/letsencrypt/live/test/fullchain.pem
            Your key file has been saved at:
            /etc/letsencrypt/live/test/privkey.pem
            Your cert will expire on 2019-08-07. To obtain a new or tweaked
            version of this certificate in the future, simply run certbot
            again. To non-interactively renew *all* of your certificates, run
            "certbot renew"
            - If you like Certbot, please consider supporting our work by:

            Donating to ISRG / Let's Encrypt: https://letsencrypt.org/donate
            Donating to EFF: https://eff.org/donate-le
            """
        ),
        "stderr": textwrap.dedent(
            """
            Saving debug log to /var/log/letsencrypt/letsencrypt.log
            Plugins selected: Authenticator standalone, Installer None
            Starting new HTTPS connection (1): acme-v02.api.letsencrypt.org
            Obtaining a new certificate
            Resetting dropped connection: acme-v02.api.letsencrypt.org
            """
        ),
        "retcode": 0,
    }
    result_new_cert = {
        "comment": "Certificate test obtained",
        "not_after": datetime.datetime.fromtimestamp(valid_timestamp).isoformat(),
        "changes": {"mode": "0640"},
        "result": True,
    }

    cmd_no_renew = {
        "stdout": textwrap.dedent(
            """
            - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            Certificate not yet due for renewal; no action taken.
            - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
            """
        ),
        "stderr": textwrap.dedent(
            """Saving debug log to /var/log/letsencrypt/letsencrypt.log
            Plugins selected: Authenticator standalone, Installer None
            Starting new HTTPS connection (1): acme-v02.api.letsencrypt.org
            Cert not yet due for renewal
            Keeping the existing certificate
            """
        ),
        "retcode": 0,
    }
    if salt.utils.platform.is_freebsd():
        result_no_renew = {
            "comment": "Certificate "
            + os.path.join("/usr/local/etc/letsencrypt/live/test", "cert.pem")
            + " unchanged",
            "not_after": datetime.datetime.fromtimestamp(valid_timestamp).isoformat(),
            "changes": {},
            "result": True,
        }
    else:
        result_no_renew = {
            "comment": "Certificate "
            + os.path.join("/etc/letsencrypt/live/test", "cert.pem")
            + " unchanged",
            "not_after": datetime.datetime.fromtimestamp(valid_timestamp).isoformat(),
            "changes": {},
            "result": True,
        }
    result_renew = {
        "comment": "Certificate test renewed",
        "not_after": datetime.datetime.fromtimestamp(expired_timestamp).isoformat(),
        "changes": {},
        "result": True,
    }

    # Test fetching new certificate
    with patch("salt.modules.acme.LEA", "certbot"), patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "cmd.run_all": MagicMock(return_value=cmd_new_cert),
            "file.file_exists": MagicMock(return_value=False),
            "tls.cert_info": MagicMock(return_value={"not_after": valid_timestamp}),
            "file.check_perms": MagicMock(
                side_effect=lambda a, x, b, c, d, follow_symlinks: (
                    salt.utils.dictupdate.set_dict_key_value(x, "changes:mode", "0640"),
                    None,
                )
            ),
        },
    ):
        assert acme.cert("test") == result_new_cert
        assert acme.cert("testing.example.com", certname="test") == result_new_cert
    # Test not renewing a valid certificate
    with patch("salt.modules.acme.LEA", "certbot"), patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "cmd.run_all": MagicMock(return_value=cmd_no_renew),
            "file.file_exists": MagicMock(return_value=True),
            "tls.cert_info": MagicMock(return_value={"not_after": valid_timestamp}),
            "file.check_perms": MagicMock(
                side_effect=lambda a, x, b, c, d, follow_symlinks: (
                    salt.utils.dictupdate.set_dict_key_value(x, "result", True),
                    None,
                )
            ),
        },
    ):
        assert acme.cert("test") == result_no_renew
        assert acme.cert("testing.example.com", certname="test") == result_no_renew
    # Test renewing an expired certificate
    with patch("salt.modules.acme.LEA", "certbot"), patch.dict(
        acme.__salt__,
        {  # pylint: disable=no-member
            "cmd.run_all": MagicMock(return_value=cmd_new_cert),
            "file.file_exists": MagicMock(return_value=True),
            "tls.cert_info": MagicMock(return_value={"not_after": expired_timestamp}),
            "file.check_perms": MagicMock(
                side_effect=lambda a, x, b, c, d, follow_symlinks: (
                    salt.utils.dictupdate.set_dict_key_value(x, "result", True),
                    None,
                )
            ),
        },
    ):
        assert acme.cert("test") == result_renew
        assert acme.cert("testing.example.com", certname="test") == result_renew

@@ -4,7 +4,6 @@ import os

import pytest

import salt.config
import salt.loader
import salt.modules.ansiblegate as ansiblegate
import salt.utils.json

@@ -142,7 +141,7 @@ def test_ansible_playbooks_return_retcode():
    assert "retcode" in ret


def test_ansible_targets():
def test_ansible_targets(minion_opts):
    """
    Test ansible.targets execution module function.
    :return:

@@ -174,8 +173,7 @@ def test_ansible_targets():
    """
    ansible_inventory_mock = MagicMock(return_value=ansible_inventory_ret)
    with patch("salt.utils.path.which", MagicMock(return_value=True)):
        opts = salt.config.DEFAULT_MINION_OPTS.copy()
        utils = salt.loader.utils(opts, whitelist=["ansible"])
        utils = salt.loader.utils(minion_opts, whitelist=["ansible"])
        with patch("salt.modules.cmdmod.run", ansible_inventory_mock), patch.dict(
            ansiblegate.__utils__, utils
        ), patch("os.path.isfile", MagicMock(return_value=True)):

332
tests/pytests/unit/modules/test_apache.py
Normal file
@@ -0,0 +1,332 @@
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""

import urllib.error

import pytest

import salt.modules.apache as apache
from salt.utils.odict import OrderedDict
from tests.support.mock import MagicMock, mock_open, patch


@pytest.fixture
def configure_loader_modules():
    return {apache: {}}


# 'version' function tests: 1


def test_version():
    """
    Test if return server version (``apachectl -v``)
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(return_value="Server version: Apache/2.4.7")
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.version() == "Apache/2.4.7"


# 'fullversion' function tests: 1


def test_fullversion():
    """
    Test if return server version (``apachectl -V``)
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(return_value="Server version: Apache/2.4.7")
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.fullversion() == {
                "compiled_with": [],
                "server_version": "Apache/2.4.7",
            }


# 'modules' function tests: 1


def test_modules():
    """
    Test if return list of static and shared modules
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(
            return_value=(
                "unixd_module (static)\n "
                " access_compat_module (shared)"
            )
        )
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.modules() == {
                "shared": ["access_compat_module"],
                "static": ["unixd_module"],
            }


# 'servermods' function tests: 1


def test_servermods():
    """
    Test if return list of modules compiled into the server
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(return_value="core.c\nmod_so.c")
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.servermods() == ["core.c", "mod_so.c"]


# 'directives' function tests: 1


def test_directives():
    """
    Test if return list of directives
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(return_value="Salt")
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.directives() == {"Salt": ""}


# 'vhosts' function tests: 1


def test_vhosts():
    """
    Test if it shows the virtualhost settings
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(return_value="")
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.vhosts() == {}


# 'signal' function tests: 2


def test_signal():
    """
    Test if return no signal for httpd
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        mock = MagicMock(return_value="")
        with patch.dict(apache.__salt__, {"cmd.run": mock}):
            assert apache.signal(None) is None


def test_signal_args():
    """
    Test if return httpd signal to start, restart, or stop.
    """
    with patch("salt.modules.apache._detect_os", MagicMock(return_value="apachectl")):
        ret = 'Command: "apachectl -k start" completed successfully!'
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": ""})
        with patch.dict(apache.__salt__, {"cmd.run_all": mock}):
            assert apache.signal("start") == ret

        mock = MagicMock(
            return_value={"retcode": 1, "stderr": "Syntax OK", "stdout": ""}
        )
        with patch.dict(apache.__salt__, {"cmd.run_all": mock}):
            assert apache.signal("start") == "Syntax OK"

        mock = MagicMock(
            return_value={"retcode": 0, "stderr": "Syntax OK", "stdout": ""}
        )
        with patch.dict(apache.__salt__, {"cmd.run_all": mock}):
            assert apache.signal("start") == "Syntax OK"

        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(apache.__salt__, {"cmd.run_all": mock}):
            assert apache.signal("start") == "Salt"


# 'useradd' function tests: 1


def test_useradd():
    """
    Test if it add HTTP user using the ``htpasswd`` command
    """
    mock = MagicMock(return_value=True)
    with patch.dict(apache.__salt__, {"webutil.useradd": mock}):
        assert apache.useradd("htpasswd", "salt", "badpassword") is True


# 'userdel' function tests: 1


def test_userdel():
    """
    Test if it delete HTTP user using the ``htpasswd`` file
    """
    mock = MagicMock(return_value=True)
    with patch.dict(apache.__salt__, {"webutil.userdel": mock}):
        assert apache.userdel("htpasswd", "salt") is True


# 'server_status' function tests: 2


def test_server_status():
    """
    Test if return get information from the Apache server-status
    """
    with patch("salt.modules.apache.server_status", MagicMock(return_value={})):
        mock = MagicMock(return_value="")
        with patch.dict(apache.__salt__, {"config.get": mock}):
            assert apache.server_status() == {}


def test_server_status_error():
    """
    Test if return get error from the Apache server-status
    """
    mock = MagicMock(side_effect=urllib.error.URLError("error"))
    with patch("urllib.request.urlopen", mock):
        mock = MagicMock(return_value="")
        with patch.dict(apache.__salt__, {"config.get": mock}):
            assert apache.server_status() == "error"


# 'config' function tests: 1


def test_config():
    """
    Test if it create VirtualHost configuration files
    """
    with patch(
        "salt.modules.apache._parse_config", MagicMock(return_value="Listen 22")
    ):
        with patch("salt.utils.files.fopen", mock_open()):
            assert apache.config("/ports.conf", [{"Listen": "22"}]) == "Listen 22"


# '_parse_config' function tests: 2


def test__parse_config_dict():
    """
    Test parsing function which creates configs from dict like (legacy way):
    - VirtualHost:
        this: '*:80'
        ServerName: website.com
        ServerAlias:
          - www
          - dev
        Directory:
            this: /var/www/vhosts/website.com
            Order: Deny,Allow
            Allow from:
              - 127.0.0.1
              - 192.168.100.0/24

    """
    data_in = OrderedDict(
        [
            (
                "Directory",
                OrderedDict(
                    [
                        ("this", "/var/www/vhosts/website.com"),
                        ("Order", "Deny,Allow"),
                        ("Allow from", ["127.0.0.1", "192.168.100.0/24"]),
                    ]
                ),
            ),
            ("this", "*:80"),
            ("ServerName", "website.com"),
            ("ServerAlias", ["www", "dev"]),
        ]
    )
    dataout = (
        "<VirtualHost *:80>\n"
        "<Directory /var/www/vhosts/website.com>\n"
        "Order Deny,Allow\n"
        "Allow from 127.0.0.1\n"
        "Allow from 192.168.100.0/24\n\n"
        "</Directory>\n\n"
        "ServerName website.com\n"
        "ServerAlias www\n"
        "ServerAlias dev\n\n"
        "</VirtualHost>\n"
    )
    # pylint: disable=protected-access
    parse = apache._parse_config(data_in, "VirtualHost")
    assert parse == dataout


def test__parse_config_list():
    """
    Test parsing function which creates configs from variable structure (list of dicts or
    list of dicts of dicts/lists) like:
    - VirtualHost:
      - this: '*:80'
      - ServerName: website.com
      - ServerAlias:
        - www
        - dev
      - Directory:
          this: /var/www/vhosts/website.com
          Order: Deny,Allow
          Allow from:
            - 127.0.0.1
            - 192.168.100.0/24
      - Directory:
        - this: /var/www/vhosts/website.com/private
        - Order: Deny,Allow
        - Allow from:
          - 127.0.0.1
          - 192.168.100.0/24
        - If:
            this: some condition
            do: something
    """
    data_in = [
        OrderedDict(
            [
                ("ServerName", "website.com"),
                ("ServerAlias", ["www", "dev"]),
                (
                    "Directory",
                    [
                        OrderedDict(
                            [
                                ("this", "/var/www/vhosts/website.com/private"),
                                ("Order", "Deny,Allow"),
                                ("Allow from", ["127.0.0.1", "192.168.100.0/24"]),
                                (
                                    "If",
                                    {"this": "some condition", "do": "something"},
                                ),
                            ]
                        )
                    ],
                ),
                ("this", "*:80"),
            ]
        )
    ]
    dataout = (
        "<VirtualHost *:80>\n"
        "ServerName website.com\n"
        "ServerAlias www\n"
        "ServerAlias dev\n\n"
        "<Directory /var/www/vhosts/website.com/private>\n"
        "Order Deny,Allow\n"
        "Allow from 127.0.0.1\n"
        "Allow from 192.168.100.0/24\n\n"
        "<If some condition>\n"
        "do something\n"
        "</If>\n\n"
        "</Directory>\n\n"
        "</VirtualHost>\n"
    )
    # pylint: disable=protected-access
    parse = apache._parse_config(data_in, "VirtualHost")
    assert parse == dataout

407
tests/pytests/unit/modules/test_artifactory.py
Normal file
@@ -0,0 +1,407 @@
import pytest

import salt.modules.artifactory as artifactory
from tests.support.mock import MagicMock, patch


@pytest.fixture
def configure_loader_modules():
    return {artifactory: {}}


def test_artifact_get_metadata():
    with patch(
        "salt.modules.artifactory._get_artifact_metadata_xml",
        MagicMock(
            return_value="""<?xml version="1.0" encoding="UTF-8"?>
            <metadata>
              <groupId>com.company.sampleapp.web-module</groupId>
              <artifactId>web</artifactId>
              <versioning>
                <latest>1.1_RC11</latest>
                <release>1.0.1</release>
                <versions>
                  <version>1.0_RC20</version>
                  <version>1.0_RC22</version>
                </versions>
                <lastUpdated>20140623120632</lastUpdated>
              </versioning>
            </metadata>
            """
        ),
    ):
        metadata = artifactory._get_artifact_metadata(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-releases",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            headers={},
        )
        assert metadata["latest_version"] == "1.1_RC11"


def test_snapshot_version_get_metadata():
    with patch(
        "salt.modules.artifactory._get_snapshot_version_metadata_xml",
        MagicMock(
            return_value="""<?xml version="1.0" encoding="UTF-8"?>
            <metadata>
              <groupId>com.company.sampleapp.web-module</groupId>
              <artifactId>web</artifactId>
              <version>1.1_RC8-SNAPSHOT</version>
              <versioning>
                <snapshot>
                  <timestamp>20140418.150212</timestamp>
                  <buildNumber>1</buildNumber>
                </snapshot>
                <lastUpdated>20140623104055</lastUpdated>
                <snapshotVersions>
                  <snapshotVersion>
                    <extension>pom</extension>
                    <value>1.1_RC8-20140418.150212-1</value>
                    <updated>20140418150212</updated>
                  </snapshotVersion>
                  <snapshotVersion>
                    <extension>war</extension>
                    <value>1.1_RC8-20140418.150212-1</value>
                    <updated>20140418150212</updated>
                  </snapshotVersion>
                </snapshotVersions>
              </versioning>
            </metadata>
            """
        ),
    ):
        metadata = artifactory._get_snapshot_version_metadata(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-releases",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            version="1.1_RC8-SNAPSHOT",
            headers={},
        )
        assert metadata["snapshot_versions"]["war"] == "1.1_RC8-20140418.150212-1"


def test_artifact_metadata_url():
    metadata_url = artifactory._get_artifact_metadata_url(
        artifactory_url="http://artifactory.example.com/artifactory",
        repository="libs-releases",
        group_id="com.company.sampleapp.web-module",
        artifact_id="web",
    )

    assert (
        metadata_url
        == "http://artifactory.example.com/artifactory/libs-releases/com/company/sampleapp/web-module/web/maven-metadata.xml"
    )


def test_snapshot_version_metadata_url():
    metadata_url = artifactory._get_snapshot_version_metadata_url(
        artifactory_url="http://artifactory.example.com/artifactory",
        repository="libs-snapshots",
        group_id="com.company.sampleapp.web-module",
        artifact_id="web",
        version="1.0_RC10-SNAPSHOT",
    )

    assert (
        metadata_url
        == "http://artifactory.example.com/artifactory/libs-snapshots/com/company/sampleapp/web-module/web/1.0_RC10-SNAPSHOT/maven-metadata.xml"
    )


def test_construct_url_for_released_version():
    artifact_url, file_name = artifactory._get_release_url(
        repository="libs-releases",
        group_id="com.company.sampleapp.web-module",
        artifact_id="web",
        packaging="war",
        artifactory_url="http://artifactory.example.com/artifactory",
        version="1.0_RC20",
    )

    assert (
        artifact_url
        == "http://artifactory.example.com/artifactory/libs-releases/com/company/sampleapp/web-module/web/1.0_RC20/web-1.0_RC20.war"
    )
    assert file_name == "web-1.0_RC20.war"


def test_construct_url_for_snapshot_version():
    with patch(
        "salt.modules.artifactory._get_snapshot_version_metadata",
        MagicMock(
            return_value={"snapshot_versions": {"war": "1.0_RC10-20131127.105838-2"}}
        ),
    ):

        artifact_url, file_name = artifactory._get_snapshot_url(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-snapshots",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            version="1.0_RC10-SNAPSHOT",
            packaging="war",
            headers={},
        )

        assert (
            artifact_url
            == "http://artifactory.example.com/artifactory/libs-snapshots/com/company/sampleapp/web-module/web/1.0_RC10-SNAPSHOT/web-1.0_RC10-20131127.105838-2.war"
        )
        assert file_name == "web-1.0_RC10-20131127.105838-2.war"


def test_get_snapshot_url_with_classifier():
    with patch(
        "salt.modules.artifactory._get_snapshot_version_metadata_xml",
        MagicMock(
            return_value="""<?xml version="1.0" encoding="UTF-8"?>
            <metadata>
              <groupId>com.company.sampleapp.web-module</groupId>
              <artifactId>web</artifactId>
              <version>1.1_RC8-SNAPSHOT</version>
              <versioning>
                <snapshot>
                  <timestamp>20140418.150212</timestamp>
                  <buildNumber>1</buildNumber>
                </snapshot>
                <lastUpdated>20140623104055</lastUpdated>
                <snapshotVersions>
                  <snapshotVersion>
                    <extension>pom</extension>
                    <value>1.1_RC8-20140418.150212-1</value>
                    <updated>20140418150212</updated>
                  </snapshotVersion>
                  <snapshotVersion>
                    <classifier>test</classifier>
                    <extension>war</extension>
                    <value>1.1_RC8-20140418.150212-1</value>
                    <updated>20140418150212</updated>
                  </snapshotVersion>
                </snapshotVersions>
              </versioning>
            </metadata>
            """
        ),
    ):
        artifact_url, file_name = artifactory._get_snapshot_url(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-snapshots",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            version="1.1_RC8-SNAPSHOT",
            packaging="war",
            classifier="test",
            headers={},
        )

        assert (
            artifact_url
            == "http://artifactory.example.com/artifactory/libs-snapshots/com/company/sampleapp/web-module/web/1.1_RC8-SNAPSHOT/web-1.1_RC8-20140418.150212-1-test.war"
        )


def test_get_snapshot_url_without_classifier():
    """
    test when classifier not set and packaging
    does not match snapshot_versions in the metadata.
    """
    with patch(
        "salt.modules.artifactory._get_snapshot_version_metadata_xml",
        MagicMock(
            return_value="""<?xml version="1.0" encoding="UTF-8"?>
            <metadata>
              <groupId>com.company.sampleapp.web-module</groupId>
              <artifactId>web</artifactId>
              <version>1.1_RC8-SNAPSHOT</version>
              <versioning>
                <snapshot>
                  <timestamp>20140418.150212</timestamp>
                  <buildNumber>1</buildNumber>
                </snapshot>
                <lastUpdated>20140623104055</lastUpdated>
                <snapshotVersions>
                  <snapshotVersion>
                    <extension>pom</extension>
                    <value>1.1_RC8-20140418.150212-1</value>
                    <updated>20140418150212</updated>
                  </snapshotVersion>
                  <snapshotVersion>
                    <classifier>test</classifier>
                    <extension>war</extension>
                    <value>1.1_RC8-20140418.150212-1</value>
                    <updated>20140418150212</updated>
                  </snapshotVersion>
                </snapshotVersions>
              </versioning>
            </metadata>
            """
        ),
    ):
        with pytest.raises(artifactory.ArtifactoryError):
            artifact_url, file_name = artifactory._get_snapshot_url(
                artifactory_url="http://artifactory.example.com/artifactory",
                repository="libs-snapshots",
                group_id="com.company.sampleapp.web-module",
                artifact_id="web",
                version="1.1_RC8-SNAPSHOT",
                packaging="war",
                headers={},
            )


def test_get_latest_snapshot_username_password():
    with patch(
        "salt.modules.artifactory._get_artifact_metadata",
        return_value={"latest_version": "1.1"},
    ), patch(
        "salt.modules.artifactory._get_snapshot_url",
        return_value=(
            "http://artifactory.example.com/artifactory/snapshot",
            "/path/to/file",
        ),
    ), patch(
        "salt.modules.artifactory.__save_artifact", return_value={}
    ) as save_artifact_mock:
        artifactory.get_latest_snapshot(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-snapshots",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            packaging="war",
            username="user",
            password="password",
        )
        save_artifact_mock.assert_called_with(
            "http://artifactory.example.com/artifactory/snapshot",
            "/path/to/file",
            {"Authorization": "Basic dXNlcjpwYXNzd29yZA==\n"},
        )


def test_get_snapshot_username_password():
    with patch(
        "salt.modules.artifactory._get_snapshot_url",
        return_value=(
            "http://artifactory.example.com/artifactory/snapshot",
            "/path/to/file",
        ),
    ), patch(
        "salt.modules.artifactory.__save_artifact", return_value={}
    ) as save_artifact_mock:
        artifactory.get_snapshot(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-snapshots",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            packaging="war",
            version="1.1",
            username="user",
            password="password",
        )
        save_artifact_mock.assert_called_with(
            "http://artifactory.example.com/artifactory/snapshot",
            "/path/to/file",
            {"Authorization": "Basic dXNlcjpwYXNzd29yZA==\n"},
        )


def test_get_latest_release_username_password():
    with patch(
        "salt.modules.artifactory.__find_latest_version",
        return_value="1.1",
    ), patch(
        "salt.modules.artifactory._get_release_url",
        return_value=(
            "http://artifactory.example.com/artifactory/release",
            "/path/to/file",
        ),
    ), patch(
        "salt.modules.artifactory.__save_artifact", return_value={}
    ) as save_artifact_mock:
        artifactory.get_latest_release(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-snapshots",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            packaging="war",
            username="user",
            password="password",
        )
        save_artifact_mock.assert_called_with(
            "http://artifactory.example.com/artifactory/release",
            "/path/to/file",
            {"Authorization": "Basic dXNlcjpwYXNzd29yZA==\n"},
        )


def test_get_release_username_password():
    with patch(
        "salt.modules.artifactory._get_release_url",
        return_value=(
            "http://artifactory.example.com/artifactory/release",
            "/path/to/file",
        ),
    ), patch(
        "salt.modules.artifactory.__save_artifact", return_value={}
    ) as save_artifact_mock:
        artifactory.get_release(
            artifactory_url="http://artifactory.example.com/artifactory",
            repository="libs-snapshots",
            group_id="com.company.sampleapp.web-module",
            artifact_id="web",
            packaging="war",
            version="1.1",
            username="user",
            password="password",
        )
        save_artifact_mock.assert_called_with(
            "http://artifactory.example.com/artifactory/release",
            "/path/to/file",
            {"Authorization": "Basic dXNlcjpwYXNzd29yZA==\n"},
        )


def test_save_artifact_file_exists_checksum_equal():
    artifact_url = "http://artifactory.example.com/artifactory/artifact"
    target_file = "/path/to/file"
    sum_str = "0123456789abcdef0123456789abcdef01234567"
    sum_bin = sum_str.encode()
    with patch("os.path.isfile", return_value=True), patch.dict(
        artifactory.__salt__, {"file.get_hash": MagicMock(return_value=sum_str)}
    ):
        with patch(
            "salt.modules.artifactory.__download",
            return_value=(True, sum_bin, None),
        ):
            result = getattr(artifactory, "__save_artifact")(
                artifact_url=artifact_url, target_file=target_file, headers={}
            )
            assert result == {
                "status": True,
                "changes": {},
                "target_file": target_file,
                "comment": (
                    "File {} already exists, checksum matches with Artifactory.\n"
                    "Checksum URL: {}.sha1".format(target_file, artifact_url)
                ),
            }
        with patch(
            "salt.modules.artifactory.__download",
            return_value=(True, sum_str, None),
        ):
            result = getattr(artifactory, "__save_artifact")(
                artifact_url=artifact_url, target_file=target_file, headers={}
            )
            assert result == {
                "status": True,
                "changes": {},
                "target_file": target_file,
                "comment": (
                    "File {} already exists, checksum matches with Artifactory.\n"
                    "Checksum URL: {}.sha1".format(target_file, artifact_url)
                ),
            }

@@ -7,7 +7,6 @@ import os

import pytest

import salt.config
import salt.modules.beacons as beacons
from salt.utils.event import SaltEvent
from tests.support.mock import MagicMock, call, mock_open, patch

@@ -16,15 +15,8 @@ log = logging.getLogger(__name__)


@pytest.fixture
def sock_dir(tmp_path):
    return str(tmp_path / "test-socks")


@pytest.fixture
def configure_loader_modules(sock_dir):
    opts = salt.config.DEFAULT_MINION_OPTS.copy()
    opts["sock_dir"] = sock_dir
    return {beacons: {"__opts__": opts}}
def configure_loader_modules(minion_opts):
    return {beacons: {"__opts__": minion_opts}}


@pytest.mark.slow_test

781
tests/pytests/unit/modules/test_btrfs.py
Normal file
@@ -0,0 +1,781 @@
"""
Test cases for salt.modules.btrfs

:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""

import os

import pytest

import salt.modules.btrfs as btrfs
import salt.utils.files
import salt.utils.fsutils
import salt.utils.platform
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, mock_open, patch


@pytest.fixture
def configure_loader_modules():
    return {btrfs: {"__salt__": {}}}


# 'version' function tests: 1
def test_version():
    """
    Test if it return BTRFS version.
    """
    mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
    with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
        assert btrfs.version() == {"version": "Salt"}


# 'info' function tests: 1


def test_info():
    """
    Test if it get BTRFS filesystem information.
    """
    with patch("salt.utils.fsutils._verify_run", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(return_value={"Salt": "salt"})
            with patch.object(btrfs, "_parse_btrfs_info", mock):
                assert btrfs.info("/dev/sda1") == {"Salt": "salt"}


# 'devices' function tests: 1


def test_devices():
    """
    Test if it get known BTRFS formatted devices on the system.
    """
    with patch("salt.utils.fsutils._blkid_output", MagicMock(return_value="Salt")):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            assert btrfs.devices() == "Salt"


# 'defragment' function tests: 2


def test_defragment():
    """
    Test if it defragment mounted BTRFS filesystem.
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=False)):
        with patch("os.path.exists", MagicMock(return_value=True)):
            ret = [
                {
                    "range": "/dev/sda1",
                    "mount_point": False,
                    "log": False,
                    "passed": True,
                }
            ]
            mock_run = MagicMock(
                return_value={"retcode": 1, "stderr": "", "stdout": "Salt"}
            )
            with patch.dict(btrfs.__salt__, {"cmd.run_all": mock_run}):
                mock_file = mock_open(read_data="/dev/sda1 / ext4 rw,data=ordered 0 0")
                with patch.object(salt.utils.files, "fopen", mock_file):
                    assert btrfs.defragment("/dev/sda1") == ret


def test_defragment_error():
    """
    Test if it gives device not mount error
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=True)):
        mock_run = MagicMock(
            return_value={"retcode": 1, "stderr": "", "stdout": "Salt"}
        )
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock_run}):
            mock_file = mock_open(read_data="/dev/sda1 / ext4 rw,data=ordered 0 0")
            with patch.object(salt.utils.files, "fopen", mock_file):
                pytest.raises(CommandExecutionError, btrfs.defragment, "/dev/sda1")


# 'features' function tests: 1


def test_features():
    """
    Test if it list currently available BTRFS features.
    """
    with patch("salt.utils.fsutils._verify_run", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            assert btrfs.features() == {}


# 'usage' function tests: 1


def test_usage():
    """
    Test if it shows in which disk the chunks are allocated.
    """
    with patch("salt.utils.fsutils._verify_run", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(return_value={"Salt": "salt"})
            with patch.object(btrfs, "_usage_specific", mock):
                assert btrfs.usage("/dev/sda1") == {"Salt": "salt"}

        mock = MagicMock(
            return_value={"retcode": 1, "stderr": "", "stdout": "Unallocated:\n"}
        )
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(return_value={"/dev/sda1": True})
            with patch.object(btrfs, "_usage_unallocated", mock):
                assert btrfs.usage("/dev/sda1") == {"unallocated": {"/dev/sda1": True}}

        mock = MagicMock(
            return_value={"retcode": 1, "stderr": "", "stdout": "Overall:\n"}
        )
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(return_value={"/dev/sda1": True})
            with patch.object(btrfs, "_usage_overall", mock):
                assert btrfs.usage("/dev/sda1") == {"overall": {"/dev/sda1": True}}


# 'mkfs' function tests: 3


def test_mkfs():
    """
    Test if it create a file system on the specified device.
    """
    mock_cmd = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
    mock_info = MagicMock(return_value=[])
    with patch.dict(btrfs.__salt__, {"cmd.run_all": mock_cmd, "btrfs.info": mock_info}):
        mock_file = mock_open(read_data="/dev/sda1 / ext4 rw,data=ordered 0 0")
        with patch.object(salt.utils.files, "fopen", mock_file):
            assert btrfs.mkfs("/dev/sda1") == {"log": "Salt"}


def test_mkfs_error():
    """
    Test if it No devices specified error
    """
    pytest.raises(CommandExecutionError, btrfs.mkfs)


def test_mkfs_mount_error():
    """
    Test if it device mount error
    """
    mock = MagicMock(return_value={"/dev/sda1": True})
    with patch.object(salt.utils.fsutils, "_get_mounts", mock):
        pytest.raises(CommandExecutionError, btrfs.mkfs, "/dev/sda1")


# 'resize' function tests: 4


def test_resize():
    """
    Test if it resize filesystem.
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        mock_info = MagicMock(return_value=[])
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock, "btrfs.info": mock_info}):
            mock = MagicMock(return_value={"/dev/sda1": True})
            with patch.object(salt.utils.fsutils, "_get_mounts", mock):
                assert btrfs.resize("/dev/sda1", "max") == {"log": "Salt"}


def test_resize_valid_error():
    """
    Test if it gives device should be mounted error
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=False)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            pytest.raises(CommandExecutionError, btrfs.resize, "/dev/sda1", "max")


def test_resize_mount_error():
    """
    Test if it gives mount point error
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"/dev/sda1": False})
        with patch.object(salt.utils.fsutils, "_get_mounts", mock):
            pytest.raises(CommandExecutionError, btrfs.resize, "/dev/sda1", "max")


def test_resize_size_error():
    """
    Test if it gives unknown size error
    """
    pytest.raises(CommandExecutionError, btrfs.resize, "/dev/sda1", "250m")


# 'convert' function tests: 5


def test_convert():
    """
    Test if it convert ext2/3/4 to BTRFS
    """
    with patch("os.path.exists", MagicMock(return_value=True)):
        ret = {
            "after": {
                "balance_log": "Salt",
                "ext4_image": "removed",
                "ext4_image_info": "N/A",
                "fsck_status": "N/A",
                "mount_point": None,
                "type": "ext4",
            },
            "before": {
                "fsck_status": "Filesystem errors corrected",
                "mount_point": None,
                "type": "ext4",
            },
        }
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(return_value={"/dev/sda3": {"type": "ext4"}})
            with patch.object(salt.utils.fsutils, "_blkid_output", mock):
                mock = MagicMock(return_value={"/dev/sda3": [{"mount_point": None}]})
                with patch.object(salt.utils.fsutils, "_get_mounts", mock):
                    assert btrfs.convert("/dev/sda3", permanent=True) == ret


def test_convert_device_error():
    """
    Test if it gives device not found error
    """
    mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
    with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
        mock = MagicMock(return_value={"/dev/sda1": False})
        with patch.object(salt.utils.fsutils, "_blkid_output", mock):
            pytest.raises(CommandExecutionError, btrfs.convert, "/dev/sda1")


def test_convert_filesystem_error():
    """
    Test if it gives file system error
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(return_value={"/dev/sda1": {"type": "ext"}})
            with patch.object(salt.utils.fsutils, "_blkid_output", mock):
                pytest.raises(CommandExecutionError, btrfs.convert, "/dev/sda1")


def test_convert_error():
    """
    Test if it gives error cannot convert root
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            mock = MagicMock(
                return_value={"/dev/sda1": {"type": "ext4", "mount_point": "/"}}
            )
            with patch.object(salt.utils.fsutils, "_blkid_output", mock):
                mock = MagicMock(return_value={"/dev/sda1": [{"mount_point": "/"}]})
                with patch.object(salt.utils.fsutils, "_get_mounts", mock):
                    pytest.raises(CommandExecutionError, btrfs.convert, "/dev/sda1")


def test_convert_migration_error():
    """
    Test if it gives migration error
    """
    with patch("salt.utils.fsutils._is_device", MagicMock(return_value=True)):
        mock_run = MagicMock(
            return_value={"retcode": 1, "stderr": "", "stdout": "Salt"}
        )
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock_run}):
            mock_blk = MagicMock(return_value={"/dev/sda1": {"type": "ext4"}})
            with patch.object(salt.utils.fsutils, "_blkid_output", mock_blk):
                mock_file = mock_open(read_data="/dev/sda1 / ext4 rw,data=ordered 0 0")
                with patch.object(salt.utils.files, "fopen", mock_file):
                    pytest.raises(CommandExecutionError, btrfs.convert, "/dev/sda1")


# 'add' function tests: 1


def test_add():
    """
    Test if it add a devices to a BTRFS filesystem.
    """
    with patch("salt.modules.btrfs._restripe", MagicMock(return_value={})):
        assert btrfs.add("/mountpoint", "/dev/sda1", "/dev/sda2") == {}


# 'delete' function tests: 1


def test_delete():
    """
    Test if it delete a devices to a BTRFS filesystem.
    """
    with patch("salt.modules.btrfs._restripe", MagicMock(return_value={})):
        assert btrfs.delete("/mountpoint", "/dev/sda1", "/dev/sda2") == {}


# 'properties' function tests: 1


def test_properties():
    """
    Test if list properties for given btrfs object
    """
    with patch("salt.utils.fsutils._verify_run", MagicMock(return_value=True)):
        mock = MagicMock(return_value={"retcode": 1, "stderr": "", "stdout": "Salt"})
        with patch.dict(btrfs.__salt__, {"cmd.run_all": mock}):
            assert btrfs.properties("/dev/sda1", "subvol") == {}


def test_properties_unknown_error():
    """
    Test if it gives unknown property error
    """
    pytest.raises(CommandExecutionError, btrfs.properties, "/dev/sda1", "a")


def test_properties_error():
    """
    Test if it gives exception error
    """
    pytest.raises(CommandExecutionError, btrfs.properties, "/dev/sda1", "subvol", True)


def test_subvolume_exists():
    """
    Test subvolume_exists
    """
    salt_mock = {
        "cmd.retcode": MagicMock(return_value=0),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_exists("/mnt/one")


def test_subvolume_not_exists():
    """
    Test subvolume_exists
    """
    salt_mock = {
        "cmd.retcode": MagicMock(return_value=1),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert not btrfs.subvolume_exists("/mnt/nowhere")


def test_subvolume_create_fails_parameters():
    """
    Test btrfs subvolume create
    """
    # Fails when qgroupids is not a list
    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_create("var", qgroupids="1")


@patch("salt.modules.btrfs.subvolume_exists")
def test_subvolume_create_already_exists(subvolume_exists):
    """
    Test btrfs subvolume create
    """
    subvolume_exists.return_value = True
    assert not btrfs.subvolume_create("var", dest="/mnt")


@patch("salt.modules.btrfs.subvolume_exists")
def test_subvolume_create(subvolume_exists):
    """
    Test btrfs subvolume create
    """
    subvolume_exists.return_value = False
    salt_mock = {
        "cmd.run_all": MagicMock(return_value={"recode": 0}),
    }
    expected_path = os.path.join("/mnt", "var")
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_create("var", dest="/mnt")
        subvolume_exists.assert_called_once()
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "create", expected_path]
        )


def test_subvolume_delete_fails_parameters():
    """
    Test btrfs subvolume delete
    """
    # We need to provide name or names
    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_delete()

    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_delete(names="var")


def test_subvolume_delete_fails_parameter_commit():
    """
    Test btrfs subvolume delete
    """
    # Parameter commit can be 'after' or 'each'
    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_delete(name="var", commit="maybe")


@patch("salt.modules.btrfs.subvolume_exists")
def test_subvolume_delete_already_missing(subvolume_exists):
    """
    Test btrfs subvolume delete
    """
    subvolume_exists.return_value = False
    assert not btrfs.subvolume_delete(name="var", names=["tmp"])


@patch("salt.modules.btrfs.subvolume_exists")
def test_subvolume_delete_already_missing_name(subvolume_exists):
    """
    Test btrfs subvolume delete
    """
    subvolume_exists.return_value = False
    assert not btrfs.subvolume_delete(name="var")


@patch("salt.modules.btrfs.subvolume_exists")
def test_subvolume_delete_already_missing_names(subvolume_exists):
    """
    Test btrfs subvolume delete
    """
    subvolume_exists.return_value = False
    assert not btrfs.subvolume_delete(names=["tmp"])


@patch("salt.modules.btrfs.subvolume_exists")
def test_subvolume_delete(subvolume_exists):
    """
    Test btrfs subvolume delete
    """
    subvolume_exists.return_value = True
    salt_mock = {
        "cmd.run_all": MagicMock(return_value={"recode": 0}),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_delete("var", names=["tmp"])
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "delete", "var", "tmp"]
        )


def test_subvolume_find_new_empty():
    """
    Test btrfs subvolume find-new
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={"recode": 0, "stdout": "transid marker was 1024"}
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_find_new("var", "2000") == {
            "files": [],
            "transid": "1024",
        }
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "find-new", "var", "2000"]
        )


def test_subvolume_find_new():
    """
    Test btrfs subvolume find-new
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={
                "recode": 0,
                "stdout": """inode 185148 ... gen 2108 flags NONE var/log/audit/audit.log
inode 187390 ... INLINE etc/openvpn/openvpn-status.log
transid marker was 1024""",
            }
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_find_new("var", "1023") == {
            "files": ["var/log/audit/audit.log", "etc/openvpn/openvpn-status.log"],
            "transid": "1024",
        }
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "find-new", "var", "1023"]
        )


def test_subvolume_get_default_free():
    """
    Test btrfs subvolume get-default
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={"recode": 0, "stdout": "ID 5 (FS_TREE)"}
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_get_default("/mnt") == {
            "id": "5",
            "name": "(FS_TREE)",
        }
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "get-default", "/mnt"]
        )


def test_subvolume_get_default():
    """
    Test btrfs subvolume get-default
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={
                "recode": 0,
                "stdout": "ID 257 gen 8 top level 5 path var",
            }
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_get_default("/mnt") == {
            "id": "257",
            "name": "var",
        }
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "get-default", "/mnt"]
        )


def test_subvolume_list_fails_parameters():
    """
    Test btrfs subvolume list
    """
    # Fails when sort is not a list
    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_list("/mnt", sort="-rootid")

    # Fails when sort is not recognized
    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_list("/mnt", sort=["-root"])


def test_subvolume_list_simple():
    """
    Test btrfs subvolume list
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={
                "recode": 0,
                "stdout": """ID 257 gen 8 top level 5 path one
ID 258 gen 10 top level 5 path another one
""",
            }
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_list("/mnt") == [
            {"id": "257", "gen": "8", "top level": "5", "path": "one"},
            {"id": "258", "gen": "10", "top level": "5", "path": "another one"},
        ]
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "list", "/mnt"]
        )


def test_subvolume_list():
    """
    Test btrfs subvolume list
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={
                "recode": 0,
                "stdout": """\
ID 257 gen 8 cgen 8 parent 5 top level 5 parent_uuid - received_uuid - \
uuid 777...-..05 path one
ID 258 gen 10 cgen 10 parent 5 top level 5 parent_uuid - received_uuid - \
uuid a90...-..01 path another one
""",
            }
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_list(
            "/mnt",
            parent_id=True,
            absolute=True,
            ogeneration=True,
            generation=True,
            subvolumes=True,
            uuid=True,
            parent_uuid=True,
            sent_subvolume_uuid=True,
            generation_cmp="-100",
            ogeneration_cmp="+5",
            sort=["-rootid", "gen"],
        ) == [
            {
                "id": "257",
                "gen": "8",
                "cgen": "8",
                "parent": "5",
                "top level": "5",
                "parent_uuid": "-",
                "received_uuid": "-",
                "uuid": "777...-..05",
                "path": "one",
            },
            {
                "id": "258",
                "gen": "10",
                "cgen": "10",
                "parent": "5",
                "top level": "5",
                "parent_uuid": "-",
                "received_uuid": "-",
                "uuid": "a90...-..01",
                "path": "another one",
            },
        ]
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            [
                "btrfs",
                "subvolume",
                "list",
                "-p",
                "-a",
                "-c",
                "-g",
                "-o",
                "-u",
                "-q",
                "-R",
                "-G",
                "-100",
                "-C",
                "+5",
                "--sort=-rootid,gen",
                "/mnt",
            ]
        )


def test_subvolume_set_default():
    """
    Test btrfs subvolume set-default
    """
    salt_mock = {
        "cmd.run_all": MagicMock(return_value={"recode": 0}),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_set_default("257", "/mnt")
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "set-default", "257", "/mnt"]
        )


def test_subvolume_show():
    """
    Test btrfs subvolume show
    """
    salt_mock = {
        "cmd.run_all": MagicMock(
            return_value={
                "recode": 0,
                "stdout": """@/var
Name: var
UUID: 7a14...-...04
Parent UUID: -
Received UUID: -
Creation time: 2018-10-01 14:33:12 +0200
Subvolume ID: 258
Generation: 82479
Gen at creation: 10
Parent ID: 256
Top level ID: 256
Flags: -
Snapshot(s):
""",
            }
        ),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_show("/var") == {
            "@/var": {
                "name": "var",
                "uuid": "7a14...-...04",
                "parent uuid": "-",
                "received uuid": "-",
                "creation time": "2018-10-01 14:33:12 +0200",
                "subvolume id": "258",
                "generation": "82479",
                "gen at creation": "10",
                "parent id": "256",
                "top level id": "256",
                "flags": "-",
                "snapshot(s)": "",
            },
        }
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "show", "/var"]
        )


def test_subvolume_sync_fail_parameters():
    """
    Test btrfs subvolume sync
    """
    # Fails when subvolids is not a list
    with pytest.raises(CommandExecutionError):
        btrfs.subvolume_sync("/mnt", subvolids="257")


def test_subvolume_sync():
    """
    Test btrfs subvolume sync
    """
    salt_mock = {
        "cmd.run_all": MagicMock(return_value={"recode": 0}),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_sync("/mnt", subvolids=["257"], sleep="1")
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "sync", "-s", "1", "/mnt", "257"]
        )


def test_subvolume_snapshot():
    salt_mock = {
        "cmd.run_all": MagicMock(return_value={"recode": 0}),
    }
    with patch.dict(btrfs.__salt__, salt_mock):
        assert btrfs.subvolume_snapshot("/mnt", name="tmp_snap")
        salt_mock["cmd.run_all"].assert_called_once()
        salt_mock["cmd.run_all"].assert_called_with(
            ["btrfs", "subvolume", "snapshot", "/mnt", "tmp_snap"]
        )
@@ -29,9 +29,8 @@ MOCK_SHELL_FILE = "# List of acceptable shells\n\n/bin/bash\n"
 
 
 @pytest.fixture
-def configure_loader_modules():
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    return {cmdmod: {"__opts__": opts}}
+def configure_loader_modules(minion_opts):
+    return {cmdmod: {"__opts__": minion_opts}}
 
 
 @pytest.fixture(scope="module")
77
tests/pytests/unit/modules/test_openbsdrcctl_service.py
Normal file
@@ -0,0 +1,77 @@
import pytest

import salt.modules.openbsdrcctl_service as openbsdrcctl
from tests.support.mock import MagicMock, patch


@pytest.fixture(autouse=True)
def rcctl():
    cmd = "rcctl"
    with patch.object(openbsdrcctl, "_cmd", return_value=cmd):
        yield cmd


@pytest.fixture
def retcode_mock():
    return MagicMock()


@pytest.fixture
def configure_loader_modules(retcode_mock):
    return {
        openbsdrcctl: {
            "__salt__": {"cmd.retcode": retcode_mock},
        },
    }


def test_available(retcode_mock, rcctl):
    retcode_mock.return_value = 0
    assert openbsdrcctl.available("test") is True
    retcode_mock.assert_called_with("{} get test".format(rcctl), ignore_retcode=True)
    retcode_mock.return_value = 2
    assert openbsdrcctl.available("test") is False
    retcode_mock.assert_called_with("{} get test".format(rcctl), ignore_retcode=True)


def test_status(retcode_mock, rcctl):
    retcode_mock.return_value = 0
    assert openbsdrcctl.status("test") is True
    retcode_mock.assert_called_with("{} check test".format(rcctl), ignore_retcode=True)
    retcode_mock.return_value = 2
    assert openbsdrcctl.status("test") is False
    retcode_mock.assert_called_with("{} check test".format(rcctl), ignore_retcode=True)


def test_disabled(retcode_mock, rcctl):
    retcode_mock.return_value = 0
    assert openbsdrcctl.disabled("test") is False
    retcode_mock.assert_called_with(
        "{} get test status".format(rcctl), ignore_retcode=True
    )
    retcode_mock.return_value = 2
    assert openbsdrcctl.disabled("test") is True
    retcode_mock.assert_called_with(
        "{} get test status".format(rcctl), ignore_retcode=True
    )


def test_enabled(retcode_mock, rcctl):
    retcode_mock.return_value = 0
    flags_return = {"flag1": "value1"}
    stdout_mock = MagicMock(return_value=flags_return)
    salt_mock = {
        "cmd.run_stdout": stdout_mock,
        "config.option": MagicMock(),
    }
    with patch.dict(openbsdrcctl.__salt__, salt_mock):
        assert openbsdrcctl.enabled("test", flags=flags_return) is True
        retcode_mock.assert_called_with(
            "{} get test status".format(rcctl), ignore_retcode=True
        )
        retcode_mock.return_value = 2
        stdout_mock.reset_mock()
        assert openbsdrcctl.enabled("test") is False
        retcode_mock.assert_called_with(
            "{} get test status".format(rcctl), ignore_retcode=True
        )
|
@ -28,26 +28,20 @@ def job1():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def sock_dir(tmp_path):
|
||||
return str(tmp_path / "test-socks")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {schedule: {}}
|
||||
def configure_loader_modules(minion_opts):
|
||||
minion_opts["schedule"] = {}
|
||||
return {schedule: {"__opts__": minion_opts}}
|
||||
|
||||
|
||||
# 'purge' function tests: 1
|
||||
@pytest.mark.slow_test
|
||||
def test_purge(sock_dir, job1):
|
||||
def test_purge(job1):
|
||||
"""
|
||||
Test if it purge all the jobs currently scheduled on the minion.
|
||||
"""
|
||||
_schedule_data = {"job1": job1}
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {}})
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
)
|
||||
|
@ -74,9 +68,7 @@ def test_purge(sock_dir, job1):
|
|||
|
||||
changes = {"job1": "removed", "job2": "removed", "job3": "removed"}
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": "salt"}})
|
||||
patch_schedule_list = patch.object(
|
||||
schedule, "list_", MagicMock(return_value=_schedule_data)
|
||||
)
|
||||
|
@ -98,14 +90,12 @@ def test_purge(sock_dir, job1):
|
|||
|
||||
# 'delete' function tests: 1
|
||||
@pytest.mark.slow_test
|
||||
def test_delete(sock_dir, job1):
|
||||
def test_delete(job1):
|
||||
"""
|
||||
Test if it delete a job from the minion's schedule.
|
||||
"""
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {}})
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
)
|
||||
|
@ -125,9 +115,7 @@ def test_delete(sock_dir, job1):
|
|||
schedule, "list_", MagicMock(return_value=_schedule_data)
|
||||
)
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": "salt"}})
|
||||
|
||||
comm = "Deleted Job job1 from schedule."
|
||||
changes = {"job1": "removed"}
|
||||
|
@ -148,7 +136,7 @@ def test_delete(sock_dir, job1):
|
|||
|
||||
|
||||
# 'build_schedule_item' function tests: 1
|
||||
def test_build_schedule_item(sock_dir):
|
||||
def test_build_schedule_item():
|
||||
"""
|
||||
Test if it build a schedule job.
|
||||
"""
|
||||
|
@ -183,7 +171,7 @@ def test_build_schedule_item(sock_dir):
|
|||
# 'build_schedule_item_invalid_when' function tests: 1
|
||||
|
||||
|
||||
def test_build_schedule_item_invalid_when(sock_dir):
|
||||
def test_build_schedule_item_invalid_when():
|
||||
"""
|
||||
Test if it build a schedule job.
|
||||
"""
|
||||
|
@ -194,7 +182,7 @@ def test_build_schedule_item_invalid_when(sock_dir):
|
|||
) == {"comment": comment, "result": False}
|
||||
|
||||
|
||||
def test_build_schedule_item_invalid_jobs_args(sock_dir):
|
||||
def test_build_schedule_item_invalid_jobs_args():
|
||||
"""
|
||||
Test failure if job_arg and job_kwargs are passed correctly
|
||||
"""
|
||||
|
@ -214,7 +202,7 @@ def test_build_schedule_item_invalid_jobs_args(sock_dir):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_add(sock_dir):
|
||||
def test_add():
|
||||
"""
|
||||
Test if it add a job to the schedule.
|
||||
"""
|
||||
|
@ -227,9 +215,7 @@ def test_add(sock_dir):
|
|||
comm4 = "Job: job2 would be added to schedule."
|
||||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": "salt"}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": "salt"}})
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
)
|
||||
|
@ -304,15 +290,13 @@ def test_add(sock_dir):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_run_job(sock_dir, job1):
|
||||
def test_run_job(job1):
|
||||
"""
|
||||
Test if it run a scheduled job on the minion immediately.
|
||||
"""
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
|
@ -334,15 +318,13 @@ def test_run_job(sock_dir, job1):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_enable_job(sock_dir):
|
||||
def test_enable_job():
|
||||
"""
|
||||
Test if it enable a job in the minion's schedule.
|
||||
"""
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
|
@ -363,15 +345,13 @@ def test_enable_job(sock_dir):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_disable_job(sock_dir):
|
||||
def test_disable_job():
|
||||
"""
|
||||
Test if it disable a job in the minion's schedule.
|
||||
"""
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
|
@ -392,14 +372,14 @@ def test_disable_job(sock_dir):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_save(sock_dir):
|
||||
def test_save():
|
||||
"""
|
||||
Test if it save all scheduled jobs on the minion.
|
||||
"""
|
||||
comm1 = "Schedule (non-pillar items) saved."
|
||||
with patch.dict(
|
||||
schedule.__opts__,
|
||||
{"schedule": {}, "default_include": "/tmp", "sock_dir": sock_dir},
|
||||
{"default_include": "/tmp"},
|
||||
):
|
||||
with patch("os.makedirs", MagicMock(return_value=True)):
|
||||
mock = MagicMock(return_value=True)
|
||||
|
@ -412,7 +392,7 @@ def test_save(sock_dir):
|
|||
# 'enable' function tests: 1
|
||||
|
||||
|
||||
def test_enable(sock_dir):
|
||||
def test_enable():
|
||||
"""
|
||||
Test if it enable all scheduled jobs on the minion.
|
||||
"""
|
||||
|
@ -426,7 +406,7 @@ def test_enable(sock_dir):
|
|||
# 'disable' function tests: 1
|
||||
|
||||
|
||||
def test_disable(sock_dir):
|
||||
def test_disable():
|
||||
"""
|
||||
Test if it disable all scheduled jobs on the minion.
|
||||
"""
|
||||
|
@ -441,7 +421,7 @@ def test_disable(sock_dir):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_move(sock_dir, job1):
|
||||
def test_move(job1):
|
||||
"""
|
||||
Test if it move scheduled job to another minion or minions.
|
||||
"""
|
||||
|
@ -451,9 +431,7 @@ def test_move(sock_dir, job1):
|
|||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
|
@ -545,7 +523,7 @@ def test_move(sock_dir, job1):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_copy(sock_dir, job1):
|
||||
def test_copy(job1):
|
||||
"""
|
||||
Test if it copy scheduled job to another minion or minions.
|
||||
"""
|
||||
|
@ -555,9 +533,7 @@ def test_copy(sock_dir, job1):
|
|||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
|
@ -648,7 +624,7 @@ def test_copy(sock_dir, job1):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_modify(sock_dir, job1):
|
||||
def test_modify(job1):
|
||||
"""
|
||||
Test if modifying job to the schedule.
|
||||
"""
|
||||
|
@ -713,9 +689,7 @@ def test_modify(sock_dir, job1):
|
|||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
|
@ -857,7 +831,7 @@ def test_modify(sock_dir, job1):
|
|||
# 'is_enabled' function tests: 1
|
||||
|
||||
|
||||
def test_is_enabled(sock_dir):
|
||||
def test_is_enabled():
|
||||
"""
|
||||
Test is_enabled
|
||||
"""
|
||||
|
@ -870,9 +844,7 @@ def test_is_enabled(sock_dir):
|
|||
mock_lst = MagicMock(return_value=mock_schedule)
|
||||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {}})
|
||||
patch_schedule_salt = patch.dict(
|
||||
schedule.__salt__,
|
||||
{"event.fire": MagicMock(return_value=True), "schedule.list": mock_lst},
|
||||
|
@ -894,7 +866,7 @@ def test_is_enabled(sock_dir):
|
|||
# 'job_status' function tests: 1
|
||||
|
||||
|
||||
def test_job_status(sock_dir):
|
||||
def test_job_status():
|
||||
"""
|
||||
Test is_enabled
|
||||
"""
|
||||
|
@ -912,9 +884,7 @@ def test_job_status(sock_dir):
|
|||
mock_lst = MagicMock(return_value=mock_schedule)
|
||||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {"job1": job1}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {"job1": job1}})
|
||||
patch_schedule_salt = patch.dict(
|
||||
schedule.__salt__,
|
||||
{"event.fire": MagicMock(return_value=True), "schedule.list": mock_lst},
|
||||
|
@ -935,7 +905,7 @@ def test_job_status(sock_dir):
|
|||
|
||||
# 'purge' function tests: 1
|
||||
@pytest.mark.slow_test
|
||||
def test_list(sock_dir, job1):
|
||||
def test_list(job1):
|
||||
"""
|
||||
Test schedule.list
|
||||
"""
|
||||
|
@ -965,9 +935,7 @@ def test_list(sock_dir, job1):
|
|||
"""
|
||||
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {}})
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
)
|
||||
|
@ -1011,7 +979,6 @@ def test_list(sock_dir, job1):
|
|||
ret = schedule.list_()
|
||||
assert ret == expected
|
||||
|
||||
_schedule_data = {"job1": job1}
|
||||
_ret_schedule_data = {
|
||||
"function": "test.ping",
|
||||
"seconds": 10,
|
||||
|
@ -1048,7 +1015,7 @@ def test_list(sock_dir, job1):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_list_global_enabled(sock_dir, job1):
|
||||
def test_list_global_enabled(job1):
|
||||
"""
|
||||
Test schedule.list when enabled globally
|
||||
"""
|
||||
|
@ -1084,9 +1051,7 @@ def test_list_global_enabled(sock_dir, job1):
|
|||
seconds: 10
|
||||
"""
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {}})
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
)
|
||||
|
@ -1106,7 +1071,7 @@ def test_list_global_enabled(sock_dir, job1):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_list_global_disabled(sock_dir, job1):
|
||||
def test_list_global_disabled(job1):
|
||||
"""
|
||||
Test schedule.list when disabled globally
|
||||
"""
|
||||
|
@ -1143,9 +1108,7 @@ def test_list_global_disabled(sock_dir, job1):
|
|||
seconds: 10
|
||||
"""
|
||||
patch_makedirs = patch("os.makedirs", MagicMock(return_value=True))
|
||||
patch_schedule_opts = patch.dict(
|
||||
schedule.__opts__, {"schedule": {}, "sock_dir": sock_dir}
|
||||
)
|
||||
patch_schedule_opts = patch.dict(schedule.__opts__, {"schedule": {}})
|
||||
patch_schedule_event_fire = patch.dict(
|
||||
schedule.__salt__, {"event.fire": MagicMock(return_value=True)}
|
||||
)
|
||||
|
|
560
tests/pytests/unit/modules/test_timezone.py
Normal file
@@ -0,0 +1,560 @@
|
|||
import os
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.timezone as timezone
|
||||
import salt.utils.platform
|
||||
import salt.utils.stringutils
|
||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||
from tests.support.mock import MagicMock, mock_open, patch
|
||||
from tests.support.unit import skipIf
|
||||
|
||||
GET_ZONE_FILE = "salt.modules.timezone._get_zone_file"
|
||||
GET_LOCALTIME_PATH = "salt.modules.timezone._get_localtime_path"
|
||||
TEST_TZ = "UTC"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {
|
||||
timezone: {
|
||||
"__grains__": {"os": "", "os_family": "Ubuntu"},
|
||||
"__salt__": {
|
||||
"file.sed": MagicMock(),
|
||||
"cmd.run": MagicMock(),
|
||||
"cmd.retcode": MagicMock(return_value=0),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def tempfiles():
|
||||
tempfiles = []
|
||||
yield tempfiles
|
||||
for tempfile in tempfiles:
|
||||
try:
|
||||
os.remove(tempfile.name)
|
||||
except OSError:
|
||||
pass
|
||||
del tempfiles
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def patch_os():
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch("os.path.exists", MagicMock(return_value=True)):
|
||||
with patch("os.unlink", MagicMock()):
|
||||
with patch("os.symlink", MagicMock()):
|
||||
yield
|
||||
|
||||
|
||||
def test_zone_compare_equal(tempfiles):
|
||||
etc_localtime = create_tempfile_with_contents("a", tempfiles)
|
||||
zone_path = create_tempfile_with_contents("a", tempfiles)
|
||||
|
||||
with patch(GET_ZONE_FILE, lambda p: zone_path.name):
|
||||
with patch(GET_LOCALTIME_PATH, lambda: etc_localtime.name):
|
||||
|
||||
assert timezone.zone_compare("foo")
|
||||
|
||||
|
||||
def test_zone_compare_arch(tempfiles):
|
||||
"""
|
||||
test zone_compare function when OS is arch
|
||||
"""
|
||||
etc_localtime = create_tempfile_with_contents("a", tempfiles)
|
||||
zone_path = create_tempfile_with_contents("a", tempfiles)
|
||||
mock_zone = MagicMock(return_value="foo")
|
||||
patch_zone = patch.object(timezone, "get_zone", mock_zone)
|
||||
|
||||
with patch_zone:
|
||||
with patch.dict(timezone.__grains__, {"os_family": "Arch"}):
|
||||
with patch(GET_ZONE_FILE, lambda p: zone_path.name):
|
||||
with patch(GET_LOCALTIME_PATH, lambda: etc_localtime.name):
|
||||
with patch("os.path.isfile", return_value=False):
|
||||
assert timezone.zone_compare("foo")
|
||||
mock_zone.assert_called()
|
||||
|
||||
|
||||
def test_zone_compare_nonexistent(tempfiles):
|
||||
etc_localtime = create_tempfile_with_contents("a", tempfiles)
|
||||
|
||||
with patch(GET_ZONE_FILE, lambda p: "/foopath/nonexistent"):
|
||||
with patch(GET_LOCALTIME_PATH, lambda: etc_localtime.name):
|
||||
|
||||
pytest.raises(SaltInvocationError, timezone.zone_compare, "foo")
|
||||
|
||||
|
||||
def test_zone_compare_unequal(tempfiles):
|
||||
etc_localtime = create_tempfile_with_contents("a", tempfiles)
|
||||
zone_path = create_tempfile_with_contents("b", tempfiles)
|
||||
|
||||
with patch(GET_ZONE_FILE, lambda p: zone_path.name):
|
||||
with patch(GET_LOCALTIME_PATH, lambda: etc_localtime.name):
|
||||
|
||||
assert not timezone.zone_compare("foo")
|
||||
|
||||
|
||||
def test_missing_localtime():
|
||||
with patch(GET_ZONE_FILE, lambda p: "/nonexisting"):
|
||||
with patch(GET_LOCALTIME_PATH, lambda: "/also-missing"):
|
||||
pytest.raises(CommandExecutionError, timezone.zone_compare, "foo")
|
||||
|
||||
|
||||
def create_tempfile_with_contents(contents, tempfiles=None):
|
||||
temp = NamedTemporaryFile(delete=False)
|
||||
temp.write(salt.utils.stringutils.to_bytes(contents))
|
||||
temp.close()
|
||||
tempfiles.append(temp)
|
||||
return temp
|
||||
|
||||
|
||||
def test_get_zone_centos():
|
||||
"""
|
||||
Test CentOS is recognized
|
||||
:return:
|
||||
"""
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch.dict(timezone.__grains__, {"os": "centos"}):
|
||||
with patch(
|
||||
"salt.modules.timezone._get_zone_etc_localtime",
|
||||
MagicMock(return_value=TEST_TZ),
|
||||
):
|
||||
assert timezone.get_zone() == TEST_TZ
|
||||
|
||||
|
||||
def test_get_zone_os_family_rh_suse():
|
||||
"""
|
||||
Test RedHat and Suse are recognized
|
||||
:return:
|
||||
"""
|
||||
for osfamily in ["RedHat", "Suse"]:
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch.dict(timezone.__grains__, {"os_family": [osfamily]}):
|
||||
with patch(
|
||||
"salt.modules.timezone._get_zone_sysconfig",
|
||||
MagicMock(return_value=TEST_TZ),
|
||||
):
|
||||
assert timezone.get_zone() == TEST_TZ
|
||||
|
||||
|
||||
def test_get_zone_os_family_debian_gentoo():
|
||||
"""
|
||||
Test Debian and Gentoo are recognized
|
||||
:return:
|
||||
"""
|
||||
for osfamily in ["Debian", "Gentoo"]:
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch.dict(timezone.__grains__, {"os_family": [osfamily]}):
|
||||
with patch(
|
||||
"salt.modules.timezone._get_zone_etc_timezone",
|
||||
MagicMock(return_value=TEST_TZ),
|
||||
):
|
||||
assert timezone.get_zone() == TEST_TZ
|
||||
|
||||
|
||||
def test_get_zone_os_family_allbsd_nilinuxrt_slackware():
|
||||
"""
|
||||
Test *BSD, NILinuxRT and Slackware are recognized
|
||||
:return:
|
||||
"""
|
||||
for osfamily in ["FreeBSD", "OpenBSD", "NetBSD", "NILinuxRT", "Slackware"]:
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch.dict(timezone.__grains__, {"os_family": osfamily}):
|
||||
with patch(
|
||||
"salt.modules.timezone._get_zone_etc_localtime",
|
||||
MagicMock(return_value=TEST_TZ),
|
||||
):
|
||||
assert timezone.get_zone() == TEST_TZ
|
||||
|
||||
|
||||
def test_get_zone_os_family_slowlaris():
|
||||
"""
|
||||
Test Slowlaris is recognized
|
||||
:return:
|
||||
"""
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Solaris"]}):
|
||||
with patch(
|
||||
"salt.modules.timezone._get_zone_solaris",
|
||||
MagicMock(return_value=TEST_TZ),
|
||||
):
|
||||
assert timezone.get_zone() == TEST_TZ
|
||||
|
||||
|
||||
def test_get_zone_os_family_aix():
|
||||
"""
|
||||
Test IBM AIX is recognized
|
||||
:return:
|
||||
"""
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["AIX"]}):
|
||||
with patch(
|
||||
"salt.modules.timezone._get_zone_aix",
|
||||
MagicMock(return_value=TEST_TZ),
|
||||
):
|
||||
assert timezone.get_zone() == TEST_TZ
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_zone_os_family_nilinuxrt(patch_os):
|
||||
"""
|
||||
Test zone set on NILinuxRT
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["NILinuxRT"]}), patch.dict(
|
||||
timezone.__grains__, {"lsb_distrib_id": "nilrt"}
|
||||
):
|
||||
assert timezone.set_zone(TEST_TZ)
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_zone_os_family_allbsd_slackware(patch_os):
|
||||
"""
|
||||
Test zone set on *BSD and Slackware
|
||||
:return:
|
||||
"""
|
||||
for osfamily in ["FreeBSD", "OpenBSD", "NetBSD", "Slackware"]:
|
||||
with patch.dict(timezone.__grains__, {"os_family": osfamily}):
|
||||
assert timezone.set_zone(TEST_TZ)
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_zone_redhat(patch_os):
|
||||
"""
|
||||
Test zone set on RH series
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["RedHat"]}):
|
||||
assert timezone.set_zone(TEST_TZ)
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == ("/etc/sysconfig/clock", "^ZONE=.*", 'ZONE="UTC"')
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_zone_suse(patch_os):
|
||||
"""
|
||||
Test zone set on SUSE series
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Suse"]}):
|
||||
assert timezone.set_zone(TEST_TZ)
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == ("/etc/sysconfig/clock", "^TIMEZONE=.*", 'TIMEZONE="UTC"')
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_zone_gentoo(patch_os):
|
||||
"""
|
||||
Test zone set on Gentoo series
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Gentoo"]}):
|
||||
with patch("salt.utils.files.fopen", mock_open()) as m_open:
|
||||
assert timezone.set_zone(TEST_TZ)
|
||||
fh_ = m_open.filehandles["/etc/timezone"][0]
|
||||
assert fh_.call.args == ("/etc/timezone", "w"), fh_.call.args
|
||||
assert fh_.write_calls == ["UTC", "\n"], fh_.write_calls
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_zone_debian(patch_os):
|
||||
"""
|
||||
Test zone set on Debian series
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Debian"]}):
|
||||
with patch("salt.utils.files.fopen", mock_open()) as m_open:
|
||||
assert timezone.set_zone(TEST_TZ)
|
||||
fh_ = m_open.filehandles["/etc/timezone"][0]
|
||||
assert fh_.call.args == ("/etc/timezone", "w"), fh_.call.args
|
||||
assert fh_.write_calls == ["UTC", "\n"], fh_.write_calls
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_timedate_utc():
|
||||
"""
|
||||
Test get hwclock UTC/localtime
|
||||
:return:
|
||||
"""
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=True)):
|
||||
with patch("os.path.exists", MagicMock(return_value=True)):
|
||||
with patch("os.unlink", MagicMock()):
|
||||
with patch("os.symlink", MagicMock()):
|
||||
with patch(
|
||||
"salt.modules.timezone._timedatectl",
|
||||
MagicMock(return_value={"stdout": "rtc in local tz"}),
|
||||
):
|
||||
assert timezone.get_hwclock() == "UTC"
|
||||
with patch(
|
||||
"salt.modules.timezone._timedatectl",
|
||||
MagicMock(return_value={"stdout": "rtc in local tz:yes"}),
|
||||
):
|
||||
assert timezone.get_hwclock() == "localtime"
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_suse(patch_os):
|
||||
"""
|
||||
Test get hwclock on SUSE
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Suse"]}):
|
||||
timezone.get_hwclock()
|
||||
name, args, kwarg = timezone.__salt__["cmd.run"].mock_calls[0]
|
||||
assert args == (["tail", "-n", "1", "/etc/adjtime"],)
|
||||
assert kwarg == {"python_shell": False}
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_redhat(patch_os):
|
||||
"""
|
||||
Test get hwclock on RedHat
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["RedHat"]}):
|
||||
timezone.get_hwclock()
|
||||
name, args, kwarg = timezone.__salt__["cmd.run"].mock_calls[0]
|
||||
assert args == (["tail", "-n", "1", "/etc/adjtime"],)
|
||||
assert kwarg == {"python_shell": False}
|
||||
|
||||
|
||||
def _test_get_hwclock_debian(
|
||||
patch_os,
|
||||
): # TODO: Enable this when testing environment is working properly
|
||||
"""
|
||||
Test get hwclock on Debian
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Debian"]}):
|
||||
timezone.get_hwclock()
|
||||
name, args, kwarg = timezone.__salt__["cmd.run"].mock_calls[0]
|
||||
assert args == (["tail", "-n", "1", "/etc/adjtime"],)
|
||||
assert kwarg == {"python_shell": False}
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_solaris(patch_os):
|
||||
"""
|
||||
Test get hwclock on Solaris
|
||||
:return:
|
||||
"""
|
||||
# Incomplete
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Solaris"]}):
|
||||
assert timezone.get_hwclock() == "UTC"
|
||||
with patch("salt.utils.files.fopen", mock_open()):
|
||||
assert timezone.get_hwclock() == "localtime"
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_aix(patch_os):
|
||||
"""
|
||||
Test get hwclock on AIX
|
||||
:return:
|
||||
"""
|
||||
# Incomplete
|
||||
hwclock = "localtime"
|
||||
if not os.path.isfile("/etc/environment"):
|
||||
hwclock = "UTC"
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["AIX"]}):
|
||||
assert timezone.get_hwclock() == hwclock
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_slackware_with_adjtime(patch_os):
|
||||
"""
|
||||
Test get hwclock on Slackware with /etc/adjtime present
|
||||
:return:
|
||||
"""
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Slackware"]}):
|
||||
timezone.get_hwclock()
|
||||
name, args, kwarg = timezone.__salt__["cmd.run"].mock_calls[0]
|
||||
assert args == (["tail", "-n", "1", "/etc/adjtime"],)
|
||||
assert kwarg == {"python_shell": False}
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_get_hwclock_slackware_without_adjtime():
|
||||
"""
|
||||
Test get hwclock on Slackware without /etc/adjtime present
|
||||
:return:
|
||||
"""
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=False)):
|
||||
with patch("os.path.exists", MagicMock(return_value=False)):
|
||||
with patch("os.unlink", MagicMock()):
|
||||
with patch("os.symlink", MagicMock()):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Slackware"]}):
|
||||
with patch(
|
||||
"salt.utils.files.fopen", mock_open(read_data="UTC")
|
||||
):
|
||||
assert timezone.get_hwclock() == "UTC"
|
||||
with patch(
|
||||
"salt.utils.files.fopen", mock_open(read_data="localtime")
|
||||
):
|
||||
assert timezone.get_hwclock() == "localtime"
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_timedatectl():
|
||||
"""
|
||||
Test set hwclock with timedatectl
|
||||
:return:
|
||||
"""
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=True)):
|
||||
timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["cmd.retcode"].mock_calls[0]
|
||||
assert args == (["timedatectl", "set-local-rtc", "false"],)
|
||||
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=True)):
|
||||
timezone.set_hwclock("localtime")
|
||||
with patch("salt.utils.path.which", MagicMock(return_value=True)):
|
||||
name, args, kwargs = timezone.__salt__["cmd.retcode"].mock_calls[1]
|
||||
assert args == (["timedatectl", "set-local-rtc", "true"],)
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_aix_nilinuxrt(patch_os):
|
||||
"""
|
||||
Test set hwclock on AIX and NILinuxRT
|
||||
:return:
|
||||
"""
|
||||
for osfamily in ["AIX", "NILinuxRT"]:
|
||||
with patch.dict(timezone.__grains__, {"os_family": osfamily}):
|
||||
with pytest.raises(SaltInvocationError):
|
||||
assert timezone.set_hwclock("forty two")
|
||||
assert timezone.set_hwclock("UTC")
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_solaris(patch_os):
|
||||
"""
|
||||
Test set hwclock on Solaris
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(
|
||||
timezone.__grains__, {"os_family": ["Solaris"], "cpuarch": "x86"}
|
||||
):
|
||||
with pytest.raises(SaltInvocationError):
|
||||
assert timezone.set_hwclock("forty two")
|
||||
assert timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["cmd.retcode"].mock_calls[0]
|
||||
assert args == (["rtc", "-z", "GMT"],)
|
||||
assert kwargs == {"python_shell": False}
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_arch(patch_os):
|
||||
"""
|
||||
Test set hwclock on arch
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Arch"]}):
|
||||
assert timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["cmd.retcode"].mock_calls[0]
|
||||
assert args == (["timezonectl", "set-local-rtc", "false"],)
|
||||
assert kwargs == {"python_shell": False}
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_redhat(patch_os):
|
||||
"""
|
||||
Test set hwclock on RedHat
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["RedHat"]}):
|
||||
assert timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == ("/etc/sysconfig/clock", "^ZONE=.*", 'ZONE="TEST_TIMEZONE"')
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_suse(patch_os):
|
||||
"""
|
||||
Test set hwclock on SUSE
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Suse"]}):
|
||||
assert timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == (
|
||||
"/etc/sysconfig/clock",
|
||||
"^TIMEZONE=.*",
|
||||
'TIMEZONE="TEST_TIMEZONE"',
|
||||
)
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_debian(patch_os):
|
||||
"""
|
||||
Test set hwclock on Debian
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Debian"]}):
|
||||
assert timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == ("/etc/default/rcS", "^UTC=.*", "UTC=yes")
|
||||
|
||||
assert timezone.set_hwclock("localtime")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[1]
|
||||
assert args == ("/etc/default/rcS", "^UTC=.*", "UTC=no")
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_gentoo(patch_os):
|
||||
"""
|
||||
Test set hwclock on Gentoo
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Gentoo"]}):
|
||||
with pytest.raises(SaltInvocationError):
|
||||
timezone.set_hwclock("forty two")
|
||||
|
||||
timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == ("/etc/conf.d/hwclock", "^clock=.*", 'clock="UTC"')
|
||||
|
||||
timezone.set_hwclock("localtime")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[1]
|
||||
assert args == ("/etc/conf.d/hwclock", "^clock=.*", 'clock="local"')
|
||||
|
||||
|
||||
@skipIf(salt.utils.platform.is_windows(), "os.symlink not available in Windows")
|
||||
def test_set_hwclock_slackware(patch_os):
|
||||
"""
|
||||
Test set hwclock on Slackware
|
||||
:return:
|
||||
"""
|
||||
with patch(
|
||||
"salt.modules.timezone.get_zone", MagicMock(return_value="TEST_TIMEZONE")
|
||||
):
|
||||
with patch.dict(timezone.__grains__, {"os_family": ["Slackware"]}):
|
||||
with pytest.raises(SaltInvocationError):
|
||||
timezone.set_hwclock("forty two")
|
||||
|
||||
timezone.set_hwclock("UTC")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[0]
|
||||
assert args == ("/etc/hardwareclock", "^(UTC|localtime)", "UTC")
|
||||
|
||||
timezone.set_hwclock("localtime")
|
||||
name, args, kwargs = timezone.__salt__["file.sed"].mock_calls[1]
|
||||
assert args == ("/etc/hardwareclock", "^(UTC|localtime)", "localtime")
|
|
@@ -1,6 +1,8 @@
 """
 Tests for the win_pkg module
 """
+import logging
+
 import pytest
 
 import salt.modules.config as config
@@ -279,6 +281,53 @@ def test_pkg_install_single_pkg():
     assert "-e True -test_flag True" in str(mock_cmd_run_all.call_args[0])
 
 
+def test_pkg_install_log_message(caplog):
+    """
+    test pkg.install pkg with extra_install_flags
+    """
+    ret__get_package_info = {
+        "3.03": {
+            "uninstaller": "%program.exe",
+            "reboot": False,
+            "msiexec": False,
+            "installer": "runme.exe",
+            "uninstall_flags": "/S",
+            "locale": "en_US",
+            "install_flags": "/s",
+            "full_name": "Firebox 3.03 (x86 en-US)",
+        }
+    }
+
+    mock_cmd_run_all = MagicMock(return_value={"retcode": 0})
+    with patch.object(
+        salt.utils.data, "is_true", MagicMock(return_value=True)
+    ), patch.object(
+        win_pkg, "_get_package_info", MagicMock(return_value=ret__get_package_info)
+    ), patch.dict(
+        win_pkg.__salt__,
+        {
+            "pkg_resource.parse_targets": MagicMock(
+                return_value=[{"firebox": "3.03"}, None]
+            ),
+            "cp.is_cached": MagicMock(return_value="C:\\fake\\path.exe"),
+            "cmd.run_all": mock_cmd_run_all,
+        },
+    ), caplog.at_level(
+        logging.DEBUG
+    ):
+        win_pkg.install(
+            pkgs=["firebox"],
+            version="3.03",
+            extra_install_flags="-e True -test_flag True",
+        )
+        assert (
+            'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /s /c "runme.exe" /s -e '
+            "True -test_flag True"
+        ).lower() in [x.lower() for x in caplog.messages]
+        assert "PKG : pwd: ".lower() in [x.lower() for x in caplog.messages]
+        assert "PKG : retcode: 0" in caplog.messages
+
+
 def test_pkg_install_multiple_pkgs():
     """
     test pkg.install pkg with extra_install_flags
|
|
|
@@ -25,12 +25,11 @@ def utils_patch():
 
 
 @pytest.fixture
-def configure_loader_modules():
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
+def configure_loader_modules(minion_opts):
     utils = salt.loader.utils(
-        opts, whitelist=["zfs", "args", "systemd", "path", "platform"]
+        minion_opts, whitelist=["zfs", "args", "systemd", "path", "platform"]
     )
-    zfs_obj = {zfs: {"__opts__": opts, "__utils__": utils}}
+    zfs_obj = {zfs: {"__opts__": minion_opts, "__utils__": utils}}
     return zfs_obj
 
 
|
|
|
@@ -4,7 +4,6 @@ Tests for salt.modules.zfs on Solaris
 
 import pytest
 
-import salt.config
 import salt.loader
 import salt.modules.zfs as zfs
 import salt.utils.zfs
@@ -18,12 +17,11 @@ def utils_patch():
 
 
 @pytest.fixture
-def configure_loader_modules():
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    utils = salt.loader.utils(opts, whitelist=["zfs"])
+def configure_loader_modules(minion_opts):
+    utils = salt.loader.utils(minion_opts, whitelist=["zfs"])
     zfs_obj = {
         zfs: {
-            "__opts__": opts,
+            "__opts__": minion_opts,
             "__grains__": {
                 "osarch": "sparcv9",
                 "os_family": "Solaris",
@@ -38,7 +36,7 @@ def configure_loader_modules():
     return zfs_obj
 
 
-@pytest.mark.skip_unless_on_sunos(reason="test to ensure no -t only applies to Solaris")
+@pytest.mark.skip_unless_on_sunos
 def test_get_success_solaris():
     """
     Tests zfs get success
|
|
|
@@ -4,7 +4,6 @@ Tests for salt.modules.zfs on Solaris
 
 import pytest
 
-import salt.config
 import salt.loader
 import salt.modules.zfs as zfs
 import salt.utils.zfs
@@ -18,12 +17,11 @@ def utils_patch():
 
 
 @pytest.fixture
-def configure_loader_modules():
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    utils = salt.loader.utils(opts, whitelist=["zfs"])
+def configure_loader_modules(minion_opts):
+    utils = salt.loader.utils(minion_opts, whitelist=["zfs"])
    zfs_obj = {
        zfs: {
-            "__opts__": opts,
+            "__opts__": minion_opts,
            "__grains__": {
                "osarch": "sparcv9",
                "os_family": "Solaris",
@@ -38,7 +36,7 @@ def configure_loader_modules():
     return zfs_obj
 
 
-@pytest.mark.skip_unless_on_sunos(reason="test to ensure no -t only applies to Solaris")
+@pytest.mark.skip_unless_on_sunos
 def test_get_success_solaris():
     """
     Tests zfs get success
|
|
|
@@ -26,12 +26,16 @@ def utils_patch():
 
 
 @pytest.fixture
-def configure_loader_modules():
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
+def configure_loader_modules(minion_opts):
     utils = salt.loader.utils(
-        opts, whitelist=["zfs", "args", "systemd", "path", "platform"]
+        minion_opts, whitelist=["zfs", "args", "systemd", "path", "platform"]
     )
-    zpool_obj = {zpool: {"__opts__": opts, "__utils__": utils}}
+    zpool_obj = {
+        zpool: {
+            "__opts__": minion_opts,
+            "__utils__": utils,
+        },
+    }
+
     return zpool_obj
 
|
@@ -4,8 +4,8 @@ import sys
 
 import pytest
 
-import salt.config
 import salt.output.highstate as highstate
+import salt.utils.stringutils
 from tests.support.mock import patch
 from tests.support.unit import skipIf
 
@@ -13,15 +13,8 @@ log = logging.getLogger(__name__)
 
 
 @pytest.fixture
-def configure_loader_modules():
-    minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    overrides = {
-        "extension_modules": "",
-        "optimization_order": [0, 1, 2],
-        "color": False,
-        "state_output_pct": True,
-    }
-    minion_opts.update(overrides)
+def configure_loader_modules(minion_opts):
+    minion_opts.update({"color": False, "state_output_pct": True})
     return {highstate: {"__opts__": minion_opts}}
 
 
|
|
30
tests/pytests/unit/pillar/test_mongo.py
Normal file
@@ -0,0 +1,30 @@
import pytest

import salt.exceptions
import salt.pillar.mongo as mongo
from tests.support.mock import patch


@pytest.fixture
def configure_loader_modules():

    return {
        mongo: {
            "__opts__": {
                "mongo.uri": "mongodb://root:pass@localhost27017/salt?authSource=admin"
            }
        }
    }


def test_config_exception():
    opts = {
        "mongo.host": "localhost",
        "mongo.port": 27017,
        "mongo.user": "root",
        "mongo.password": "pass",
        "mongo.uri": "mongodb://root:pass@localhost27017/salt?authSource=admin",
    }
    with patch.dict(mongo.__opts__, opts):
        with pytest.raises(salt.exceptions.SaltConfigurationError):
            mongo.ext_pillar("minion1", {})
|
@@ -4,18 +4,18 @@ from textwrap import dedent
 
 import pytest
 
-import salt.config
 import salt.renderers.gpg as gpg
 from salt.exceptions import SaltRenderError
 from tests.support.mock import MagicMock, Mock, call, patch
 
 
 @pytest.fixture
-def configure_loader_modules():
+def configure_loader_modules(minion_opts):
     """
     GPG renderer configuration
     """
-    return {gpg: {"__opts__": {"gpg_decrypt_must_succeed": True}}}
+    minion_opts["gpg_decrypt_must_succeed"] = True
+    return {gpg: {"__opts__": minion_opts}}
 
 
 def test__get_gpg_exec():
@@ -256,7 +256,7 @@ def test_render_without_cache():
     popen_mock.assert_has_calls([gpg_call] * 3)
 
 
-def test_render_with_cache():
+def test_render_with_cache(minion_opts):
     key_dir = "/etc/salt/gpgkeys"
     secret = "Use more salt."
     expected = "\n".join([secret] * 3)
@@ -274,7 +274,6 @@ def test_render_with_cache():
     """
     )
 
-    minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
     minion_opts["gpg_cache"] = True
     with patch.dict(gpg.__opts__, minion_opts):
         with patch("salt.renderers.gpg.Popen") as popen_mock:
|
|
31
tests/pytests/unit/returners/test_mongo_future_return.py
Normal file
@@ -0,0 +1,31 @@
import pytest

import salt.exceptions
import salt.returners.mongo_future_return as mongo
from tests.support.mock import patch


@pytest.fixture
def configure_loader_modules():

    return {
        mongo: {
            "__opts__": {
                "mongo.uri": "mongodb://root:pass@localhost27017/salt?authSource=admin"
            }
        }
    }


@patch("salt.returners.mongo_future_return.PYMONGO_VERSION", "4.3.2", create=True)
def test_config_exception():
    opts = {
        "mongo.host": "localhost",
        "mongo.port": 27017,
        "mongo.user": "root",
        "mongo.password": "pass",
        "mongo.uri": "mongodb://root:pass@localhost27017/salt?authSource=admin",
    }
    with patch.dict(mongo.__opts__, opts):
        with pytest.raises(salt.exceptions.SaltConfigurationError):
            mongo.returner({})
|
@@ -3,7 +3,6 @@ import shutil
 
 import pytest
 
-import salt.config
 import salt.spm
 import salt.utils.files
 from tests.support.mock import MagicMock, patch
@@ -61,8 +60,8 @@ class SPMTestUserInterface(salt.spm.SPMUserInterface):
 
 
 @pytest.fixture()
-def setup_spm(tmp_path):
-    minion_config = salt.config.DEFAULT_MINION_OPTS.copy()
+def setup_spm(tmp_path, minion_opts):
+    minion_config = minion_opts.copy()
     minion_config.update(
         {
             "spm_logfile": str(tmp_path / "log"),
@@ -90,7 +89,6 @@ def setup_spm(tmp_path):
     )
     ui = SPMTestUserInterface()
     client = salt.spm.SPMClient(ui, minion_config)
-    minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
     return tmp_path, ui, client, minion_config, minion_opts
 
 
|
|
65
tests/pytests/unit/state/test_global_state_conditions.py
Normal file
@@ -0,0 +1,65 @@
import logging

import pytest

import salt.config
import salt.state

log = logging.getLogger(__name__)


@pytest.fixture
def minion_config():
    cfg = salt.config.DEFAULT_MINION_OPTS.copy()
    cfg["file_client"] = "local"
    cfg["id"] = "foo01"
    return cfg


def test_global_state_conditions_unconfigured(minion_config):
    state_obj = salt.state.State(minion_config)
    ret = state_obj._match_global_state_conditions(
        "test.succeed_with_changes", "test", "mytest"
    )
    assert ret is None


@pytest.mark.parametrize("condition", [["foo01"], "foo01"])
def test_global_state_conditions_match(minion_config, condition):
    minion_config["global_state_conditions"] = {
        "test": condition,
    }
    state_obj = salt.state.State(minion_config)
    ret = state_obj._match_global_state_conditions(
        "test.succeed_with_changes", "test", "mytest"
    )
    assert ret is None


def test_global_state_conditions_no_match(minion_config):
    minion_config["global_state_conditions"] = {
        "test.succeed_with_changes": ["bar01"],
    }
    state_obj = salt.state.State(minion_config)
    ret = state_obj._match_global_state_conditions(
        "test.succeed_with_changes", "test", "mytest"
    )
    assert ret == {
        "changes": {},
        "comment": "Failed to meet global state conditions. State not called.",
        "name": "mytest",
        "result": None,
    }


def test_global_state_conditions_match_one_of_many(minion_config):
    minion_config["global_state_conditions"] = {
        "test.succeed_with_changes": ["bar01"],
        "test": ["baz01"],
        "*": ["foo01"],
    }
    state_obj = salt.state.State(minion_config)
    ret = state_obj._match_global_state_conditions(
        "test.succeed_with_changes", "test", "mytest"
    )
    assert ret is None
|
@ -4,7 +4,7 @@
|
|||
|
||||
import logging
|
||||
|
||||
import pytest # pylint: disable=unused-import
|
||||
import pytest
|
||||
|
||||
import salt.exceptions
|
||||
import salt.state
|
||||
|
@ -34,7 +34,7 @@ def test_format_log_non_ascii_character():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_render_error_on_invalid_requisite():
|
||||
def test_render_error_on_invalid_requisite(minion_opts):
|
||||
"""
|
||||
Test that the state compiler correctly deliver a rendering
|
||||
exception when a requisite cannot be resolved
|
||||
|
@ -74,14 +74,13 @@ def test_render_error_on_invalid_requisite():
|
|||
]
|
||||
)
|
||||
}
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
minion_opts["pillar"] = {"git": OrderedDict([("test1", "test")])}
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
with pytest.raises(salt.exceptions.SaltRenderError):
|
||||
state_obj.call_high(high_data)
|
||||
|
||||
|
||||
def test_verify_onlyif_parse():
|
||||
def test_verify_onlyif_parse(minion_opts):
|
||||
low_data = {
|
||||
"onlyif": [{"fun": "test.arg", "args": ["arg1", "arg2"]}],
|
||||
"name": "mysql-server-5.7",
|
||||
|
@ -100,13 +99,12 @@ def test_verify_onlyif_parse():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, "")
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_parse_deep_return():
|
||||
def test_verify_onlyif_parse_deep_return(minion_opts):
|
||||
low_data = {
|
||||
"state": "test",
|
||||
"name": "foo",
|
||||
|
@ -128,13 +126,12 @@ def test_verify_onlyif_parse_deep_return():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, "")
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_cmd_error():
|
||||
def test_verify_onlyif_cmd_error(minion_opts):
|
||||
"""
|
||||
Simulates a failure in cmd.retcode from onlyif
|
||||
This could occur if runas is specified with a user that does not exist
|
||||
|
@ -159,7 +156,6 @@ def test_verify_onlyif_cmd_error():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
mock = MagicMock(side_effect=CommandExecutionError("Boom!"))
|
||||
with patch.dict(state_obj.functions, {"cmd.retcode": mock}):
|
||||
|
@ -170,7 +166,7 @@ def test_verify_onlyif_cmd_error():
|
|||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_cmd_error():
|
||||
def test_verify_unless_cmd_error(minion_opts):
|
||||
"""
|
||||
Simulates a failure in cmd.retcode from unless
|
||||
This could occur if runas is specified with a user that does not exist
|
||||
|
@ -195,7 +191,6 @@ def test_verify_unless_cmd_error():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
mock = MagicMock(side_effect=CommandExecutionError("Boom!"))
|
||||
with patch.dict(state_obj.functions, {"cmd.retcode": mock}):
|
||||
|
@ -206,7 +201,7 @@ def test_verify_unless_cmd_error():
|
|||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_list_cmd():
|
||||
def test_verify_unless_list_cmd(minion_opts):
|
||||
"""
|
||||
If any of the unless commands return False (non 0) then the state should
|
||||
run (no skip_watch).
|
||||
|
@ -228,13 +223,12 @@ def test_verify_unless_list_cmd():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_list_cmd_different_order():
|
||||
def test_verify_unless_list_cmd_different_order(minion_opts):
|
||||
"""
|
||||
If any of the unless commands return False (non 0) then the state should
|
||||
run (no skip_watch). The order shouldn't matter.
|
||||
|
@ -256,13 +250,12 @@ def test_verify_unless_list_cmd_different_order():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_list_cmd_different_order():
|
||||
def test_verify_onlyif_list_cmd_different_order(minion_opts):
|
||||
low_data = {
|
||||
"state": "cmd",
|
||||
"name": 'echo "something"',
|
||||
|
@ -281,13 +274,12 @@ def test_verify_onlyif_list_cmd_different_order():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_list_cmd_valid():
|
||||
def test_verify_unless_list_cmd_valid(minion_opts):
|
||||
"""
|
||||
If any of the unless commands return False (non 0) then the state should
|
||||
run (no skip_watch). This tests all commands return False.
|
||||
|
@ -306,13 +298,12 @@ def test_verify_unless_list_cmd_valid():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_list_cmd_valid():
|
||||
def test_verify_onlyif_list_cmd_valid(minion_opts):
|
||||
low_data = {
|
||||
"state": "cmd",
|
||||
"name": 'echo "something"',
|
||||
|
@ -327,13 +318,12 @@ def test_verify_onlyif_list_cmd_valid():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_list_cmd_invalid():
|
||||
def test_verify_unless_list_cmd_invalid(minion_opts):
|
||||
"""
|
||||
If any of the unless commands return False (non 0) then the state should
|
||||
run (no skip_watch). This tests all commands return True
|
||||
|
@ -356,13 +346,12 @@ def test_verify_unless_list_cmd_invalid():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_list_cmd_invalid():
|
||||
def test_verify_onlyif_list_cmd_invalid(minion_opts):
|
||||
low_data = {
|
||||
"state": "cmd",
|
||||
"name": 'echo "something"',
|
||||
|
@ -381,13 +370,12 @@ def test_verify_onlyif_list_cmd_invalid():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_parse():
|
||||
def test_verify_unless_parse(minion_opts):
|
||||
low_data = {
|
||||
"unless": [{"fun": "test.arg", "args": ["arg1", "arg2"]}],
|
||||
"name": "mysql-server-5.7",
|
||||
|
@ -410,13 +398,12 @@ def test_verify_unless_parse():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, "")
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_unless_parse_deep_return():
|
||||
def test_verify_unless_parse_deep_return(minion_opts):
|
||||
low_data = {
|
||||
"state": "test",
|
||||
"name": "foo",
|
||||
|
@ -438,13 +425,12 @@ def test_verify_unless_parse_deep_return():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, "")
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_creates():
|
||||
def test_verify_creates(minion_opts):
|
||||
low_data = {
|
||||
"state": "cmd",
|
||||
"name": 'echo "something"',
|
||||
|
@ -457,7 +443,6 @@ def test_verify_creates():
|
|||
}
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
with patch("os.path.exists") as path_mock:
|
||||
path_mock.return_value = True
|
||||
|
@ -482,7 +467,7 @@ def test_verify_creates():
|
|||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_creates_list():
|
||||
def test_verify_creates_list(minion_opts):
|
||||
low_data = {
|
||||
"state": "cmd",
|
||||
"name": 'echo "something"',
|
||||
|
@ -495,7 +480,6 @@ def test_verify_creates_list():
|
|||
}
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
with patch("os.path.exists") as path_mock:
|
||||
path_mock.return_value = True
|
||||
|
@ -534,7 +518,7 @@ def _expand_win_path(path):
|
|||
return path
|
||||
|
||||
|
||||
def test_verify_onlyif_parse_slots(tmp_path):
|
||||
def test_verify_onlyif_parse_slots(tmp_path, minion_opts):
|
||||
name = str(tmp_path / "testfile.txt")
|
||||
with salt.utils.files.fopen(name, "w") as fp:
|
||||
fp.write("file-contents")
|
||||
|
@ -561,13 +545,12 @@ def test_verify_onlyif_parse_slots(tmp_path):
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, "")
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_list_cmd():
|
||||
def test_verify_onlyif_list_cmd(minion_opts):
|
||||
low_data = {
|
||||
"state": "cmd",
|
||||
"name": 'echo "something"',
|
||||
|
@ -586,13 +569,12 @@ def test_verify_onlyif_list_cmd():
|
|||
for key in ("__sls__", "__id__", "name"):
|
||||
expected_result[key] = low_data.get(key)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_onlyif(low_data, {})
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_cmd_args():
|
||||
def test_verify_onlyif_cmd_args(minion_opts):
|
||||
"""
|
||||
Verify cmd.run state arguments are properly passed to cmd.retcode in onlyif
|
||||
"""
|
||||
|
@ -617,7 +599,6 @@ def test_verify_onlyif_cmd_args():
|
|||
}
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
mock = MagicMock()
|
||||
with patch.dict(state_obj.functions, {"cmd.retcode": mock}):
|
||||
|
@ -639,7 +620,7 @@ def test_verify_onlyif_cmd_args():
|
|||
)
|
||||
|
||||
|
||||
def test_verify_unless_parse_slots(tmp_path):
|
||||
def test_verify_unless_parse_slots(tmp_path, minion_opts):
|
||||
name = str(tmp_path / "testfile.txt")
|
||||
with salt.utils.files.fopen(name, "w") as fp:
|
||||
fp.write("file-contents")
|
||||
|
@ -671,13 +652,12 @@ def test_verify_unless_parse_slots(tmp_path):
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
return_result = state_obj._run_check_unless(low_data, "")
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_retry_parsing():
|
||||
def test_verify_retry_parsing(minion_opts):
|
||||
low_data = {
|
||||
"state": "file",
|
||||
"name": "/tmp/saltstack.README.rst",
|
||||
|
@ -706,7 +686,6 @@ def test_verify_retry_parsing():
|
|||
expected_result[key] = low_data.get(key)
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
minion_opts["test"] = True
|
||||
minion_opts["file_client"] = "local"
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
|
@ -719,7 +698,7 @@ def test_verify_retry_parsing():
|
|||
assert set(expected_result).issubset(set(state_obj.call(low_data)))
|
||||
|
||||
|
||||
def test_render_requisite_require_disabled(tmp_path):
|
||||
def test_render_requisite_require_disabled(tmp_path, minion_opts):
|
||||
"""
|
||||
Test that the state compiler correctly deliver a rendering
|
||||
exception when a requisite cannot be resolved
|
||||
|
@ -749,7 +728,6 @@ def test_render_requisite_require_disabled(tmp_path):
|
|||
},
|
||||
}
|
||||
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
minion_opts["cachedir"] = str(tmp_path)
|
||||
minion_opts["disabled_requisites"] = ["require"]
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
|
@ -760,7 +738,7 @@ def test_render_requisite_require_disabled(tmp_path):
|
|||
assert run_num == 0
|
||||
|
||||
|
||||
def test_render_requisite_require_in_disabled(tmp_path):
|
||||
def test_render_requisite_require_in_disabled(tmp_path, minion_opts):
|
||||
"""
|
||||
Test that the state compiler correctly deliver a rendering
|
||||
exception when a requisite cannot be resolved
|
||||
|
@ -795,7 +773,6 @@ def test_render_requisite_require_in_disabled(tmp_path):
|
|||
),
|
||||
}
|
||||
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
minion_opts["cachedir"] = str(tmp_path)
|
||||
minion_opts["disabled_requisites"] = ["require_in"]
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
|
@ -806,7 +783,7 @@ def test_render_requisite_require_in_disabled(tmp_path):
|
|||
assert run_num == 0
|
||||
|
||||
|
||||
def test_call_chunk_sub_state_run():
|
||||
def test_call_chunk_sub_state_run(minion_opts):
|
||||
"""
|
||||
Test running a batch of states with an external runner
|
||||
that returns sub_state_run
|
||||
|
@ -840,7 +817,6 @@ def test_call_chunk_sub_state_run():
|
|||
)
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
with patch("salt.state.State.call", return_value=mock_call_return):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
minion_opts["disabled_requisites"] = ["require"]
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
ret = state_obj.call_chunk(low_data, {}, {})
|
||||
|
@ -852,7 +828,7 @@ def test_call_chunk_sub_state_run():
|
|||
assert sub_state["__sls__"] == "external"
|
||||
|
||||
|
||||
def test_aggregate_requisites():
|
||||
def test_aggregate_requisites(minion_opts):
|
||||
"""
|
||||
Test to ensure that the requisites are included in the aggregated low state.
|
||||
"""
|
||||
|
@ -960,7 +936,6 @@ def test_aggregate_requisites():
|
|||
]
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
low_ret = state_obj._aggregate_requisites(low, chunks)
|
||||
|
||||
|
@ -975,7 +950,7 @@ def test_aggregate_requisites():
|
|||
]
|
||||
|
||||
|
||||
def test_mod_aggregate():
|
||||
def test_mod_aggregate(minion_opts):
|
||||
"""
|
||||
Test to ensure that the requisites are included in the aggregated low state.
|
||||
"""
|
||||
|
@ -1046,7 +1021,6 @@ def test_mod_aggregate():
|
|||
}
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
with patch.dict(
|
||||
state_obj.states,
|
||||
|
|
|
@ -16,10 +16,8 @@ log = logging.getLogger(__name__)
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def state_obj(tmp_path):
|
||||
def state_obj(minion_opts):
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
minion_opts["cachedir"] = str(tmp_path)
|
||||
yield salt.state.State(minion_opts)
|
||||
|
||||
|
||||
|
|
|
@ -2,16 +2,15 @@ import itertools
|
|||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.state
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def master_opts():
|
||||
def master_opts(master_opts):
|
||||
"""
|
||||
Return a subset of master options to the minion
|
||||
"""
|
||||
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
|
||||
opts = master_opts.copy()
|
||||
mopts = {}
|
||||
mopts["file_roots"] = opts["file_roots"]
|
||||
mopts["top_file_merging_strategy"] = opts["top_file_merging_strategy"]
|
||||
|
@ -40,11 +39,10 @@ class MockBaseHighStateClient:
|
|||
return self.opts
|
||||
|
||||
|
||||
def test_state_aggregate_option_behavior(master_opts):
|
||||
def test_state_aggregate_option_behavior(master_opts, minion_opts):
|
||||
"""
|
||||
Ensure state_aggregate can be overridden on the minion
|
||||
"""
|
||||
minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
possible = [None, True, False, ["pkg"]]
|
||||
expected_result = [
|
||||
True,
|
||||
|
|
|
@ -6,28 +6,15 @@ import textwrap
|
|||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.states.boto_cloudfront as boto_cloudfront
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def setup_vars():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
|
||||
name = "my_distribution"
|
||||
base_ret = {"name": name, "changes": {}}
|
||||
|
||||
# Most attributes elided since there are so many required ones
|
||||
config = {"Enabled": True, "HttpVersion": "http2"}
|
||||
tags = {"test_tag1": "value1"}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(setup_vars):
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(
|
||||
salt.config.DEFAULT_MINION_OPTS.copy(),
|
||||
minion_opts,
|
||||
whitelist=[
|
||||
"boto3",
|
||||
"dictdiffer",
|
||||
|
|
|
@ -4,7 +4,6 @@ import string
|
|||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.states.boto_cloudtrail as boto_cloudtrail
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
@ -64,21 +63,20 @@ def global_config():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["grains"] = salt.loader.grains(opts)
|
||||
def configure_loader_modules(minion_opts):
|
||||
minion_opts["grains"] = salt.loader.grains(minion_opts)
|
||||
ctx = {}
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto", "boto3", "args", "systemd", "path", "platform", "reg"],
|
||||
context=ctx,
|
||||
)
|
||||
serializers = salt.loader.serializers(opts)
|
||||
serializers = salt.loader.serializers(minion_opts)
|
||||
funcs = salt.loader.minion_mods(
|
||||
opts, context=ctx, utils=utils, whitelist=["boto_cloudtrail"]
|
||||
minion_opts, context=ctx, utils=utils, whitelist=["boto_cloudtrail"]
|
||||
)
|
||||
salt_states = salt.loader.states(
|
||||
opts=opts,
|
||||
opts=minion_opts,
|
||||
functions=funcs,
|
||||
utils=utils,
|
||||
whitelist=["boto_cloudtrail"],
|
||||
|
@ -86,7 +84,7 @@ def configure_loader_modules():
|
|||
)
|
||||
return {
|
||||
boto_cloudtrail: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": funcs,
|
||||
"__utils__": utils,
|
||||
"__states__": salt_states,
|
||||
|
|
|
@ -4,7 +4,6 @@ import string
|
|||
|
||||
import pytest
|
||||
|
||||
import salt.config
|
||||
import salt.loader
|
||||
import salt.states.boto_cloudwatch_event as boto_cloudwatch_event
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
@ -52,21 +51,20 @@ def global_config():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["grains"] = salt.loader.grains(opts)
|
||||
def configure_loader_modules(minion_opts):
|
||||
minion_opts["grains"] = salt.loader.grains(minion_opts)
|
||||
ctx = {}
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto3", "args", "systemd", "path", "platform"],
|
||||
context=ctx,
|
||||
)
|
||||
serializers = salt.loader.serializers(opts)
|
||||
serializers = salt.loader.serializers(minion_opts)
|
||||
funcs = funcs = salt.loader.minion_mods(
|
||||
opts, context=ctx, utils=utils, whitelist=["boto_cloudwatch_event"]
|
||||
minion_opts, context=ctx, utils=utils, whitelist=["boto_cloudwatch_event"]
|
||||
)
|
||||
salt_states = salt.loader.states(
|
||||
opts=opts,
|
||||
opts=minion_opts,
|
||||
functions=funcs,
|
||||
utils=utils,
|
||||
whitelist=["boto_cloudwatch_event"],
|
||||
|
@ -74,7 +72,7 @@ def configure_loader_modules():
|
|||
)
|
||||
return {
|
||||
boto_cloudwatch_event: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": funcs,
|
||||
"__utils__": utils,
|
||||
"__states__": salt_states,
|
||||
|
|
|
@ -51,21 +51,20 @@ class GlobalConfig:
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["grains"] = salt.loader.grains(opts)
|
||||
def configure_loader_modules(minion_opts):
|
||||
minion_opts["grains"] = salt.loader.grains(minion_opts)
|
||||
ctx = {}
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto3", "args", "systemd", "path", "platform"],
|
||||
context=ctx,
|
||||
)
|
||||
serializers = salt.loader.serializers(opts)
|
||||
serializers = salt.loader.serializers(minion_opts)
|
||||
funcs = funcs = salt.loader.minion_mods(
|
||||
opts, context=ctx, utils=utils, whitelist=["boto_elasticsearch_domain"]
|
||||
minion_opts, context=ctx, utils=utils, whitelist=["boto_elasticsearch_domain"]
|
||||
)
|
||||
salt_states = salt.loader.states(
|
||||
opts=opts,
|
||||
opts=minion_opts,
|
||||
functions=funcs,
|
||||
utils=utils,
|
||||
whitelist=["boto_elasticsearch_domain"],
|
||||
|
@ -73,7 +72,7 @@ def configure_loader_modules():
|
|||
)
|
||||
return {
|
||||
boto_elasticsearch_domain: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": funcs,
|
||||
"__utils__": utils,
|
||||
"__states__": salt_states,
|
||||
|
|
|
@ -95,21 +95,20 @@ class GlobalConfig:
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["grains"] = salt.loader.grains(opts)
|
||||
def configure_loader_modules(minion_opts):
|
||||
minion_opts["grains"] = salt.loader.grains(minion_opts)
|
||||
ctx = {}
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto3", "args", "systemd", "path", "platform"],
|
||||
context=ctx,
|
||||
)
|
||||
serializers = salt.loader.serializers(opts)
|
||||
serializers = salt.loader.serializers(minion_opts)
|
||||
funcs = funcs = salt.loader.minion_mods(
|
||||
opts, context=ctx, utils=utils, whitelist=["boto_iot"]
|
||||
minion_opts, context=ctx, utils=utils, whitelist=["boto_iot"]
|
||||
)
|
||||
salt_states = salt.loader.states(
|
||||
opts=opts,
|
||||
opts=minion_opts,
|
||||
functions=funcs,
|
||||
utils=utils,
|
||||
whitelist=["boto_iot"],
|
||||
|
@ -117,7 +116,7 @@ def configure_loader_modules():
|
|||
)
|
||||
return {
|
||||
boto_iot: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": funcs,
|
||||
"__utils__": utils,
|
||||
"__states__": salt_states,
|
||||
|
|
|
@ -73,21 +73,20 @@ def global_config():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["grains"] = salt.loader.grains(opts)
|
||||
def configure_loader_modules(minion_opts):
|
||||
minion_opts["grains"] = salt.loader.grains(minion_opts)
|
||||
ctx = {}
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto", "boto3", "args", "systemd", "path", "platform", "reg"],
|
||||
context=ctx,
|
||||
)
|
||||
serializers = salt.loader.serializers(opts)
|
||||
serializers = salt.loader.serializers(minion_opts)
|
||||
funcs = salt.loader.minion_mods(
|
||||
opts, context=ctx, utils=utils, whitelist=["boto_lambda"]
|
||||
minion_opts, context=ctx, utils=utils, whitelist=["boto_lambda"]
|
||||
)
|
||||
salt_states = salt.loader.states(
|
||||
opts=opts,
|
||||
opts=minion_opts,
|
||||
functions=funcs,
|
||||
utils=utils,
|
||||
whitelist=["boto_lambda"],
|
||||
|
@ -95,7 +94,7 @@ def configure_loader_modules():
|
|||
)
|
||||
return {
|
||||
boto_lambda: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": funcs,
|
||||
"__utils__": utils,
|
||||
"__states__": salt_states,
|
||||
|
|
|
@ -180,20 +180,19 @@ def global_config():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
def configure_loader_modules(minion_opts):
|
||||
ctx = {}
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto", "boto3", "args", "systemd", "path", "platform", "reg"],
|
||||
context=ctx,
|
||||
)
|
||||
serializers = salt.loader.serializers(opts)
|
||||
serializers = salt.loader.serializers(minion_opts)
|
||||
funcs = salt.loader.minion_mods(
|
||||
opts, context=ctx, utils=utils, whitelist=["boto_s3_bucket"]
|
||||
minion_opts, context=ctx, utils=utils, whitelist=["boto_s3_bucket"]
|
||||
)
|
||||
salt_states = salt.loader.states(
|
||||
opts=opts,
|
||||
opts=minion_opts,
|
||||
functions=funcs,
|
||||
utils=utils,
|
||||
whitelist=["boto_s3_bucket"],
|
||||
|
@ -201,7 +200,7 @@ def configure_loader_modules():
|
|||
)
|
||||
return {
|
||||
boto_s3_bucket: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": funcs,
|
||||
"__utils__": utils,
|
||||
"__states__": salt_states,
|
||||
|
|
|
@ -12,10 +12,9 @@ from tests.support.mock import MagicMock, patch
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(
|
||||
opts,
|
||||
minion_opts,
|
||||
whitelist=["boto3", "yaml", "args", "systemd", "path", "platform"],
|
||||
context={},
|
||||
)
|
||||
|
|
|
@ -679,21 +679,22 @@ def test_mod_beacon(tmp_path):
|
|||
|
||||
|
||||
@pytest.mark.skip_on_darwin(reason="service.running is currently failing on OSX")
|
||||
@pytest.mark.skip_if_not_root
|
||||
@pytest.mark.destructive_test
|
||||
@pytest.mark.slow_test
|
||||
def test_running_with_reload():
|
||||
def test_running_with_reload(minion_opts):
|
||||
"""
|
||||
Test that a running service is properly reloaded
|
||||
"""
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["grains"] = salt.loader.grains(opts)
|
||||
utils = salt.loader.utils(opts)
|
||||
modules = salt.loader.minion_mods(opts, utils=utils)
|
||||
# TODO: This is not a unit test, it interacts with the system. Move to functional.
|
||||
minion_opts["grains"] = salt.loader.grains(minion_opts)
|
||||
utils = salt.loader.utils(minion_opts)
|
||||
modules = salt.loader.minion_mods(minion_opts, utils=utils)
|
||||
|
||||
service_name = "cron"
|
||||
cmd_name = "crontab"
|
||||
os_family = opts["grains"]["os_family"]
|
||||
os_release = opts["grains"]["osrelease"]
|
||||
os_family = minion_opts["grains"]["os_family"]
|
||||
os_release = minion_opts["grains"]["osrelease"]
|
||||
if os_family == "RedHat":
|
||||
service_name = "crond"
|
||||
elif os_family == "Arch":
|
||||
|
@ -718,8 +719,8 @@ def test_running_with_reload():
|
|||
post_srv_disable = True
|
||||
|
||||
try:
|
||||
with patch.dict(service.__grains__, opts["grains"]), patch.dict(
|
||||
service.__opts__, opts
|
||||
with patch.dict(service.__grains__, minion_opts["grains"]), patch.dict(
|
||||
service.__opts__, minion_opts
|
||||
), patch.dict(service.__salt__, modules), patch.dict(
|
||||
service.__utils__, utils
|
||||
), patch.dict(
|
||||
|
|
|
@ -14,13 +14,12 @@ from tests.support.mock import patch
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
utils = salt.loader.utils(opts)
|
||||
modules = salt.loader.minion_mods(opts, utils=utils)
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(minion_opts)
|
||||
modules = salt.loader.minion_mods(minion_opts, utils=utils)
|
||||
return {
|
||||
win_lgpo: {
|
||||
"__opts__": copy.deepcopy(opts),
|
||||
"__opts__": copy.deepcopy(minion_opts),
|
||||
"__salt__": modules,
|
||||
"__utils__": utils,
|
||||
}
|
||||
|
|
|
@ -25,11 +25,14 @@ def utils_patch():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
utils = salt.loader.utils(opts, whitelist=["zfs"])
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(minion_opts, whitelist=["zfs"])
|
||||
zfs_obj = {
|
||||
zfs: {"__opts__": opts, "__grains__": {"kernel": "SunOS"}, "__utils__": utils}
|
||||
zfs: {
|
||||
"__opts__": minion_opts,
|
||||
"__grains__": {"kernel": "SunOS"},
|
||||
"__utils__": utils,
|
||||
},
|
||||
}
|
||||
|
||||
return zfs_obj
|
||||
|
|
|
@ -25,12 +25,11 @@ def utils_patch():
|
|||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
utils = salt.loader.utils(opts, whitelist=["zfs"])
|
||||
def configure_loader_modules(minion_opts):
|
||||
utils = salt.loader.utils(minion_opts, whitelist=["zfs"])
|
||||
zpool_obj = {
|
||||
zpool: {
|
||||
"__opts__": opts,
|
||||
"__opts__": minion_opts,
|
||||
"__grains__": {"kernel": "SunOS"},
|
||||
"__utils__": utils,
|
||||
}
|
||||
|
|
|
@ -4,22 +4,21 @@ unit tests for the beacon_module parameter
|
|||
|
||||
import logging
|
||||
|
||||
import salt.config
|
||||
import salt.beacons
|
||||
from tests.support.mock import MagicMock, call, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def test_beacon_process():
|
||||
def test_beacon_process(minion_opts):
|
||||
"""
|
||||
Test the process function in the beacon class
|
||||
returns the correct information when an exception
|
||||
occurs
|
||||
"""
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["id"] = "minion"
|
||||
mock_opts["__role"] = "minion"
|
||||
mock_opts["beacons"] = {
|
||||
minion_opts["id"] = "minion"
|
||||
minion_opts["__role"] = "minion"
|
||||
minion_opts["beacons"] = {
|
||||
"watch_apache": [
|
||||
{"processes": {"apache2": "stopped"}},
|
||||
{"beacon_module": "ps"},
|
||||
|
@ -28,11 +27,11 @@ def test_beacon_process():
|
|||
beacon_mock = MagicMock(side_effect=Exception("Global Thermonuclear War"))
|
||||
beacon_mock.__globals__ = {}
|
||||
|
||||
beacon = salt.beacons.Beacon(mock_opts, [])
|
||||
beacon = salt.beacons.Beacon(minion_opts, [])
|
||||
|
||||
found = "ps.beacon" in beacon.beacons
|
||||
beacon.beacons["ps.beacon"] = beacon_mock
|
||||
ret = beacon.process(mock_opts["beacons"], mock_opts["grains"])
|
||||
ret = beacon.process(minion_opts["beacons"], minion_opts["grains"])
|
||||
|
||||
_expected = [
|
||||
{
|
||||
|
@ -45,37 +44,36 @@ def test_beacon_process():
|
|||
assert ret == _expected
|
||||
|
||||
|
||||
def test_beacon_process_invalid():
|
||||
def test_beacon_process_invalid(minion_opts):
|
||||
"""
|
||||
Test the process function in the beacon class
|
||||
when the configuration is invalid.
|
||||
"""
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["id"] = "minion"
|
||||
mock_opts["__role"] = "minion"
|
||||
minion_opts["id"] = "minion"
|
||||
minion_opts["__role"] = "minion"
|
||||
|
||||
mock_opts["beacons"] = {"status": {}}
|
||||
minion_opts["beacons"] = {"status": {}}
|
||||
|
||||
beacon = salt.beacons.Beacon(mock_opts, [])
|
||||
beacon = salt.beacons.Beacon(minion_opts, [])
|
||||
|
||||
with patch.object(salt.beacons, "log") as log_mock, patch.object(
|
||||
salt.beacons.log, "error"
|
||||
) as log_error_mock:
|
||||
ret = beacon.process(mock_opts["beacons"], mock_opts["grains"])
|
||||
ret = beacon.process(minion_opts["beacons"], minion_opts["grains"])
|
||||
log_error_mock.assert_called_with(
|
||||
"Beacon %s configuration invalid, not running.\n%s",
|
||||
"status",
|
||||
"Configuration for status beacon must be a list.",
|
||||
)
|
||||
|
||||
mock_opts["beacons"] = {"mybeacon": {}}
|
||||
minion_opts["beacons"] = {"mybeacon": {}}
|
||||
|
||||
beacon = salt.beacons.Beacon(mock_opts, [])
|
||||
beacon = salt.beacons.Beacon(minion_opts, [])
|
||||
|
||||
with patch.object(salt.beacons.log, "warning") as log_warn_mock, patch.object(
|
||||
salt.beacons.log, "error"
|
||||
) as log_error_mock:
|
||||
ret = beacon.process(mock_opts["beacons"], mock_opts["grains"])
|
||||
ret = beacon.process(minion_opts["beacons"], minion_opts["grains"])
|
||||
log_warn_mock.assert_called_with(
|
||||
"No validate function found for %s, running basic beacon validation.",
|
||||
"mybeacon",
|
||||
|
@ -83,21 +81,20 @@ def test_beacon_process_invalid():
|
|||
log_error_mock.assert_called_with("Configuration for beacon must be a list.")
|
||||
|
||||
|
||||
def test_beacon_module():
|
||||
def test_beacon_module(minion_opts):
|
||||
"""
|
||||
Test that beacon_module parameter for beacon configuration
|
||||
"""
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["id"] = "minion"
|
||||
mock_opts["__role"] = "minion"
|
||||
mock_opts["beacons"] = {
|
||||
minion_opts["id"] = "minion"
|
||||
minion_opts["__role"] = "minion"
|
||||
minion_opts["beacons"] = {
|
||||
"watch_apache": [
|
||||
{"processes": {"apache2": "stopped"}},
|
||||
{"beacon_module": "ps"},
|
||||
]
|
||||
}
|
||||
beacon = salt.beacons.Beacon(mock_opts, [])
|
||||
ret = beacon.process(mock_opts["beacons"], mock_opts["grains"])
|
||||
beacon = salt.beacons.Beacon(minion_opts, [])
|
||||
ret = beacon.process(minion_opts["beacons"], minion_opts["grains"])
|
||||
|
||||
_expected = [
|
||||
{
|
||||
|
@ -122,5 +119,5 @@ def test_beacon_module():
|
|||
)
|
||||
]
|
||||
with patch.object(beacon, "beacons", mocked) as patched:
|
||||
beacon.process(mock_opts["beacons"], mock_opts["grains"])
|
||||
beacon.process(minion_opts["beacons"], minion_opts["grains"])
|
||||
patched[name].assert_has_calls(calls)
|
||||
|
|
|
@ -20,14 +20,7 @@ from tests.support.mock import MagicMock, patch
|
|||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def master_config():
|
||||
opts = salt.config.DEFAULT_MASTER_OPTS.copy()
|
||||
opts["__role"] = "master"
|
||||
return opts
|
||||
|
||||
|
||||
def test_job_result_return_success(master_config):
|
||||
def test_job_result_return_success(master_opts):
|
||||
"""
|
||||
Should return the `expected_return`, since there is a job with the right jid.
|
||||
"""
|
||||
|
@ -35,7 +28,7 @@ def test_job_result_return_success(master_config):
|
|||
jid = "0815"
|
||||
raw_return = {"id": "fake-id", "jid": jid, "data": "", "return": "fake-return"}
|
||||
expected_return = {"fake-id": {"ret": "fake-return"}}
|
||||
with client.LocalClient(mopts=master_config) as local_client:
|
||||
with client.LocalClient(mopts=master_opts) as local_client:
|
||||
local_client.event.get_event = MagicMock(return_value=raw_return)
|
||||
local_client.returners = MagicMock()
|
||||
ret = local_client.get_event_iter_returns(jid, minions)
|
||||
|
@ -43,7 +36,7 @@ def test_job_result_return_success(master_config):
|
|||
assert val == expected_return
|
||||
|
||||
|
||||
def test_job_result_return_failure(master_config):
|
||||
def test_job_result_return_failure(master_opts):
|
||||
"""
|
||||
We are _not_ getting a job return, because the jid is different. Instead we should
|
||||
get a StopIteration exception.
|
||||
|
@ -56,7 +49,7 @@ def test_job_result_return_failure(master_config):
|
|||
"data": "",
|
||||
"return": "fake-return",
|
||||
}
|
||||
with client.LocalClient(mopts=master_config) as local_client:
|
||||
with client.LocalClient(mopts=master_opts) as local_client:
|
||||
local_client.event.get_event = MagicMock()
|
||||
local_client.event.get_event.side_effect = [raw_return, None]
|
||||
local_client.returners = MagicMock()
|
||||
|
@ -65,8 +58,8 @@ def test_job_result_return_failure(master_config):
|
|||
next(ret)
|
||||
|
||||
|
||||
def test_create_local_client(master_config):
|
||||
with client.LocalClient(mopts=master_config) as local_client:
|
||||
def test_create_local_client(master_opts):
|
||||
with client.LocalClient(mopts=master_opts) as local_client:
|
||||
assert isinstance(
|
||||
local_client, client.LocalClient
|
||||
), "LocalClient did not create a LocalClient instance"
|
||||
|
|
|
@ -69,19 +69,18 @@ def test_minion_load_grains_default():
|
|||
),
|
||||
],
|
||||
)
|
||||
def test_send_req_fires_completion_event(event):
|
||||
def test_send_req_fires_completion_event(event, minion_opts):
|
||||
event_enter = MagicMock()
|
||||
event_enter.send.side_effect = event[1]
|
||||
event = MagicMock()
|
||||
event.__enter__.return_value = event_enter
|
||||
|
||||
with patch("salt.utils.event.get_event", return_value=event):
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["random_startup_delay"] = 0
|
||||
opts["return_retry_tries"] = 30
|
||||
opts["grains"] = {}
|
||||
minion_opts["random_startup_delay"] = 0
|
||||
minion_opts["return_retry_tries"] = 30
|
||||
minion_opts["grains"] = {}
|
||||
with patch("salt.loader.grains"):
|
||||
minion = salt.minion.Minion(opts)
|
||||
minion = salt.minion.Minion(minion_opts)
|
||||
|
||||
load = {"load": "value"}
|
||||
timeout = 60
|
||||
|
@ -119,7 +118,7 @@ def test_send_req_fires_completion_event(event):
|
|||
assert rtn
|
||||
|
||||
|
||||
async def test_send_req_async_regression_62453():
|
||||
async def test_send_req_async_regression_62453(minion_opts):
|
||||
event_enter = MagicMock()
|
||||
event_enter.send.side_effect = (
|
||||
lambda data, tag, cb=None, timeout=60: salt.ext.tornado.gen.maybe_future(True)
|
||||
|
@ -127,12 +126,11 @@ async def test_send_req_async_regression_62453():
|
|||
event = MagicMock()
|
||||
event.__enter__.return_value = event_enter
|
||||
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
opts["random_startup_delay"] = 0
|
||||
opts["return_retry_tries"] = 30
|
||||
opts["grains"] = {}
|
||||
minion_opts["random_startup_delay"] = 0
|
||||
minion_opts["return_retry_tries"] = 30
|
||||
minion_opts["grains"] = {}
|
||||
with patch("salt.loader.grains"):
|
||||
minion = salt.minion.Minion(opts)
|
||||
minion = salt.minion.Minion(minion_opts)
|
||||
|
||||
load = {"load": "value"}
|
||||
timeout = 60
|
||||
|
@ -167,21 +165,20 @@ def test_mine_send_tries(req_channel_factory):
|
|||
assert rtn == 20
|
||||
|
||||
|
||||
def test_invalid_master_address():
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
with patch.dict(
|
||||
opts,
|
||||
def test_invalid_master_address(minion_opts):
|
||||
minion_opts.update(
|
||||
{
|
||||
"ipv6": False,
|
||||
"master": float("127.0"),
|
||||
"master_port": "4555",
|
||||
"retry_dns": False,
|
||||
},
|
||||
):
|
||||
pytest.raises(SaltSystemExit, salt.minion.resolve_dns, opts)
|
||||
}
|
||||
)
|
||||
with pytest.raises(SaltSystemExit):
|
||||
salt.minion.resolve_dns(minion_opts)
|
||||
|
||||
|
||||
def test_source_int_name_local():
|
||||
def test_source_int_name_local(minion_opts):
|
||||
"""
|
||||
test when file_client local and
|
||||
source_interface_name is set
|
||||
|
@ -200,9 +197,7 @@ def test_source_int_name_local():
|
|||
],
|
||||
}
|
||||
}
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
with patch.dict(
|
||||
opts,
|
||||
minion_opts.update(
|
||||
{
|
||||
"ipv6": False,
|
||||
"master": "127.0.0.1",
|
||||
|
@ -212,8 +207,9 @@ def test_source_int_name_local():
|
|||
"source_ret_port": 49017,
|
||||
"source_publish_port": 49018,
|
||||
},
|
||||
), patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
|
||||
assert salt.minion.resolve_dns(opts) == {
|
||||
)
|
||||
with patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
|
||||
assert salt.minion.resolve_dns(minion_opts) == {
|
||||
"master_ip": "127.0.0.1",
|
||||
"source_ip": "111.1.0.1",
|
||||
"source_ret_port": 49017,
|
||||
|
@ -223,7 +219,7 @@ def test_source_int_name_local():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_source_int_name_remote():
|
||||
def test_source_int_name_remote(minion_opts):
|
||||
"""
|
||||
test when file_client remote and
|
||||
source_interface_name is set and
|
||||
|
@ -243,9 +239,7 @@ def test_source_int_name_remote():
|
|||
],
|
||||
}
|
||||
}
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
with patch.dict(
|
||||
opts,
|
||||
minion_opts.update(
|
||||
{
|
||||
"ipv6": False,
|
||||
"master": "127.0.0.1",
|
||||
|
@ -255,8 +249,9 @@ def test_source_int_name_remote():
|
|||
"source_ret_port": 49017,
|
||||
"source_publish_port": 49018,
|
||||
},
|
||||
), patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
|
||||
assert salt.minion.resolve_dns(opts) == {
|
||||
)
|
||||
with patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
|
||||
assert salt.minion.resolve_dns(minion_opts) == {
|
||||
"master_ip": "127.0.0.1",
|
||||
"source_ret_port": 49017,
|
||||
"source_publish_port": 49018,
|
||||
|
@ -265,7 +260,7 @@ def test_source_int_name_remote():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_source_address():
|
||||
def test_source_address(minion_opts):
|
||||
"""
|
||||
test when source_address is set
|
||||
"""
|
||||
|
@ -283,9 +278,7 @@ def test_source_address():
|
|||
],
|
||||
}
|
||||
}
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
with patch.dict(
|
||||
opts,
|
||||
minion_opts.update(
|
||||
{
|
||||
"ipv6": False,
|
||||
"master": "127.0.0.1",
|
||||
|
@ -296,8 +289,9 @@ def test_source_address():
|
|||
"source_ret_port": 49017,
|
||||
"source_publish_port": 49018,
|
||||
},
|
||||
), patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
|
||||
assert salt.minion.resolve_dns(opts) == {
|
||||
)
|
||||
with patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
|
||||
assert salt.minion.resolve_dns(minion_opts) == {
|
||||
"source_publish_port": 49018,
|
||||
"source_ret_port": 49017,
|
||||
"master_uri": "tcp://127.0.0.1:4555",
|
||||
|
@ -308,7 +302,7 @@ def test_source_address():
|
|||
|
||||
# Tests for _handle_decoded_payload in the salt.minion.Minion() class: 3
|
||||
@pytest.mark.slow_test
|
||||
def test_handle_decoded_payload_jid_match_in_jid_queue():
|
||||
def test_handle_decoded_payload_jid_match_in_jid_queue(minion_opts):
|
||||
"""
|
||||
Tests that the _handle_decoded_payload function returns when a jid is given that is already present
|
||||
in the jid_queue.
|
||||
|
@ -318,11 +312,10 @@ def test_handle_decoded_payload_jid_match_in_jid_queue():
|
|||
return None BEFORE any of the processes are spun up because we should be avoiding firing duplicate
|
||||
jobs.
|
||||
"""
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_data = {"fun": "foo.bar", "jid": 123}
|
||||
mock_jid_queue = [123]
|
||||
minion = salt.minion.Minion(
|
||||
mock_opts,
|
||||
minion_opts,
|
||||
jid_queue=copy.copy(mock_jid_queue),
|
||||
io_loop=salt.ext.tornado.ioloop.IOLoop(),
|
||||
)
|
||||
|
@ -335,7 +328,7 @@ def test_handle_decoded_payload_jid_match_in_jid_queue():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_handle_decoded_payload_jid_queue_addition():
|
||||
def test_handle_decoded_payload_jid_queue_addition(minion_opts):
|
||||
"""
|
||||
Tests that the _handle_decoded_payload function adds a jid to the minion's jid_queue when the new
|
||||
jid isn't already present in the jid_queue.
|
||||
|
@ -348,11 +341,10 @@ def test_handle_decoded_payload_jid_queue_addition():
|
|||
MagicMock(return_value=True),
|
||||
):
|
||||
mock_jid = 11111
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_data = {"fun": "foo.bar", "jid": mock_jid}
|
||||
mock_jid_queue = [123, 456]
|
||||
minion = salt.minion.Minion(
|
||||
mock_opts,
|
||||
minion_opts,
|
||||
jid_queue=copy.copy(mock_jid_queue),
|
||||
io_loop=salt.ext.tornado.ioloop.IOLoop(),
|
||||
)
|
||||
|
@ -373,7 +365,7 @@ def test_handle_decoded_payload_jid_queue_addition():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_handle_decoded_payload_jid_queue_reduced_minion_jid_queue_hwm():
|
||||
def test_handle_decoded_payload_jid_queue_reduced_minion_jid_queue_hwm(minion_opts):
|
||||
"""
|
||||
Tests that the _handle_decoded_payload function removes a jid from the minion's jid_queue when the
|
||||
minion's jid_queue high water mark (minion_jid_queue_hwm) is hit.
|
||||
|
@ -385,12 +377,11 @@ def test_handle_decoded_payload_jid_queue_reduced_minion_jid_queue_hwm():
|
|||
"salt.utils.process.SignalHandlingProcess.join",
|
||||
MagicMock(return_value=True),
|
||||
):
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["minion_jid_queue_hwm"] = 2
|
||||
minion_opts["minion_jid_queue_hwm"] = 2
|
||||
mock_data = {"fun": "foo.bar", "jid": 789}
|
||||
mock_jid_queue = [123, 456]
|
||||
minion = salt.minion.Minion(
|
||||
mock_opts,
|
||||
minion_opts,
|
||||
jid_queue=copy.copy(mock_jid_queue),
|
||||
io_loop=salt.ext.tornado.ioloop.IOLoop(),
|
||||
)
|
||||
|
@ -410,7 +401,7 @@ def test_handle_decoded_payload_jid_queue_reduced_minion_jid_queue_hwm():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_process_count_max():
|
||||
def test_process_count_max(minion_opts):
|
||||
"""
|
||||
Tests that the _handle_decoded_payload function does not spawn more than the configured amount of processes,
|
||||
as per process_count_max.
|
||||
|
@ -428,13 +419,12 @@ def test_process_count_max():
|
|||
MagicMock(return_value=salt.ext.tornado.concurrent.Future()),
|
||||
):
|
||||
process_count_max = 10
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["__role"] = "minion"
|
||||
mock_opts["minion_jid_queue_hwm"] = 100
|
||||
mock_opts["process_count_max"] = process_count_max
|
||||
minion_opts["__role"] = "minion"
|
||||
minion_opts["minion_jid_queue_hwm"] = 100
|
||||
minion_opts["process_count_max"] = process_count_max
|
||||
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
minion = salt.minion.Minion(mock_opts, jid_queue=[], io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, jid_queue=[], io_loop=io_loop)
|
||||
try:
|
||||
|
||||
# mock gen.sleep to throw a special Exception when called, so that we detect it
|
||||
|
@ -476,7 +466,7 @@ def test_process_count_max():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_beacons_before_connect():
|
||||
def test_beacons_before_connect(minion_opts):
|
||||
"""
|
||||
Tests that the 'beacons_before_connect' option causes the beacons to be initialized before connect.
|
||||
"""
|
||||
|
@ -490,11 +480,10 @@ def test_beacons_before_connect():
|
|||
"salt.utils.process.SignalHandlingProcess.join",
|
||||
MagicMock(return_value=True),
|
||||
):
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["beacons_before_connect"] = True
|
||||
minion_opts["beacons_before_connect"] = True
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
try:
|
||||
|
||||
try:
|
||||
|
@ -510,7 +499,7 @@ def test_beacons_before_connect():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_scheduler_before_connect():
|
||||
def test_scheduler_before_connect(minion_opts):
|
||||
"""
|
||||
Tests that the 'scheduler_before_connect' option causes the scheduler to be initialized before connect.
|
||||
"""
|
||||
|
@ -524,11 +513,10 @@ def test_scheduler_before_connect():
|
|||
"salt.utils.process.SignalHandlingProcess.join",
|
||||
MagicMock(return_value=True),
|
||||
):
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["scheduler_before_connect"] = True
|
||||
minion_opts["scheduler_before_connect"] = True
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
try:
|
||||
try:
|
||||
minion.tune_in(start=True)
|
||||
|
@ -542,7 +530,7 @@ def test_scheduler_before_connect():
|
|||
minion.destroy()
|
||||
|
||||
|
||||
def test_minion_module_refresh(tmp_path):
|
||||
def test_minion_module_refresh(minion_opts):
|
||||
"""
|
||||
Tests that the 'module_refresh' just return in case there is no 'schedule'
|
||||
because destroy method was already called.
|
||||
|
@ -555,13 +543,13 @@ def test_minion_module_refresh(tmp_path):
|
|||
MagicMock(return_value=True),
|
||||
):
|
||||
try:
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["cachedir"] = str(tmp_path)
|
||||
minion = salt.minion.Minion(
|
||||
mock_opts,
|
||||
minion_opts,
|
||||
io_loop=salt.ext.tornado.ioloop.IOLoop(),
|
||||
)
|
||||
minion.schedule = salt.utils.schedule.Schedule(mock_opts, {}, returners={})
|
||||
minion.schedule = salt.utils.schedule.Schedule(
|
||||
minion_opts, {}, returners={}
|
||||
)
|
||||
assert hasattr(minion, "schedule")
|
||||
minion.destroy()
|
||||
assert not hasattr(minion, "schedule")
|
||||
|
@ -570,7 +558,7 @@ def test_minion_module_refresh(tmp_path):
|
|||
minion.destroy()
|
||||
|
||||
|
||||
def test_minion_module_refresh_beacons_refresh(tmp_path):
|
||||
def test_minion_module_refresh_beacons_refresh(minion_opts):
|
||||
"""
|
||||
Tests that 'module_refresh' calls beacons_refresh and that the
|
||||
minion object has a beacons attribute with beacons.
|
||||
|
@ -583,13 +571,13 @@ def test_minion_module_refresh_beacons_refresh(tmp_path):
|
|||
MagicMock(return_value=True),
|
||||
):
|
||||
try:
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["cachedir"] = str(tmp_path)
|
||||
minion = salt.minion.Minion(
|
||||
mock_opts,
|
||||
minion_opts,
|
||||
io_loop=salt.ext.tornado.ioloop.IOLoop(),
|
||||
)
|
||||
minion.schedule = salt.utils.schedule.Schedule(mock_opts, {}, returners={})
|
||||
minion.schedule = salt.utils.schedule.Schedule(
|
||||
minion_opts, {}, returners={}
|
||||
)
|
||||
assert not hasattr(minion, "beacons")
|
||||
minion.module_refresh()
|
||||
assert hasattr(minion, "beacons")
|
||||
|
@ -601,7 +589,9 @@ def test_minion_module_refresh_beacons_refresh(tmp_path):
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_callbacks():
|
||||
def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_callbacks(
|
||||
minion_opts,
|
||||
):
|
||||
with patch("salt.minion.Minion.ctx", MagicMock(return_value={})), patch(
|
||||
"salt.minion.Minion.sync_connect_master",
|
||||
MagicMock(side_effect=RuntimeError("stop execution")),
|
||||
|
@ -612,11 +602,10 @@ def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_call
|
|||
"salt.utils.process.SignalHandlingProcess.join",
|
||||
MagicMock(return_value=True),
|
||||
):
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["ping_interval"] = 10
|
||||
minion_opts["ping_interval"] = 10
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
try:
|
||||
try:
|
||||
minion.connected = MagicMock(side_effect=(False, True))
|
||||
|
@ -632,14 +621,13 @@ def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_call
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_when_passed_start_event_grains():
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
def test_when_passed_start_event_grains(minion_opts):
|
||||
# provide mock opts an os grain since we'll look for it later.
|
||||
mock_opts["grains"]["os"] = "linux"
|
||||
mock_opts["start_event_grains"] = ["os"]
|
||||
minion_opts["grains"]["os"] = "linux"
|
||||
minion_opts["start_event_grains"] = ["os"]
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
try:
|
||||
minion.tok = MagicMock()
|
||||
minion._send_req_sync = MagicMock()
|
||||
|
@ -655,11 +643,10 @@ def test_when_passed_start_event_grains():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_when_not_passed_start_event_grains():
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
def test_when_not_passed_start_event_grains(minion_opts):
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
try:
|
||||
minion.tok = MagicMock()
|
||||
minion._send_req_sync = MagicMock()
|
||||
|
@ -672,12 +659,11 @@ def test_when_not_passed_start_event_grains():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_when_other_events_fired_and_start_event_grains_are_set():
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["start_event_grains"] = ["os"]
|
||||
def test_when_other_events_fired_and_start_event_grains_are_set(minion_opts):
|
||||
minion_opts["start_event_grains"] = ["os"]
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
try:
|
||||
minion.tok = MagicMock()
|
||||
minion._send_req_sync = MagicMock()
|
||||
|
@ -690,14 +676,12 @@ def test_when_other_events_fired_and_start_event_grains_are_set():
|
|||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_minion_retry_dns_count():
|
||||
def test_minion_retry_dns_count(minion_opts):
|
||||
"""
|
||||
Tests that the resolve_dns will retry dns look ups for a maximum of
|
||||
3 times before raising a SaltMasterUnresolvableError exception.
|
||||
"""
|
||||
opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
with patch.dict(
|
||||
opts,
|
||||
minion_opts.update(
|
||||
{
|
||||
"ipv6": False,
|
||||
"master": "dummy",
|
||||
|
@ -705,19 +689,19 @@ def test_minion_retry_dns_count():
|
|||
"retry_dns": 1,
|
||||
"retry_dns_count": 3,
|
||||
},
|
||||
):
|
||||
pytest.raises(SaltMasterUnresolvableError, salt.minion.resolve_dns, opts)
|
||||
)
|
||||
with pytest.raises(SaltMasterUnresolvableError):
|
||||
salt.minion.resolve_dns(minion_opts)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_gen_modules_executors():
|
||||
def test_gen_modules_executors(minion_opts):
|
||||
"""
|
||||
Ensure gen_modules is called with the correct arguments #54429
|
||||
"""
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
|
||||
class MockPillarCompiler:
|
||||
def compile_pillar(self):
|
||||
|
@ -732,40 +716,40 @@ def test_gen_modules_executors():
|
|||
minion.destroy()
|
||||
|
||||
|
||||
@patch("salt.utils.process.default_signals")
|
||||
@pytest.mark.slow_test
|
||||
def test_reinit_crypto_on_fork(def_mock):
|
||||
def test_reinit_crypto_on_fork(minion_opts):
|
||||
"""
|
||||
Ensure salt.utils.crypt.reinit_crypto() is executed when forking for new job
|
||||
"""
|
||||
mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
|
||||
mock_opts["multiprocessing"] = True
|
||||
minion_opts["multiprocessing"] = True
|
||||
with patch("salt.utils.process.default_signals"):
|
||||
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
|
||||
io_loop = salt.ext.tornado.ioloop.IOLoop()
|
||||
io_loop.make_current()
|
||||
minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
|
||||
|
||||
job_data = {"jid": "test-jid", "fun": "test.ping"}
|
||||
job_data = {"jid": "test-jid", "fun": "test.ping"}
|
||||
|
||||
def mock_start(self):
|
||||
# pylint: disable=comparison-with-callable
|
||||
assert (
|
||||
len(
|
||||
[
|
||||
x
|
||||
for x in self._after_fork_methods
|
||||
if x[0] == salt.utils.crypt.reinit_crypto
|
||||
]
|
||||
def mock_start(self):
|
||||
# pylint: disable=comparison-with-callable
|
||||
assert (
|
||||
len(
|
||||
[
|
||||
x
|
||||
for x in self._after_fork_methods
|
||||
if x[0] == salt.utils.crypt.reinit_crypto
|
||||
]
|
||||
)
|
||||
== 1
|
||||
)
|
||||
== 1
|
||||
)
|
||||
# pylint: enable=comparison-with-callable
|
||||
# pylint: enable=comparison-with-callable
|
||||
|
||||
with patch.object(salt.utils.process.SignalHandlingProcess, "start", mock_start):
|
||||
io_loop.run_sync(lambda: minion._handle_decoded_payload(job_data))
|
||||
with patch.object(
|
||||
salt.utils.process.SignalHandlingProcess, "start", mock_start
|
||||
):
|
||||
io_loop.run_sync(lambda: minion._handle_decoded_payload(job_data))
|
||||
|
||||
|
||||
-def test_minion_manage_schedule():
+def test_minion_manage_schedule(minion_opts):
     """
     Tests that the manage_schedule will call the add function, adding
     schedule data into opts.
@@ -780,7 +764,6 @@ def test_minion_manage_schedule():
         "salt.utils.process.SignalHandlingProcess.join",
         MagicMock(return_value=True),
     ):
-        mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
         io_loop = salt.ext.tornado.ioloop.IOLoop()
         io_loop.make_current()

@@ -788,9 +771,9 @@ def test_minion_manage_schedule():
         try:
             mock_functions = {"test.ping": None}

-            minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
+            minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
             minion.schedule = salt.utils.schedule.Schedule(
-                mock_opts,
+                minion_opts,
                 mock_functions,
                 returners={},
                 new_instance=True,
@@ -823,7 +806,7 @@ def test_minion_manage_schedule():
             del minion


-def test_minion_manage_beacons():
+def test_minion_manage_beacons(minion_opts):
     """
     Tests that the manage_beacons will call the add function, adding
     beacon data into opts.
@@ -839,15 +822,14 @@ def test_minion_manage_beacons():
         MagicMock(return_value=True),
     ):
         try:
-            mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-            mock_opts["beacons"] = {}
+            minion_opts["beacons"] = {}

             io_loop = salt.ext.tornado.ioloop.IOLoop()
             io_loop.make_current()

             mock_functions = {"test.ping": None}
-            minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
-            minion.beacons = salt.beacons.Beacon(mock_opts, mock_functions)
+            minion = salt.minion.Minion(minion_opts, io_loop=io_loop)
+            minion.beacons = salt.beacons.Beacon(minion_opts, mock_functions)

             bdata = [{"salt-master": "stopped"}, {"apache2": "stopped"}]
             data = {"name": "ps", "beacon_data": bdata, "func": "add"}
@@ -892,7 +874,7 @@ def test_prep_ip_port():


 @pytest.mark.skip_if_not_root
-def test_sock_path_len():
+def test_sock_path_len(minion_opts):
     """
     This tests whether or not a larger hash causes the sock path to exceed
     the system's max sock path length. See the below link for more
@@ -900,33 +882,32 @@ def test_sock_path_len():

     https://github.com/saltstack/salt/issues/12172#issuecomment-43903643
     """
-    opts = {
-        "id": "salt-testing",
-        "hash_type": "sha512",
-        "sock_dir": os.path.join(salt.syspaths.SOCK_DIR, "minion"),
-        "extension_modules": "",
-    }
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    with patch.dict(opts, opts):
-        try:
-            event_publisher = event.AsyncEventPublisher(opts)
-            result = True
-        except ValueError:
-            # There are rare cases where we operate a closed socket, especially in containers.
-            # In this case, don't fail the test because we'll catch it down the road.
-            result = True
-        except SaltSystemExit:
-            result = False
+    minion_opts.update(
+        {
+            "id": "salt-testing",
+            "hash_type": "sha512",
+            "sock_dir": os.path.join(salt.syspaths.SOCK_DIR, "minion"),
+            "extension_modules": "",
+        }
+    )
+    try:
+        event_publisher = event.AsyncEventPublisher(minion_opts)
+        result = True
+    except ValueError:
+        # There are rare cases where we operate a closed socket, especially in containers.
+        # In this case, don't fail the test because we'll catch it down the road.
+        result = True
+    except SaltSystemExit:
+        result = False
     assert result


 @pytest.mark.skip_on_windows(reason="Skippin, no Salt master running on Windows.")
-async def test_master_type_failover():
+async def test_master_type_failover(minion_opts):
     """
     Tests master_type "failover" to not fall back to 127.0.0.1 address when master does not resolve in DNS
     """
-    mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    mock_opts.update(
+    minion_opts.update(
         {
             "master_type": "failover",
             "master": ["master1", "master2"],
@@ -961,16 +942,15 @@ async def test_master_type_failover():
         "salt.channel.client.AsyncPubChannel.factory", mock_channel_factory
     ), patch("salt.loader.grains", MagicMock(return_value=[])):
         with pytest.raises(SaltClientError):
-            minion = salt.minion.Minion(mock_opts)
+            minion = salt.minion.Minion(minion_opts)
             await minion.connect_master()


-async def test_master_type_failover_no_masters():
+async def test_master_type_failover_no_masters(minion_opts):
     """
     Tests master_type "failover" to not fall back to 127.0.0.1 address when no master can be resolved
     """
-    mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    mock_opts.update(
+    minion_opts.update(
         {
             "master_type": "failover",
             "master": ["master1", "master2"],
@@ -987,7 +967,7 @@ async def test_master_type_failover_no_masters():
         "salt.loader.grains", MagicMock(return_value=[])
     ):
         with pytest.raises(SaltClientError):
-            minion = salt.minion.Minion(mock_opts)
+            minion = salt.minion.Minion(minion_opts)
             await minion.connect_master()


@@ -999,19 +979,18 @@ def test_config_cache_path_overrides():
     assert mminion.opts["cachedir"] == cachedir


-def test_minion_grains_refresh_pre_exec_false():
+def test_minion_grains_refresh_pre_exec_false(minion_opts):
     """
     Minion does not refresh grains when grains_refresh_pre_exec is False
     """
-    mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    mock_opts["multiprocessing"] = False
-    mock_opts["grains_refresh_pre_exec"] = False
+    minion_opts["multiprocessing"] = False
+    minion_opts["grains_refresh_pre_exec"] = False
     mock_data = {"fun": "foo.bar", "jid": 123}
     with patch("salt.loader.grains") as grainsfunc, patch(
         "salt.minion.Minion._target", MagicMock(return_value=True)
     ):
         minion = salt.minion.Minion(
-            mock_opts,
+            minion_opts,
             jid_queue=None,
             io_loop=salt.ext.tornado.ioloop.IOLoop(),
             load_grains=False,
@@ -1023,19 +1002,18 @@ def test_minion_grains_refresh_pre_exec_false():
         minion.destroy()


-def test_minion_grains_refresh_pre_exec_true():
+def test_minion_grains_refresh_pre_exec_true(minion_opts):
     """
     Minion refreshes grains when grains_refresh_pre_exec is True
     """
-    mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    mock_opts["multiprocessing"] = False
-    mock_opts["grains_refresh_pre_exec"] = True
+    minion_opts["multiprocessing"] = False
+    minion_opts["grains_refresh_pre_exec"] = True
     mock_data = {"fun": "foo.bar", "jid": 123}
     with patch("salt.loader.grains") as grainsfunc, patch(
         "salt.minion.Minion._target", MagicMock(return_value=True)
     ):
         minion = salt.minion.Minion(
-            mock_opts,
+            minion_opts,
             jid_queue=None,
             io_loop=salt.ext.tornado.ioloop.IOLoop(),
             load_grains=False,
@@ -1050,7 +1028,7 @@ def test_minion_grains_refresh_pre_exec_true():
 @pytest.mark.skip_on_darwin(
     reason="Skip on MacOS, where this does not raise an exception."
 )
-def test_valid_ipv4_master_address_ipv6_enabled():
+def test_valid_ipv4_master_address_ipv6_enabled(minion_opts):
     """
     Tests that the lookups fail back to ipv4 when ipv6 fails.
     """
@@ -1068,9 +1046,7 @@ def test_valid_ipv4_master_address_ipv6_enabled():
             ],
         }
     }
-    opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    with patch.dict(
-        opts,
+    minion_opts.update(
         {
             "ipv6": True,
             "master": "127.0.0.1",
@@ -1081,14 +1057,15 @@ def test_valid_ipv4_master_address_ipv6_enabled():
             "source_ret_port": 49017,
             "source_publish_port": 49018,
         },
-    ), patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
+    )
+    with patch("salt.utils.network.interfaces", MagicMock(return_value=interfaces)):
         expected = {
             "source_publish_port": 49018,
             "master_uri": "tcp://127.0.0.1:4555",
             "source_ret_port": 49017,
             "master_ip": "127.0.0.1",
         }
-        assert salt.minion.resolve_dns(opts) == expected
+        assert salt.minion.resolve_dns(minion_opts) == expected


 async def test_master_type_disable():
@@ -1,17 +1,16 @@
 import pytest
 from pytestshellutils.utils.processes import terminate_process

-import salt.config
 import salt.ext.tornado.ioloop
 import salt.utils.event
 import salt.utils.stringutils


 @pytest.mark.slow_test
-def test_event_return():
+def test_event_return(master_opts):
     evt = None
     try:
-        evt = salt.utils.event.EventReturn(salt.config.DEFAULT_MASTER_OPTS.copy())
+        evt = salt.utils.event.EventReturn(master_opts)
         evt.start()
     except TypeError as exc:
         if "object" in str(exc):
@@ -11,7 +11,6 @@ import re
 import pytest
 from jinja2 import DictLoader, Environment, exceptions

-import salt.config
 import salt.loader

 # dateutils is needed so that the strftime jinja filter is loaded
@@ -36,9 +35,8 @@ except ImportError:


 @pytest.fixture
-def minion_opts(tmp_path):
-    _opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    _opts.update(
+def minion_opts(tmp_path, minion_opts):
+    minion_opts.update(
         {
             "cachedir": str(tmp_path / "jinja-template-cache"),
             "file_buffer_size": 1048576,
@@ -54,7 +52,7 @@ def minion_opts(tmp_path):
             ),
         }
     )
-    return _opts
+    return minion_opts


 @pytest.fixture()
@@ -2,7 +2,6 @@
 Tests for salt.utils.jinja
 """

-import salt.config
 import salt.loader

 # dateutils is needed so that the strftime jinja filter is loaded
@@ -15,14 +14,12 @@ from salt.utils.jinja import SaltCacheLoader
 from tests.support.mock import Mock, patch


-def render(tmpl_str, context=None):
+def render(tmpl_str, minion_opts, context=None):
     functions = {
         "mocktest.ping": lambda: True,
         "mockgrains.get": lambda x: "jerry",
     }

-    minion_opts = salt.config.DEFAULT_MINION_OPTS.copy()
-
     _render = salt.loader.render(minion_opts, functions)

     jinja = _render.get("jinja")
@@ -30,29 +27,29 @@ def render(tmpl_str, context=None):
     return jinja(tmpl_str, context=context or {}, argline="-s").read()


-def test_normlookup():
+def test_normlookup(minion_opts):
     """
     Sanity-check the normal dictionary-lookup syntax for our stub function
     """
     tmpl_str = """Hello, {{ salt['mocktest.ping']() }}."""

     with patch.object(SaltCacheLoader, "file_client", Mock()):
-        ret = render(tmpl_str)
+        ret = render(tmpl_str, minion_opts)
     assert ret == "Hello, True."


-def test_dotlookup():
+def test_dotlookup(minion_opts):
     """
     Check calling a stub function using awesome dot-notation
     """
     tmpl_str = """Hello, {{ salt.mocktest.ping() }}."""

     with patch.object(SaltCacheLoader, "file_client", Mock()):
-        ret = render(tmpl_str)
+        ret = render(tmpl_str, minion_opts)
     assert ret == "Hello, True."


-def test_shadowed_dict_method():
+def test_shadowed_dict_method(minion_opts):
     """
     Check calling a stub function with a name that shadows a ``dict``
     method name
@@ -60,5 +57,5 @@ def test_shadowed_dict_method():
     tmpl_str = """Hello, {{ salt.mockgrains.get('id') }}."""

     with patch.object(SaltCacheLoader, "file_client", Mock()):
-        ret = render(tmpl_str)
+        ret = render(tmpl_str, minion_opts)
     assert ret == "Hello, jerry."
@@ -9,7 +9,6 @@ import os

 import pytest

-import salt.config
 import salt.loader

 # dateutils is needed so that the strftime jinja filter is loaded
@@ -51,9 +50,8 @@ class MockFileClient:


 @pytest.fixture
-def minion_opts(tmp_path):
-    _opts = salt.config.DEFAULT_MINION_OPTS.copy()
-    _opts.update(
+def minion_opts(tmp_path, minion_opts):
+    minion_opts.update(
         {
             "cachedir": str(tmp_path),
             "file_buffer_size": 1048576,
@@ -69,7 +67,7 @@ def minion_opts(tmp_path):
             ),
         }
     )
-    return _opts
+    return minion_opts


 @pytest.fixture
Some files were not shown because too many files have changed in this diff