mirror of
https://github.com/saltstack/salt.git
synced 2025-04-10 14:51:40 +00:00
Fix issues with requisites and aggregate
add install of networkx fix aggregate to properly work with requisites fix requisite checking to not be exponential fix pkg aggregate to work when multiple states specify the same package add some type hints to state.py to make the code easier to follow fix case of pkg aggregate duplicate package.
This commit is contained in:
parent
58c89a8dfd
commit
66caa58346
76 changed files with 2307 additions and 1625 deletions
|
@ -1402,7 +1402,7 @@ repos:
|
|||
hooks:
|
||||
- id: pyupgrade
|
||||
name: Upgrade code for Py3.8+
|
||||
args: [--py38-plus, --keep-mock]
|
||||
args: [--py38-plus, --keep-mock, --keep-runtime-typing]
|
||||
exclude: >
|
||||
(?x)^(
|
||||
salt/client/ssh/ssh_py_shim.py
|
||||
|
|
|
@ -761,4 +761,5 @@ allowed-3rd-party-modules=msgpack,
|
|||
pytestskipmarkers,
|
||||
cryptography,
|
||||
aiohttp,
|
||||
pytest_timeout
|
||||
pytest_timeout,
|
||||
networkx
|
||||
|
|
1
changelog/47154.fixed.md
Normal file
1
changelog/47154.fixed.md
Normal file
|
@ -0,0 +1 @@
|
|||
Fixed erroneous recursive requisite error when a prereq is used in combination with onchanges_any.
|
1
changelog/59123.fixed.md
Normal file
1
changelog/59123.fixed.md
Normal file
|
@ -0,0 +1 @@
|
|||
Fixed dependency resolution to not be quadratic.
|
1
changelog/62439.fixed.md
Normal file
1
changelog/62439.fixed.md
Normal file
|
@ -0,0 +1 @@
|
|||
Fixed performance when state_aggregate is enabled.
|
|
@ -1 +1 @@
|
|||
pkg.installed state aggregate does not honors requires requisite
|
||||
Fixed aggregation to correctly honor requisites.
|
||||
|
|
1
changelog/8210.fixed.md
Normal file
1
changelog/8210.fixed.md
Normal file
|
@ -0,0 +1 @@
|
|||
Fixed recursive prereq requisites to report recursive requisite error.
|
|
@ -5,6 +5,7 @@ jmespath
|
|||
msgpack>=1.0.0
|
||||
PyYAML
|
||||
MarkupSafe
|
||||
networkx
|
||||
requests>=2.31.0 ; python_version < '3.8'
|
||||
requests>=2.32.0 ; python_version >= '3.8'
|
||||
distro>=1.0.1
|
||||
|
@ -25,7 +26,7 @@ pyopenssl>=24.0.0
|
|||
python-dateutil>=2.8.1
|
||||
python-gnupg>=0.4.7
|
||||
cherrypy>=18.6.1
|
||||
importlib-metadata>=3.3.0
|
||||
importlib-metadata>=4.3.0
|
||||
cryptography>=42.0.0
|
||||
|
||||
# From old requirements/static/pkg/linux.in
|
||||
|
|
|
@ -282,6 +282,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -285,6 +285,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -310,6 +310,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -253,6 +253,10 @@ multidict==6.0.4
|
|||
# -c requirements/static/ci/../pkg/py3.10/windows.txt
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/windows.txt
|
||||
# -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.10/windows.txt
|
||||
|
|
|
@ -275,6 +275,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -278,6 +278,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -301,6 +301,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -246,6 +246,10 @@ multidict==6.0.4
|
|||
# -c requirements/static/ci/../pkg/py3.11/windows.txt
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/windows.txt
|
||||
# -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.11/windows.txt
|
||||
|
|
|
@ -385,6 +385,11 @@ netutils==1.6.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/linux.txt
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
|
|
@ -275,6 +275,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -158,6 +158,10 @@ multidict==6.0.4
|
|||
# yarl
|
||||
myst-docutils[linkify]==1.0.0
|
||||
# via -r requirements/static/ci/docs.in
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
|
|
@ -278,6 +278,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -412,6 +412,11 @@ netutils==1.6.0
|
|||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/linux.txt
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via
|
||||
# -c requirements/static/ci/py3.12/linux.txt
|
||||
|
|
|
@ -301,6 +301,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -246,6 +246,10 @@ multidict==6.0.4
|
|||
# -c requirements/static/ci/../pkg/py3.12/windows.txt
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/windows.txt
|
||||
# -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.12/windows.txt
|
||||
|
|
|
@ -289,6 +289,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -308,6 +308,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.8/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -257,6 +257,10 @@ multidict==6.0.4
|
|||
# -c requirements/static/ci/../pkg/py3.8/windows.txt
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.8/windows.txt
|
||||
# -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.8/windows.txt
|
||||
|
|
|
@ -282,6 +282,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -285,6 +285,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -304,6 +304,10 @@ netmiko==4.2.0
|
|||
# via napalm
|
||||
netutils==1.6.0
|
||||
# via napalm
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/linux.txt
|
||||
# -r requirements/base.txt
|
||||
ntc-templates==4.0.1
|
||||
# via netmiko
|
||||
oscrypto==1.3.0
|
||||
|
|
|
@ -253,6 +253,10 @@ multidict==6.0.4
|
|||
# -c requirements/static/ci/../pkg/py3.9/windows.txt
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/windows.txt
|
||||
# -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via
|
||||
# -c requirements/static/ci/../pkg/py3.9/windows.txt
|
||||
|
|
|
@ -83,6 +83,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -83,6 +83,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -83,6 +83,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -91,6 +91,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -81,6 +81,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -81,6 +81,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -81,6 +81,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -89,6 +89,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -81,6 +81,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -81,6 +81,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -81,6 +81,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -89,6 +89,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -85,6 +85,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -85,6 +85,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -93,6 +93,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -83,6 +83,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -83,6 +83,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -83,6 +83,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -91,6 +91,8 @@ multidict==6.0.4
|
|||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
networkx==3.2.1
|
||||
# via -r requirements/base.txt
|
||||
packaging==23.1
|
||||
# via -r requirements/base.txt
|
||||
portend==3.1.0
|
||||
|
|
|
@ -199,7 +199,10 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs):
|
|||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
# Compile and verify the raw chunks
|
||||
chunks = st_.state.compile_high_data(high_data)
|
||||
chunks, errors = st_.state.compile_high_data(high_data)
|
||||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
|
@ -430,7 +433,9 @@ def high(data, **kwargs):
|
|||
# Ensure other wrappers use the correct pillar
|
||||
__pillar__.update(pillar)
|
||||
st_.push_active()
|
||||
chunks = st_.state.compile_high_data(data)
|
||||
chunks, errors = st_.state.compile_high_data(data)
|
||||
if errors:
|
||||
return errors
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
_merge_extra_filerefs(
|
||||
|
@ -676,9 +681,9 @@ def highstate(test=None, **kwargs):
|
|||
# Ensure other wrappers use the correct pillar
|
||||
__pillar__.update(pillar)
|
||||
st_.push_active()
|
||||
chunks = st_.compile_low_chunks(context=__context__.value())
|
||||
chunks_or_errors = st_.compile_low_chunks(context=__context__.value())
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
chunks_or_errors,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get("extra_filerefs", ""),
|
||||
opts.get("extra_filerefs", ""),
|
||||
|
@ -686,19 +691,19 @@ def highstate(test=None, **kwargs):
|
|||
),
|
||||
)
|
||||
# Check for errors
|
||||
for chunk in chunks:
|
||||
for chunk in chunks_or_errors:
|
||||
if not isinstance(chunk, dict):
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return chunks
|
||||
return chunks_or_errors
|
||||
|
||||
roster = salt.roster.Roster(opts, opts.get("roster", "flat"))
|
||||
roster_grains = roster.opts["grains"]
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
_cleanup_slsmod_low_data(chunks)
|
||||
_cleanup_slsmod_low_data(chunks_or_errors)
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__context__["fileclient"],
|
||||
chunks,
|
||||
chunks_or_errors,
|
||||
file_refs,
|
||||
pillar,
|
||||
st_kwargs["id_"],
|
||||
|
@ -767,14 +772,14 @@ def top(topfn, test=None, **kwargs):
|
|||
__pillar__.update(pillar)
|
||||
st_.opts["state_top"] = os.path.join("salt://", topfn)
|
||||
st_.push_active()
|
||||
chunks = st_.compile_low_chunks(context=__context__.value())
|
||||
chunks_or_errors = st_.compile_low_chunks(context=__context__.value())
|
||||
# Check for errors
|
||||
for chunk in chunks:
|
||||
for chunk in chunks_or_errors:
|
||||
if not isinstance(chunk, dict):
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return chunks
|
||||
return chunks_or_errors
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
chunks_or_errors,
|
||||
_merge_extra_filerefs(
|
||||
kwargs.get("extra_filerefs", ""),
|
||||
opts.get("extra_filerefs", ""),
|
||||
|
@ -786,10 +791,10 @@ def top(topfn, test=None, **kwargs):
|
|||
roster_grains = roster.opts["grains"]
|
||||
|
||||
# Create the tar containing the state pkg and relevant files.
|
||||
_cleanup_slsmod_low_data(chunks)
|
||||
_cleanup_slsmod_low_data(chunks_or_errors)
|
||||
trans_tar = salt.client.ssh.state.prep_trans_tar(
|
||||
__context__["fileclient"],
|
||||
chunks,
|
||||
chunks_or_errors,
|
||||
file_refs,
|
||||
pillar,
|
||||
st_kwargs["id_"],
|
||||
|
@ -888,9 +893,9 @@ def show_lowstate(**kwargs):
|
|||
err += st_.opts["pillar"]["_errors"]
|
||||
return err
|
||||
st_.push_active()
|
||||
chunks = st_.compile_low_chunks(context=__context__.value())
|
||||
_cleanup_slsmod_low_data(chunks)
|
||||
return chunks
|
||||
chunks_or_errors = st_.compile_low_chunks(context=__context__.value())
|
||||
_cleanup_slsmod_low_data(chunks_or_errors)
|
||||
return chunks_or_errors
|
||||
|
||||
|
||||
def sls_id(id_, mods, test=None, queue=False, **kwargs):
|
||||
|
@ -977,7 +982,10 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
|
|||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
chunks = st_.state.compile_high_data(high_)
|
||||
chunks, errors = st_.state.compile_high_data(high_)
|
||||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
chunk = [x for x in chunks if x.get("__id__", "") == id_]
|
||||
|
||||
if not chunk:
|
||||
|
@ -1108,7 +1116,10 @@ def show_low_sls(mods, saltenv="base", test=None, **kwargs):
|
|||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
ret = st_.state.compile_high_data(high_data)
|
||||
ret, errors = st_.state.compile_high_data(high_data)
|
||||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
_cleanup_slsmod_low_data(ret)
|
||||
return ret
|
||||
|
||||
|
|
|
@ -347,7 +347,9 @@ def sls(root, mods, saltenv="base", test=None, exclude=None, **kwargs):
|
|||
high_data = st_.state.apply_exclude(high_data)
|
||||
|
||||
# Compile and verify the raw chunks
|
||||
chunks = st_.state.compile_high_data(high_data)
|
||||
chunks, errors = st_.state.compile_high_data(high_data)
|
||||
if errors:
|
||||
return errors
|
||||
file_refs = salt.client.ssh.state.lowstate_file_refs(
|
||||
chunks,
|
||||
salt.client.ssh.wrapper.state._merge_extra_filerefs(
|
||||
|
|
|
@ -1797,7 +1797,11 @@ def show_states(queue=None, **kwargs):
|
|||
if not isinstance(s, dict):
|
||||
_set_retcode(result)
|
||||
return result
|
||||
states[s["__sls__"]] = True
|
||||
# The isinstance check ensures s is a dict,
|
||||
# so disable the error pylint incorrectly gives:
|
||||
# [E1126(invalid-sequence-index), show_states]
|
||||
# Sequence index is not an int, slice, or instance with __index__
|
||||
states[s["__sls__"]] = True # pylint: disable=E1126
|
||||
finally:
|
||||
st_.pop_active()
|
||||
|
||||
|
@ -1915,7 +1919,9 @@ def sls_id(id_, mods, test=None, queue=None, state_events=None, **kwargs):
|
|||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
chunks = st_.state.compile_high_data(high_)
|
||||
chunks, errors = st_.state.compile_high_data(high_)
|
||||
if errors:
|
||||
return errors
|
||||
ret = {}
|
||||
for chunk in chunks:
|
||||
if chunk.get("__id__", "") == id_:
|
||||
|
@ -2023,7 +2029,9 @@ def show_low_sls(mods, test=None, queue=None, **kwargs):
|
|||
if errors:
|
||||
__context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR
|
||||
return errors
|
||||
ret = st_.state.compile_high_data(high_)
|
||||
ret, errors = st_.state.compile_high_data(high_)
|
||||
if errors:
|
||||
return errors
|
||||
# Work around Windows multiprocessing bug, set __opts__['test'] back to
|
||||
# value from before this function was run.
|
||||
__opts__["test"] = orig_test
|
||||
|
@ -2378,15 +2386,19 @@ def pkg(pkg_path, pkg_sum, hash_type, test=None, **kwargs):
|
|||
continue
|
||||
popts["file_roots"][fn_] = [full]
|
||||
st_ = salt.state.State(popts, pillar_override=pillar_override)
|
||||
snapper_pre = _snapper_pre(popts, kwargs.get("__pub_jid", "called localy"))
|
||||
ret = st_.call_chunks(lowstate)
|
||||
ret = st_.call_listen(lowstate, ret)
|
||||
snapper_pre = _snapper_pre(popts, kwargs.get("__pub_jid", "called locally"))
|
||||
chunks, errors = st_.order_chunks(lowstate)
|
||||
if errors:
|
||||
ret = errors
|
||||
else:
|
||||
ret = st_.call_chunks(chunks)
|
||||
ret = st_.call_listen(chunks, ret)
|
||||
try:
|
||||
shutil.rmtree(root)
|
||||
except OSError:
|
||||
pass
|
||||
_set_retcode(ret)
|
||||
_snapper_post(popts, kwargs.get("__pub_jid", "called localy"), snapper_pre)
|
||||
_snapper_post(popts, kwargs.get("__pub_jid", "called locally"), snapper_pre)
|
||||
return ret
|
||||
|
||||
|
||||
|
|
1733
salt/state.py
1733
salt/state.py
File diff suppressed because it is too large
Load diff
|
@ -3597,8 +3597,6 @@ def mod_aggregate(low, chunks, running):
|
|||
The mod_aggregate function which looks up all packages in the available
|
||||
low chunks and merges them into a single pkgs ref in the present low data
|
||||
"""
|
||||
pkgs = []
|
||||
pkg_type = None
|
||||
agg_enabled = [
|
||||
"installed",
|
||||
"latest",
|
||||
|
@ -3607,6 +3605,9 @@ def mod_aggregate(low, chunks, running):
|
|||
]
|
||||
if low.get("fun") not in agg_enabled:
|
||||
return low
|
||||
is_sources = "sources" in low
|
||||
# use a dict instead of a set to maintain insertion order
|
||||
pkgs = {}
|
||||
for chunk in chunks:
|
||||
tag = __utils__["state.gen_tag"](chunk)
|
||||
if tag in running:
|
||||
|
@ -3621,42 +3622,52 @@ def mod_aggregate(low, chunks, running):
|
|||
# Check for the same repo
|
||||
if chunk.get("fromrepo") != low.get("fromrepo"):
|
||||
continue
|
||||
# If hold exists in the chunk, do not add to aggregation
|
||||
# otherwise all packages will be held or unheld.
|
||||
# setting a package to be held/unheld is not as
|
||||
# time consuming as installing/uninstalling.
|
||||
if "hold" in chunk:
|
||||
continue
|
||||
# Check first if 'sources' was passed so we don't aggregate pkgs
|
||||
# and sources together.
|
||||
if "sources" in chunk:
|
||||
if pkg_type is None:
|
||||
pkg_type = "sources"
|
||||
if pkg_type == "sources":
|
||||
pkgs.extend(chunk["sources"])
|
||||
if is_sources and "sources" in chunk:
|
||||
_combine_pkgs(pkgs, chunk["sources"])
|
||||
chunk["__agg__"] = True
|
||||
elif not is_sources:
|
||||
# Pull out the pkg names!
|
||||
if "pkgs" in chunk:
|
||||
_combine_pkgs(pkgs, chunk["pkgs"])
|
||||
chunk["__agg__"] = True
|
||||
else:
|
||||
# If hold exists in the chunk, do not add to aggregation
|
||||
# otherwise all packages will be held or unheld.
|
||||
# setting a package to be held/unheld is not as
|
||||
# time consuming as installing/uninstalling.
|
||||
if "hold" not in chunk:
|
||||
if pkg_type is None:
|
||||
pkg_type = "pkgs"
|
||||
if pkg_type == "pkgs":
|
||||
# Pull out the pkg names!
|
||||
if "pkgs" in chunk:
|
||||
pkgs.extend(chunk["pkgs"])
|
||||
chunk["__agg__"] = True
|
||||
elif "name" in chunk:
|
||||
version = chunk.pop("version", None)
|
||||
if version is not None:
|
||||
pkgs.append({chunk["name"]: version})
|
||||
else:
|
||||
pkgs.append(chunk["name"])
|
||||
chunk["__agg__"] = True
|
||||
if pkg_type is not None and pkgs:
|
||||
if pkg_type in low:
|
||||
low[pkg_type].extend(pkgs)
|
||||
else:
|
||||
low[pkg_type] = pkgs
|
||||
elif "name" in chunk:
|
||||
version = chunk.pop("version", None)
|
||||
pkgs.setdefault(chunk["name"], set()).add(version)
|
||||
chunk["__agg__"] = True
|
||||
if pkgs:
|
||||
pkg_type = "sources" if is_sources else "pkgs"
|
||||
low_pkgs = {}
|
||||
_combine_pkgs(low_pkgs, low.get(pkg_type, []))
|
||||
for pkg, values in pkgs.items():
|
||||
low_pkgs.setdefault(pkg, {None}).update(values)
|
||||
# the value is the version for pkgs and
|
||||
# the URI for sources
|
||||
low_pkgs_list = [
|
||||
name if value is None else {name: value}
|
||||
for name, values in pkgs.items()
|
||||
for value in values
|
||||
]
|
||||
low[pkg_type] = low_pkgs_list
|
||||
return low
|
||||
|
||||
|
||||
def _combine_pkgs(pkgs_dict, additional_pkgs_list):
|
||||
for item in additional_pkgs_list:
|
||||
if isinstance(item, str):
|
||||
pkgs_dict.setdefault(item, {None})
|
||||
else:
|
||||
for pkg, version in item:
|
||||
pkgs_dict.setdefault(pkg, {None}).add(version)
|
||||
|
||||
|
||||
def mod_watch(name, **kwargs):
|
||||
"""
|
||||
Install/reinstall a package based on a watch requisite
|
||||
|
|
|
@ -352,7 +352,7 @@ def mod_watch(name, sfun=None, **kwargs):
|
|||
"""
|
||||
has_changes = []
|
||||
if "__reqs__" in __low__:
|
||||
for req in __low__["__reqs__"]["watch"]:
|
||||
for req in __low__["__reqs__"].get("watch", []):
|
||||
tag = _gen_tag(req)
|
||||
if __running__[tag]["changes"]:
|
||||
has_changes.append("{state}: {__id__}".format(**req))
|
||||
|
|
|
@ -137,7 +137,10 @@ class ThorState(salt.state.HighState):
|
|||
err += self.state.verify_high(high)
|
||||
if err:
|
||||
raise SaltRenderError(err)
|
||||
return self.state.compile_high_data(high)
|
||||
chunks, errors = self.state.compile_high_data(high)
|
||||
if errors:
|
||||
raise SaltRenderError(errors)
|
||||
return chunks
|
||||
|
||||
def get_events(self):
|
||||
"""
|
||||
|
|
|
@ -59,7 +59,7 @@ import hashlib
|
|||
import logging
|
||||
import os
|
||||
import time
|
||||
from collections.abc import MutableMapping
|
||||
from collections.abc import Iterable, MutableMapping
|
||||
|
||||
import tornado.ioloop
|
||||
import tornado.iostream
|
||||
|
@ -186,17 +186,23 @@ def tagify(suffix="", prefix="", base=SALT):
|
|||
|
||||
"""
|
||||
parts = [base, TAGS.get(prefix, prefix)]
|
||||
if hasattr(suffix, "append"): # list so extend parts
|
||||
if isinstance(suffix, Iterable) and not isinstance(
|
||||
suffix, str
|
||||
): # list so extend parts
|
||||
parts.extend(suffix)
|
||||
else: # string so append
|
||||
parts.append(suffix)
|
||||
|
||||
for index, _ in enumerate(parts):
|
||||
str_parts = []
|
||||
for part in parts:
|
||||
part_str = None
|
||||
try:
|
||||
parts[index] = salt.utils.stringutils.to_str(parts[index])
|
||||
part_str = salt.utils.stringutils.to_str(part)
|
||||
except TypeError:
|
||||
parts[index] = str(parts[index])
|
||||
return TAGPARTER.join([part for part in parts if part])
|
||||
part_str = str(part)
|
||||
if part_str:
|
||||
str_parts.append(part_str)
|
||||
return TAGPARTER.join(str_parts)
|
||||
|
||||
|
||||
class SaltEvent:
|
||||
|
|
|
@ -188,7 +188,16 @@ class Reactor(salt.utils.process.SignalHandlingProcess, salt.state.Compiler):
|
|||
reactors,
|
||||
)
|
||||
return [] # We'll return nothing since there was an error
|
||||
chunks = self.order_chunks(self.compile_high_data(high))
|
||||
chunks, errors = self.compile_high_data(high)
|
||||
if errors:
|
||||
log.error(
|
||||
"Unable to render reactions for event %s due to "
|
||||
"errors (%s) in one or more of the sls files (%s)",
|
||||
tag,
|
||||
errors,
|
||||
reactors,
|
||||
)
|
||||
return [] # We'll return nothing since there was an error
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
log.exception("Exception encountered while compiling reactions")
|
||||
|
||||
|
|
469
salt/utils/requisite.py
Normal file
469
salt/utils/requisite.py
Normal file
|
@ -0,0 +1,469 @@
|
|||
"""
|
||||
The classes and functions in this module are related to requisite
|
||||
handling and ordering of chunks by the State compiler
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import fnmatch
|
||||
import logging
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from collections.abc import Generator, Iterable, Sequence
|
||||
from enum import Enum, auto
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import networkx as nx # pylint: disable=3rd-party-module-not-gated
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# See https://docs.saltproject.io/en/latest/ref/states/layers.html for details on the naming
|
||||
LowChunk = dict[str, Any]
|
||||
|
||||
if TYPE_CHECKING or sys.version_info >= (3, 10):
|
||||
staticmethod_hack = staticmethod
|
||||
else:
|
||||
# Python < 3.10 does not support calling static methods directly from the class body
|
||||
# as is the case with enum _generate_next_value_.
|
||||
# Since @staticmethod is only added for static type checking, substitute a dummy decorator.
|
||||
def staticmethod_hack(f):
|
||||
return f
|
||||
|
||||
|
||||
def _gen_tag(low: LowChunk) -> str:
|
||||
"""
|
||||
Generate the a unique identifer tag string from the low data structure
|
||||
"""
|
||||
return "{0[state]}_|-{0[__id__]}_|-{0[name]}_|-{0[fun]}".format(low)
|
||||
|
||||
|
||||
def trim_req(req: dict[str, Any]) -> dict[str, Any]:
|
||||
"""
|
||||
Trim any function off of a requisite reference
|
||||
"""
|
||||
reqfirst, valfirst = next(iter(req.items()))
|
||||
if "." in reqfirst:
|
||||
return {reqfirst.split(".", maxsplit=1)[0]: valfirst}
|
||||
return req
|
||||
|
||||
|
||||
class RequisiteType(str, Enum):
|
||||
"""Types of direct requisites"""
|
||||
|
||||
# Once salt no longer needs to support python < 3.10,
|
||||
# remove this hack and use @staticmethod
|
||||
@staticmethod_hack
|
||||
def _generate_next_value_(
|
||||
name: str, start: int, count: int, last_values: list[Any]
|
||||
) -> tuple[str, int]:
|
||||
return name.lower(), count
|
||||
|
||||
def __new__(cls, value, weight):
|
||||
member = str.__new__(cls, value)
|
||||
member._value_ = value
|
||||
member.weight = weight
|
||||
return member
|
||||
|
||||
def __init__(self, value, weight):
|
||||
super().__init__()
|
||||
self._value_ = value
|
||||
self.weight = weight
|
||||
|
||||
# The items here are listed in order of precedence for determining
|
||||
# the order of execution, so do not change the order unless you
|
||||
# are intentionally changing the precedence
|
||||
ONFAIL = auto()
|
||||
ONFAIL_ANY = auto()
|
||||
ONFAIL_ALL = auto()
|
||||
REQUIRE = auto()
|
||||
REQUIRE_ANY = auto()
|
||||
ONCHANGES = auto()
|
||||
ONCHANGES_ANY = auto()
|
||||
WATCH = auto()
|
||||
WATCH_ANY = auto()
|
||||
PREREQ = auto()
|
||||
PREREQUIRED = auto()
|
||||
LISTEN = auto()
|
||||
|
||||
|
||||
class DependencyGraph:
|
||||
"""
|
||||
Class used to track dependencies (requisites) among salt states.
|
||||
|
||||
This class utilizes a Directed Acyclic Graph to determine the
|
||||
ordering of states. The nodes represent the individual states that
|
||||
can be depended on and edges represent the types of requisites
|
||||
between the states.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.dag = nx.MultiDiGraph()
|
||||
# a mapping to node_id to be able to find nodes with
|
||||
# specific state type (module name), names, and/or IDs
|
||||
self.nodes_lookup_map: dict[tuple[str, str], set[str]] = {}
|
||||
self.sls_to_nodes: dict[str, set[str]] = {}
|
||||
|
||||
def _add_prereq(self, node_tag: str, req_tag: str):
|
||||
# the prerequiring chunk is the state declaring the prereq
|
||||
# requisite; the prereq/prerequired state is the one that is
|
||||
# declared in the requisite prereq statement
|
||||
self.dag.nodes[node_tag]["chunk"]["__prerequiring__"] = True
|
||||
prereq_chunk = self.dag.nodes[req_tag]["chunk"]
|
||||
# set __prereq__ true to run the state in test mode
|
||||
prereq_chunk["__prereq__"] = True
|
||||
prereq_check_node = self._get_prereq_node_tag(req_tag)
|
||||
if not self.dag.nodes.get(prereq_check_node):
|
||||
self.dag.add_node(
|
||||
prereq_check_node, chunk=prereq_chunk, state=prereq_chunk["state"]
|
||||
)
|
||||
# all the dependencies of the node for the prerequired
|
||||
# chunk also need to be applied to its prereq check node
|
||||
for dependency_node, _, req_type, data in self.dag.in_edges(
|
||||
req_tag, data=True, keys=True
|
||||
):
|
||||
if req_type != RequisiteType.PREREQ:
|
||||
self.dag.add_edge(
|
||||
dependency_node, prereq_check_node, req_type, **data
|
||||
)
|
||||
self.dag.add_edge(prereq_check_node, node_tag, RequisiteType.PREREQ)
|
||||
self.dag.add_edge(node_tag, req_tag, RequisiteType.REQUIRE)
|
||||
|
||||
def _add_reqs(
|
||||
self,
|
||||
node_tag: str,
|
||||
has_preq_node: bool,
|
||||
req_type: RequisiteType,
|
||||
req_tags: Iterable[str],
|
||||
) -> None:
|
||||
for req_tag in req_tags:
|
||||
if req_type == RequisiteType.PREREQ:
|
||||
self._add_prereq(node_tag, req_tag)
|
||||
else:
|
||||
if has_preq_node:
|
||||
# if the low chunk is set to run in test mode for a
|
||||
# prereq check then also add the requisites to the
|
||||
# prereq node.
|
||||
prereq_node_tag = self._get_prereq_node_tag(node_tag)
|
||||
self.dag.add_edge(req_tag, prereq_node_tag, key=req_type)
|
||||
self.dag.add_edge(req_tag, node_tag, key=req_type)
|
||||
|
||||
def _copy_edges(self, source: str, dest: str) -> None:
|
||||
"""Add the edges from source node to dest node"""
|
||||
for dependency, _, req_type, data in self.dag.in_edges(
|
||||
source, data=True, keys=True
|
||||
):
|
||||
self.dag.add_edge(dependency, dest, req_type, **data)
|
||||
for _, dependent, req_type, data in self.dag.out_edges(
|
||||
source, data=True, keys=True
|
||||
):
|
||||
self.dag.add_edge(dest, dependent, req_type, **data)
|
||||
|
||||
def _get_chunk_order(self, cap: int, node: str) -> tuple[int | float, int | float]:
|
||||
dag = self.dag
|
||||
stack: list[tuple[str, bool, int | float, int | float]] = [
|
||||
# node, is_processing_children, child_min, req_order
|
||||
(node, False, float("inf"), float("-inf"))
|
||||
]
|
||||
order = cap
|
||||
while stack:
|
||||
node, is_processing_children, child_min, req_order = stack[-1]
|
||||
node_data = dag.nodes[node]
|
||||
chunk = node_data.get("chunk", {})
|
||||
if not is_processing_children: # initial stage
|
||||
order = chunk.get("order")
|
||||
if order is None or not isinstance(order, (int, float)):
|
||||
if order == "last":
|
||||
order = cap + 1000000
|
||||
elif order == "first":
|
||||
order = 0
|
||||
else:
|
||||
order = cap
|
||||
chunk["order"] = order
|
||||
name_order = chunk.pop("name_order", 0)
|
||||
if name_order:
|
||||
order += name_order / 10000.0
|
||||
chunk["order"] = order
|
||||
if order < 0:
|
||||
order += cap + 1000000
|
||||
chunk["order"] = order
|
||||
stack.pop()
|
||||
# update stage
|
||||
stack.append((node, True, child_min, req_order))
|
||||
else: # after processing node
|
||||
child_min_node = node_data.get("child_min")
|
||||
if child_min_node is None:
|
||||
for _, child, req_type in dag.out_edges(node, keys=True):
|
||||
if req_order <= req_type.weight:
|
||||
req_order = req_type.weight
|
||||
child_order = (
|
||||
dag.nodes[child]
|
||||
.get("chunk", {})
|
||||
.get("order", float("inf"))
|
||||
)
|
||||
child_min = min(child_min, child_order)
|
||||
node_data["child_min"] = child_min
|
||||
if order > child_min:
|
||||
order = child_min
|
||||
stack.pop()
|
||||
return (order, chunk["order"])
|
||||
|
||||
def _get_prereq_node_tag(self, low_tag: str):
|
||||
return f"{low_tag}_|-__prereq_test__"
|
||||
|
||||
def _is_fnmatch_pattern(self, value: str) -> bool:
|
||||
return any(char in value for char in ("*", "?", "[", "]"))
|
||||
|
||||
def _chunk_str(self, chunk: LowChunk) -> str:
|
||||
node_dict = {
|
||||
"SLS": chunk["__sls__"],
|
||||
"ID": chunk["__id__"],
|
||||
}
|
||||
if chunk["__id__"] != chunk["name"]:
|
||||
node_dict["NAME"] = chunk["name"]
|
||||
return str(node_dict)
|
||||
|
||||
def add_chunk(self, low: LowChunk, allow_aggregate: bool) -> None:
|
||||
node_id = _gen_tag(low)
|
||||
self.dag.add_node(
|
||||
node_id, allow_aggregate=allow_aggregate, chunk=low, state=low["state"]
|
||||
)
|
||||
self.nodes_lookup_map.setdefault((low["state"], low["name"]), set()).add(
|
||||
node_id
|
||||
)
|
||||
self.nodes_lookup_map.setdefault((low["state"], low["__id__"]), set()).add(
|
||||
node_id
|
||||
)
|
||||
self.nodes_lookup_map.setdefault(("id", low["__id__"]), set()).add(node_id)
|
||||
self.nodes_lookup_map.setdefault(("id", low["name"]), set()).add(node_id)
|
||||
if sls := low.get("__sls__"):
|
||||
self.sls_to_nodes.setdefault(sls, set()).add(node_id)
|
||||
if sls_included_from := low.get("__sls_included_from__"):
|
||||
for sls in sls_included_from:
|
||||
self.sls_to_nodes.setdefault(sls, set()).add(node_id)
|
||||
|
||||
def add_dependency(
|
||||
self, low: LowChunk, req_type: RequisiteType, req_key: str, req_val: str
|
||||
) -> bool:
|
||||
found = False
|
||||
prereq_tag = None
|
||||
has_prereq_node = low.get("__prereq__", False)
|
||||
if req_key == "sls":
|
||||
# Allow requisite tracking of entire sls files
|
||||
if self._is_fnmatch_pattern(req_val):
|
||||
found = True
|
||||
node_tag = _gen_tag(low)
|
||||
for sls, req_tags in self.sls_to_nodes.items():
|
||||
if fnmatch.fnmatch(sls, req_val):
|
||||
found = True
|
||||
self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
|
||||
else:
|
||||
node_tag = _gen_tag(low)
|
||||
if req_tags := self.sls_to_nodes.get(req_val, []):
|
||||
found = True
|
||||
self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
|
||||
elif self._is_fnmatch_pattern(req_val):
|
||||
# This iterates over every chunk to check
|
||||
# if any match instead of doing a look up since
|
||||
# it has to support wildcard matching.
|
||||
node_tag = _gen_tag(low)
|
||||
for (state_type, name_or_id), req_tags in self.nodes_lookup_map.items():
|
||||
if req_key == state_type and (fnmatch.fnmatch(name_or_id, req_val)):
|
||||
found = True
|
||||
self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
|
||||
elif req_tags := self.nodes_lookup_map.get((req_key, req_val)):
|
||||
found = True
|
||||
node_tag = _gen_tag(low)
|
||||
self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
|
||||
return found
|
||||
|
||||
def add_requisites(self, low: LowChunk, disabled_reqs: Sequence[str]) -> str | None:
|
||||
"""
|
||||
Add all the dependency requisites of the low chunk as edges to the DAG
|
||||
:return: an error string if there was an error otherwise None
|
||||
"""
|
||||
present = False
|
||||
for req_type in RequisiteType:
|
||||
if req_type.value in low:
|
||||
present = True
|
||||
break
|
||||
if not present:
|
||||
return None
|
||||
reqs = {
|
||||
rtype: []
|
||||
for rtype in (
|
||||
RequisiteType.REQUIRE,
|
||||
RequisiteType.REQUIRE_ANY,
|
||||
RequisiteType.WATCH,
|
||||
RequisiteType.WATCH_ANY,
|
||||
RequisiteType.PREREQ,
|
||||
RequisiteType.ONFAIL,
|
||||
RequisiteType.ONFAIL_ANY,
|
||||
RequisiteType.ONFAIL_ALL,
|
||||
RequisiteType.ONCHANGES,
|
||||
RequisiteType.ONCHANGES_ANY,
|
||||
)
|
||||
}
|
||||
for r_type in reqs:
|
||||
if low_reqs := low.get(r_type.value):
|
||||
if r_type in disabled_reqs:
|
||||
log.warning("The %s requisite has been disabled, Ignoring.", r_type)
|
||||
continue
|
||||
for req_ref in low_reqs:
|
||||
if isinstance(req_ref, str):
|
||||
req_ref = {"id": req_ref}
|
||||
req_ref = trim_req(req_ref)
|
||||
# req_key: match state module name
|
||||
# req_val: match state id or name
|
||||
req_key, req_val = next(iter(req_ref.items()))
|
||||
if req_val is None:
|
||||
continue
|
||||
if not isinstance(req_val, str):
|
||||
return (
|
||||
f"Requisite [{r_type}: {req_key}] in state"
|
||||
f" [{low['name']}] in SLS [{low.get('__sls__')}]"
|
||||
" must have a string as the value"
|
||||
)
|
||||
found = self.add_dependency(low, r_type, req_key, req_val)
|
||||
if not found:
|
||||
return (
|
||||
"Referenced state does not exist"
|
||||
f" for requisite [{r_type}: ({req_key}: {req_val})] in state"
|
||||
f" [{low['name']}] in SLS [{low.get('__sls__')}]"
|
||||
)
|
||||
return None
|
||||
|
||||
def aggregate_and_order_chunks(self, cap: int) -> list[LowChunk]:
|
||||
"""
|
||||
Aggregate eligible nodes in the dependencies graph.
|
||||
|
||||
Return a list of the chunks in the sorted order in which the
|
||||
chunks should be executed.
|
||||
Nodes are eligible for aggregation if the state function in the
|
||||
chunks match and aggregation is enabled in the configuration for
|
||||
the state function.
|
||||
:param cap: the maximum order value configured in the states
|
||||
:return: the ordered chunks
|
||||
"""
|
||||
dag: nx.MultiDiGraph = self.dag
|
||||
# dict for tracking topo order and for mapping each node that
|
||||
# was aggregated to the aggregated node that replaces it
|
||||
topo_order = {}
|
||||
|
||||
max_group_size = 500
|
||||
groups_by_type = defaultdict(list)
|
||||
|
||||
def _get_order(node):
|
||||
chunk = dag.nodes[node].get("chunk", {})
|
||||
chunk_label = "{0[state]}{0[name]}{0[fun]}".format(chunk) if chunk else ""
|
||||
chunk_order = self._get_chunk_order(cap, node)
|
||||
return (chunk_order, chunk_label)
|
||||
|
||||
# Iterate over the nodes in topological order to get the correct
|
||||
# ordering which takes requisites into account
|
||||
for node in nx.lexicographical_topological_sort(dag, key=_get_order):
|
||||
topo_order[node] = None
|
||||
data = dag.nodes[node]
|
||||
if not data.get("allow_aggregate"):
|
||||
continue
|
||||
|
||||
node_type = data["state"]
|
||||
added = False
|
||||
for idx, group in enumerate(groups_by_type[node_type]):
|
||||
if len(group) >= max_group_size:
|
||||
continue
|
||||
# Check if the node can be reached from any node in the group
|
||||
first_node = next(iter(group))
|
||||
agg_node = topo_order.get(first_node)
|
||||
# Since we are iterating in topological order we know
|
||||
# that there is no path from the current node to the
|
||||
# node in the group; so we only need to check the path
|
||||
# from the group node to the current node
|
||||
reachable = nx.has_path(dag, agg_node or first_node, node)
|
||||
if not reachable:
|
||||
# If not, add the node to the group
|
||||
if agg_node is None:
|
||||
# there is now more than one node for this
|
||||
# group so aggregate them
|
||||
agg_node = f"__aggregate_{node_type}_{idx}__"
|
||||
dag.add_node(
|
||||
agg_node, state=node_type, aggregated_nodes=group.keys()
|
||||
)
|
||||
# add the edges of the first node in the group to
|
||||
# the aggregate
|
||||
self._copy_edges(first_node, agg_node)
|
||||
dag.nodes[first_node]["aggregate"] = agg_node
|
||||
topo_order[first_node] = agg_node
|
||||
|
||||
self._copy_edges(node, agg_node)
|
||||
dag.nodes[node]["aggregate"] = agg_node
|
||||
topo_order[node] = agg_node
|
||||
group[node] = None
|
||||
added = True
|
||||
break
|
||||
|
||||
# If the node was not added to any set, create a new set
|
||||
if not added:
|
||||
# use a dict instead of set to retain insertion ordering
|
||||
groups_by_type[node_type].append({node: None})
|
||||
|
||||
ordered_chunks = [dag.nodes[node].get("chunk", {}) for node in topo_order]
|
||||
return ordered_chunks
|
||||
|
||||
def find_cycle_edges(self) -> list[tuple[LowChunk, RequisiteType, LowChunk]]:
|
||||
"""
|
||||
Find the cycles if the graph is not a Directed Acyclic Graph
|
||||
"""
|
||||
dag = self.dag
|
||||
try:
|
||||
cycle_edges = []
|
||||
for dependency, dependent, req_type in nx.find_cycle(dag):
|
||||
dependency_chunk = self.dag.nodes[dependency]["chunk"]
|
||||
dependent_chunk = self.dag.nodes[dependent]["chunk"]
|
||||
if (
|
||||
req_type not in dependent_chunk
|
||||
and req_type == RequisiteType.REQUIRE
|
||||
):
|
||||
# show the original prereq requisite for the require edges
|
||||
# added for the prereq
|
||||
req_type = RequisiteType.PREREQ
|
||||
cycle_edges.append((dependent_chunk, req_type, dependency_chunk))
|
||||
return cycle_edges
|
||||
except nx.NetworkXNoCycle:
|
||||
# If the graph is a DAG, return an empty list
|
||||
return []
|
||||
|
||||
def get_aggregate_chunks(self, low: LowChunk) -> list[LowChunk]:
|
||||
"""
|
||||
Get the chunks that were set to be valid for aggregation with
|
||||
this low chunk.
|
||||
"""
|
||||
low_tag = _gen_tag(low)
|
||||
if aggregate_node := self.dag.nodes[low_tag].get("aggregate"):
|
||||
return [
|
||||
self.dag.nodes[node]["chunk"]
|
||||
for node in self.dag.nodes[aggregate_node]["aggregated_nodes"]
|
||||
]
|
||||
return []
|
||||
|
||||
def get_cycles_str(self) -> str:
|
||||
cycle_edges = [
|
||||
f"({self._chunk_str(dependency)}, '{req_type.value}', {self._chunk_str(dependent)})"
|
||||
for dependency, req_type, dependent in self.find_cycle_edges()
|
||||
]
|
||||
return ", ".join(cycle_edges)
|
||||
|
||||
def get_dependencies(
|
||||
self, low: LowChunk
|
||||
) -> Generator[tuple[RequisiteType, LowChunk], None, None]:
|
||||
"""Get the requisite type and low chunk for each dependency of low"""
|
||||
low_tag = _gen_tag(low)
|
||||
if low.get("__prereq__"):
|
||||
# if the low chunk is set to run in test mode for a
|
||||
# prereq check then return the reqs for prereq test node.
|
||||
low_tag = self._get_prereq_node_tag(low_tag)
|
||||
for req_id, _, req_type in self.dag.in_edges(low_tag, keys=True):
|
||||
if chunk := self.dag.nodes[req_id].get("chunk"):
|
||||
yield req_type, chunk
|
||||
else:
|
||||
for node in self.dag.nodes[req_id]["aggregated_nodes"]:
|
||||
yield req_type, self.dag.nodes[node].get("chunk")
|
|
@ -23,6 +23,7 @@ import distro
|
|||
import jinja2
|
||||
import looseversion
|
||||
import msgpack
|
||||
import networkx
|
||||
import packaging
|
||||
import tornado
|
||||
import yaml
|
||||
|
@ -280,6 +281,7 @@ def get_tops_python(py_ver, exclude=None, ext_py_ver=None):
|
|||
"yaml",
|
||||
"tornado",
|
||||
"msgpack",
|
||||
"networkx",
|
||||
"certifi",
|
||||
"singledispatch",
|
||||
"concurrent",
|
||||
|
@ -330,7 +332,7 @@ def get_ext_tops(config):
|
|||
"""
|
||||
config = copy.deepcopy(config) or {}
|
||||
alternatives = {}
|
||||
required = ["jinja2", "yaml", "tornado", "msgpack"]
|
||||
required = ["jinja2", "yaml", "tornado", "msgpack", "networkx"]
|
||||
tops = []
|
||||
for ns, cfg in config.items():
|
||||
alternatives[ns] = cfg
|
||||
|
@ -429,6 +431,7 @@ def get_tops(extra_mods="", so_mods=""):
|
|||
yaml,
|
||||
tornado,
|
||||
msgpack,
|
||||
networkx,
|
||||
certifi,
|
||||
singledispatch,
|
||||
concurrent,
|
||||
|
@ -1035,6 +1038,7 @@ def gen_min(
|
|||
"salt/utils/process.py",
|
||||
"salt/utils/jinja.py",
|
||||
"salt/utils/rsax931.py",
|
||||
"salt/utils/requisite.py",
|
||||
"salt/utils/context.py",
|
||||
"salt/utils/minion.py",
|
||||
"salt/utils/error.py",
|
||||
|
|
|
@ -0,0 +1,123 @@
|
|||
import textwrap
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def minion_config_overrides():
|
||||
return {
|
||||
"file_client": "local",
|
||||
"master_type": "disable",
|
||||
"state_aggregate": True,
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def nop_aggregate_mod(loaders, state_tree):
|
||||
mod_contents = textwrap.dedent(
|
||||
"""
|
||||
__virtualname__ = "aggr"
|
||||
|
||||
|
||||
def __virtual__():
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def test(name, aggrs=None, **kwargs):
|
||||
return {
|
||||
"name": name,
|
||||
"result": True,
|
||||
"comment": "",
|
||||
"changes": {
|
||||
"aggrs": aggrs or [name]
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def mod_aggregate(low, chunks, running):
|
||||
# modeled after the pkg state module
|
||||
aggrs = []
|
||||
for chunk in chunks:
|
||||
tag = __utils__["state.gen_tag"](chunk)
|
||||
if tag in running:
|
||||
# Already ran
|
||||
continue
|
||||
if chunk.get("state") == "aggr":
|
||||
if "__agg__" in chunk:
|
||||
continue
|
||||
# Check for the same function
|
||||
if chunk.get("fun") != low.get("fun"):
|
||||
continue
|
||||
|
||||
if "aggrs" in chunk:
|
||||
aggrs.extend(chunk["aggrs"])
|
||||
chunk["__agg__"] = True
|
||||
elif "name" in chunk:
|
||||
aggrs.append(chunk["name"])
|
||||
chunk["__agg__"] = True
|
||||
if aggrs:
|
||||
if "aggrs" in low:
|
||||
low["aggrs"].extend(aggrs)
|
||||
else:
|
||||
low["aggrs"] = aggrs
|
||||
return low
|
||||
"""
|
||||
)
|
||||
with pytest.helpers.temp_file("aggrs.py", mod_contents, state_tree / "_states"):
|
||||
res = loaders.modules.saltutil.sync_all()
|
||||
assert "states" in res
|
||||
assert "states.aggrs" in res["states"]
|
||||
loaders.reload_all()
|
||||
assert hasattr(loaders.states, "aggr")
|
||||
yield
|
||||
loaders.modules.saltutil.sync_all()
|
||||
loaders.reload_all()
|
||||
|
||||
|
||||
def test_aggregate_requisites(state_tree, modules):
|
||||
"""Test to ensure that aggregated states honor requisites"""
|
||||
sls_name = "requisite_aggregate_test"
|
||||
sls_contents = """
|
||||
"packages 1":
|
||||
aggr.test:
|
||||
- aggrs:
|
||||
- hello
|
||||
"listen to packages 2":
|
||||
test.succeed_with_changes:
|
||||
- listen:
|
||||
- "packages 2"
|
||||
"packages 2":
|
||||
aggr:
|
||||
- test
|
||||
- aggrs:
|
||||
- cowsay
|
||||
- fortune-mod
|
||||
- require:
|
||||
- "requirement"
|
||||
"packages 3":
|
||||
aggr.test:
|
||||
- name: cowsay
|
||||
- require:
|
||||
- "test": "requirement"
|
||||
"requirement":
|
||||
test.nop:
|
||||
- name: "requirement_name"
|
||||
"""
|
||||
sls_tempfile = pytest.helpers.temp_file(f"{sls_name}.sls", sls_contents, state_tree)
|
||||
with sls_tempfile:
|
||||
# Apply the state file
|
||||
ret = modules.state.apply(sls_name)
|
||||
|
||||
# Check the results
|
||||
assert not ret.failed
|
||||
expected_order = [
|
||||
"aggr_|-packages 1_|-packages 1_|-test",
|
||||
"test_|-listen to packages 2_|-listen to packages 2_|-succeed_with_changes",
|
||||
"test_|-requirement_|-requirement_name_|-nop",
|
||||
"aggr_|-packages 2_|-packages 2_|-test",
|
||||
"aggr_|-packages 3_|-cowsay_|-test",
|
||||
"test_|-listener_listen to packages 2_|-listen to packages 2_|-mod_watch",
|
||||
]
|
||||
for index, state_run in enumerate(ret):
|
||||
assert state_run.result is True
|
||||
assert expected_order[index] in state_run.raw
|
|
@ -1,7 +1,5 @@
|
|||
import pytest
|
||||
|
||||
from . import normalize_ret
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.windows_whitelisted,
|
||||
pytest.mark.core_test,
|
||||
|
@ -93,13 +91,21 @@ def test_requisites_mixed_require_prereq_use_1(state, state_tree):
|
|||
- prereq:
|
||||
- cmd: B
|
||||
"""
|
||||
expected_result = [
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'}, 'prereq', "
|
||||
"{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'prereq', "
|
||||
"{'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'})"
|
||||
]
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
result = normalize_ret(ret.raw)
|
||||
assert result == expected_simple_result
|
||||
assert ret.failed
|
||||
assert ret.errors == expected_result
|
||||
|
||||
|
||||
@pytest.mark.skip("Undetected infinite loops prevents this test from running...")
|
||||
def test_requisites_mixed_require_prereq_use_2(state, state_tree):
|
||||
sls_contents = """
|
||||
# Complex require/require_in/prereq/preqreq_in graph
|
||||
|
@ -153,47 +159,21 @@ def test_requisites_mixed_require_prereq_use_2(state, state_tree):
|
|||
- require_in:
|
||||
- cmd: A
|
||||
"""
|
||||
expected_result = {
|
||||
"cmd_|-A_|-echo A fifth_|-run": {
|
||||
"__run_num__": 4,
|
||||
"comment": 'Command "echo A fifth" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-B_|-echo B third_|-run": {
|
||||
"__run_num__": 2,
|
||||
"comment": 'Command "echo B third" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-C_|-echo C second_|-run": {
|
||||
"__run_num__": 1,
|
||||
"comment": 'Command "echo C second" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-D_|-echo D first_|-run": {
|
||||
"__run_num__": 0,
|
||||
"comment": 'Command "echo D first" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-E_|-echo E fourth_|-run": {
|
||||
"__run_num__": 3,
|
||||
"comment": 'Command "echo E fourth" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
}
|
||||
# undetected infinite loops prevents this test from running...
|
||||
# TODO: this is actually failing badly
|
||||
expected_result = [
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A fifth'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B third'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C second'}, 'prereq', "
|
||||
"{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A fifth'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B third'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C second'})"
|
||||
]
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
result = normalize_ret(ret.raw)
|
||||
assert result == expected_result
|
||||
assert ret.failed
|
||||
assert ret.errors == expected_result
|
||||
|
||||
|
||||
@pytest.mark.skip("Undetected infinite loops prevents this test from running...")
|
||||
def test_requisites_mixed_require_prereq_use_3(state, state_tree):
|
||||
# test Traceback recursion prereq+require #8785
|
||||
sls_contents = """
|
||||
|
@ -217,15 +197,19 @@ def test_requisites_mixed_require_prereq_use_3(state, state_tree):
|
|||
- prereq:
|
||||
- cmd: A
|
||||
"""
|
||||
expected_result = ['A recursive requisite was found, SLS "requisite" ID "B" ID "A"']
|
||||
# TODO: this is actually failing badly
|
||||
expected_result = [
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'prereq', "
|
||||
"{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})"
|
||||
]
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert isinstance(ret, list) # Error
|
||||
assert ret == expected_result
|
||||
assert ret.failed
|
||||
assert ret.errors == expected_result
|
||||
|
||||
|
||||
@pytest.mark.skip("Undetected infinite loops prevents this test from running...")
|
||||
def test_requisites_mixed_require_prereq_use_4(state, state_tree):
|
||||
# test Infinite recursion prereq+require #8785 v2
|
||||
sls_contents = """
|
||||
|
@ -260,15 +244,19 @@ def test_requisites_mixed_require_prereq_use_4(state, state_tree):
|
|||
- prereq:
|
||||
- cmd: B
|
||||
"""
|
||||
expected_result = ['A recursive requisite was found, SLS "requisite" ID "B" ID "A"']
|
||||
# TODO: this is actually failing badly
|
||||
expected_result = [
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'prereq', "
|
||||
"{'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'})"
|
||||
]
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert isinstance(ret, list) # Error
|
||||
assert ret == expected_result
|
||||
assert ret.failed
|
||||
assert ret.errors == expected_result
|
||||
|
||||
|
||||
@pytest.mark.skip("Undetected infinite loops prevents this test from running...")
|
||||
def test_requisites_mixed_require_prereq_use_5(state, state_tree):
|
||||
# test Infinite recursion prereq+require #8785 v3
|
||||
sls_contents = """
|
||||
|
@ -300,12 +288,17 @@ def test_requisites_mixed_require_prereq_use_5(state, state_tree):
|
|||
- require_in:
|
||||
- cmd: A
|
||||
"""
|
||||
expected_result = ['A recursive requisite was found, SLS "requisite" ID "B" ID "A"']
|
||||
# TODO: this is actually failing badly, and expected result is maybe not a recursion
|
||||
expected_result = [
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'prereq', "
|
||||
"{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'})"
|
||||
]
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert isinstance(ret, list) # Error
|
||||
assert ret == expected_result
|
||||
assert ret.failed
|
||||
assert ret.errors == expected_result
|
||||
|
||||
|
||||
def test_issue_46762_prereqs_on_a_state_with_unfulfilled_requirements(
|
||||
|
@ -451,4 +444,31 @@ def test_requisites_mixed_illegal_req(state_tree):
|
|||
"""
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state_mod.sls("requisite")
|
||||
assert ret == ["Illegal requisite \"['A']\", please check your syntax.\n"]
|
||||
assert ret == [
|
||||
'Illegal requisite "[\'A\']" in SLS "requisite", please check your syntax.\n'
|
||||
]
|
||||
|
||||
|
||||
def test_many_requisites(state, state_tree):
|
||||
"""Test to make sure that many requisites does not take too long"""
|
||||
|
||||
sls_name = "many_aggregates_test"
|
||||
sls_contents = """
|
||||
{%- for i in range(1000) %}
|
||||
nop-{{ i }}:
|
||||
test.nop:
|
||||
{%- if i > 0 %}
|
||||
- require:
|
||||
- test: nop-{{ i - 1 }}
|
||||
{%- else %}
|
||||
- require: []
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
"""
|
||||
with pytest.helpers.temp_file(f"{sls_name}.sls", sls_contents, state_tree):
|
||||
ret = state.sls(sls_name)
|
||||
# Check the results
|
||||
assert not ret.failed
|
||||
for index, state_run in enumerate(ret):
|
||||
expected_tag = f"test_|-nop-{index}_|-nop-{index}_|-nop"
|
||||
assert expected_tag in state_run.raw
|
||||
|
|
|
@ -301,12 +301,23 @@ def test_onchanges_any_recursive_error_issues_50811(state, state_tree):
|
|||
test that onchanges_any does not causes a recursive error
|
||||
"""
|
||||
sls_contents = """
|
||||
command-test:
|
||||
cmd.run:
|
||||
- name: ls
|
||||
unchanged_A:
|
||||
test.succeed_without_changes
|
||||
|
||||
unchanged_B:
|
||||
test.succeed_without_changes
|
||||
|
||||
prereq_on_test_on_changes_any:
|
||||
test.succeed_with_changes:
|
||||
- prereq:
|
||||
- test_on_changes_any
|
||||
|
||||
test_on_changes_any:
|
||||
test.succeed_without_changes:
|
||||
- onchanges_any:
|
||||
- file: /tmp/an-unfollowed-file
|
||||
- unchanged_A
|
||||
- unchanged_B
|
||||
"""
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert ret["command-test"].result is False
|
||||
assert ret["prereq_on_test_on_changes_any"].result is True
|
||||
|
|
|
@ -91,17 +91,13 @@ def test_requisites_prereq_simple_ordering_and_errors_1(state, state_tree):
|
|||
cmd.run:
|
||||
- name: echo C second
|
||||
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail
|
||||
I:
|
||||
cmd.run:
|
||||
test.fail_without_changes:
|
||||
- name: echo I
|
||||
- prereq:
|
||||
- cmd: Z
|
||||
J:
|
||||
cmd.run:
|
||||
test.fail_without_changes:
|
||||
- name: echo J
|
||||
- prereq:
|
||||
- foobar: A
|
||||
"""
|
||||
expected_result = {
|
||||
"cmd_|-A_|-echo A third_|-run": {
|
||||
|
@ -122,19 +118,15 @@ def test_requisites_prereq_simple_ordering_and_errors_1(state, state_tree):
|
|||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-I_|-echo I_|-run": {
|
||||
"test_|-I_|-echo I_|-fail_without_changes": {
|
||||
"__run_num__": 3,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " prereq:\n"
|
||||
+ " cmd: Z\n",
|
||||
"comment": "Failure!",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
"cmd_|-J_|-echo J_|-run": {
|
||||
"test_|-J_|-echo J_|-fail_without_changes": {
|
||||
"__run_num__": 4,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " prereq:\n"
|
||||
+ " foobar: A\n",
|
||||
"comment": "Failure!",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
|
@ -224,12 +216,10 @@ def test_requisites_prereq_simple_ordering_and_errors_3(state, state_tree):
|
|||
cmd.run:
|
||||
- name: echo C second
|
||||
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail
|
||||
I:
|
||||
cmd.run:
|
||||
test.fail_without_changes:
|
||||
- name: echo I
|
||||
- prereq:
|
||||
- Z
|
||||
"""
|
||||
expected_result = {
|
||||
"cmd_|-A_|-echo A third_|-run": {
|
||||
|
@ -250,11 +240,9 @@ def test_requisites_prereq_simple_ordering_and_errors_3(state, state_tree):
|
|||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-I_|-echo I_|-run": {
|
||||
"test_|-I_|-echo I_|-fail_without_changes": {
|
||||
"__run_num__": 3,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " prereq:\n"
|
||||
+ " id: Z\n",
|
||||
"comment": "Failure!",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
|
@ -270,7 +258,7 @@ def test_requisites_prereq_simple_ordering_and_errors_4(state, state_tree):
|
|||
"""
|
||||
Call sls file containing several prereq_in and prereq.
|
||||
|
||||
Ensure that some of them are failing and that the order is right.
|
||||
Ensure that the order is right.
|
||||
"""
|
||||
sls_contents = """
|
||||
# Theory:
|
||||
|
@ -427,13 +415,14 @@ def test_requisites_prereq_simple_ordering_and_errors_6(state, state_tree):
|
|||
sls_contents = """
|
||||
# issue #8211
|
||||
# expected rank
|
||||
# B --+ 1
|
||||
# |
|
||||
# C <-+ ----+ 2/3
|
||||
# |
|
||||
# D ---+ | 3/2
|
||||
# | |
|
||||
# A <--+ <--+ 4
|
||||
#
|
||||
# D --+ -------+ 1
|
||||
# |
|
||||
# B --+ | 2
|
||||
# | |
|
||||
# C <-+ --+ | 3
|
||||
# | |
|
||||
# A <-----+ <--+ 4
|
||||
#
|
||||
# resulting rank
|
||||
# D --+
|
||||
|
@ -489,19 +478,19 @@ def test_requisites_prereq_simple_ordering_and_errors_6(state, state_tree):
|
|||
"changes": True,
|
||||
},
|
||||
"cmd_|-B_|-echo B first_|-run": {
|
||||
"__run_num__": 0,
|
||||
"__run_num__": 1,
|
||||
"comment": 'Command "echo B first" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-C_|-echo C second_|-run": {
|
||||
"__run_num__": 1,
|
||||
"__run_num__": 2,
|
||||
"comment": 'Command "echo C second" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-D_|-echo D third_|-run": {
|
||||
"__run_num__": 2,
|
||||
"__run_num__": 0,
|
||||
"comment": 'Command "echo D third" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
|
@ -522,7 +511,7 @@ def test_requisites_prereq_simple_ordering_and_errors_7(state, state_tree):
|
|||
"""
|
||||
sls_contents = """
|
||||
# will fail with 'Cannot extend ID Z (...) not part of the high state.'
|
||||
# and not "The following requisites were not found" like in yaml list syntax
|
||||
# and not "Referenced state does not exist for requisite" like in yaml list syntax
|
||||
I:
|
||||
cmd.run:
|
||||
- name: echo I
|
||||
|
@ -530,13 +519,14 @@ def test_requisites_prereq_simple_ordering_and_errors_7(state, state_tree):
|
|||
- cmd: Z
|
||||
"""
|
||||
errmsg = (
|
||||
"The following requisites were not found:\n"
|
||||
" prereq:\n"
|
||||
" cmd: Z\n"
|
||||
"Referenced state does not exist for requisite "
|
||||
"[prereq: (cmd: Z)] in state "
|
||||
"[echo I] in SLS [requisite]"
|
||||
)
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert ret["cmd_|-I_|-echo I_|-run"].comment == errmsg
|
||||
assert ret.failed
|
||||
assert ret.errors == [errmsg]
|
||||
|
||||
|
||||
def test_requisites_prereq_simple_ordering_and_errors_8(state, state_tree):
|
||||
|
@ -557,13 +547,14 @@ def test_requisites_prereq_simple_ordering_and_errors_8(state, state_tree):
|
|||
- foobar: A
|
||||
"""
|
||||
errmsg = (
|
||||
"The following requisites were not found:\n"
|
||||
" prereq:\n"
|
||||
" foobar: A\n"
|
||||
"Referenced state does not exist for requisite "
|
||||
"[prereq: (foobar: A)] in state "
|
||||
"[echo B] in SLS [requisite]"
|
||||
)
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert ret["cmd_|-B_|-echo B_|-run"].comment == errmsg
|
||||
assert ret.failed
|
||||
assert ret.errors == [errmsg]
|
||||
|
||||
|
||||
def test_requisites_prereq_simple_ordering_and_errors_9(state, state_tree):
|
||||
|
@ -584,21 +575,52 @@ def test_requisites_prereq_simple_ordering_and_errors_9(state, state_tree):
|
|||
- foobar: C
|
||||
"""
|
||||
errmsg = (
|
||||
"The following requisites were not found:\n"
|
||||
" prereq:\n"
|
||||
" foobar: C\n"
|
||||
"Referenced state does not exist for requisite "
|
||||
"[prereq: (foobar: C)] in state "
|
||||
"[echo B] in SLS [requisite]"
|
||||
)
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert ret["cmd_|-B_|-echo B_|-run"].comment == errmsg
|
||||
assert ret.failed
|
||||
assert ret.errors == [errmsg]
|
||||
|
||||
|
||||
@pytest.mark.skip("issue #8210 : prereq recursion undetected")
|
||||
def test_requisites_prereq_simple_ordering_and_errors_10(state, state_tree):
|
||||
"""
|
||||
Call sls file containing several prereq_in and prereq.
|
||||
Call sls file containing several prereq.
|
||||
|
||||
Ensure that some of them are failing and that the order is right.
|
||||
Ensure a recursive requisite error occurs.
|
||||
"""
|
||||
sls_contents = """
|
||||
A:
|
||||
cmd.run:
|
||||
- name: echo A
|
||||
- prereq:
|
||||
- cmd: B
|
||||
B:
|
||||
cmd.run:
|
||||
- name: echo B
|
||||
- prereq:
|
||||
- cmd: A
|
||||
"""
|
||||
errmsg = (
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, "
|
||||
"'prereq', {'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, "
|
||||
"'prereq', {'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})"
|
||||
)
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert ret.failed
|
||||
assert ret.errors == [errmsg]
|
||||
|
||||
|
||||
def test_requisites_prereq_in_simple_ordering_and_errors(state, state_tree):
|
||||
"""
|
||||
Call sls file containing several prereq_in.
|
||||
|
||||
Ensure a recursive requisite error occurs.
|
||||
"""
|
||||
sls_contents = """
|
||||
A:
|
||||
|
@ -613,8 +635,11 @@ def test_requisites_prereq_simple_ordering_and_errors_10(state, state_tree):
|
|||
- cmd: A
|
||||
"""
|
||||
errmsg = (
|
||||
'A recursive requisite was found, SLS "requisites.prereq_recursion_error" ID'
|
||||
' "B" ID "A"'
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, "
|
||||
"'prereq', {'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, "
|
||||
"'prereq', {'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})"
|
||||
)
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
|
|
|
@ -61,9 +61,9 @@ def test_requisites_full_sls_require(state, state_tree):
|
|||
|
||||
def test_requisites_require_no_state_module(state, state_tree):
|
||||
"""
|
||||
Call sls file containing several require_in and require.
|
||||
Call sls file containing several require_in and require with a missing req.
|
||||
|
||||
Ensure that some of them are failing and that the order is right.
|
||||
Ensure an error is given.
|
||||
"""
|
||||
sls_contents = """
|
||||
# Complex require/require_in graph
|
||||
|
@ -111,135 +111,42 @@ def test_requisites_require_no_state_module(state, state_tree):
|
|||
- require_in:
|
||||
- A
|
||||
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail with "Referenced state does not exist for requisite"
|
||||
G:
|
||||
cmd.run:
|
||||
- name: echo G
|
||||
- require:
|
||||
- Z
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail with "Referenced state does not exist for requisite"
|
||||
H:
|
||||
cmd.run:
|
||||
- name: echo H
|
||||
- require:
|
||||
- Z
|
||||
"""
|
||||
expected_result = {
|
||||
"cmd_|-A_|-echo A fifth_|-run": {
|
||||
"__run_num__": 4,
|
||||
"comment": 'Command "echo A fifth" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-B_|-echo B second_|-run": {
|
||||
"__run_num__": 1,
|
||||
"comment": 'Command "echo B second" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-C_|-echo C third_|-run": {
|
||||
"__run_num__": 2,
|
||||
"comment": 'Command "echo C third" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-D_|-echo D first_|-run": {
|
||||
"__run_num__": 0,
|
||||
"comment": 'Command "echo D first" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-E_|-echo E fourth_|-run": {
|
||||
"__run_num__": 3,
|
||||
"comment": 'Command "echo E fourth" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-G_|-echo G_|-run": {
|
||||
"__run_num__": 5,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " require:\n"
|
||||
+ " id: Z\n",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
"cmd_|-H_|-echo H_|-run": {
|
||||
"__run_num__": 6,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " require:\n"
|
||||
+ " id: Z\n",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
}
|
||||
errmsgs = [
|
||||
(
|
||||
"Referenced state does not exist for requisite [require: (id: Z)]"
|
||||
" in state [echo G] in SLS [requisite]"
|
||||
),
|
||||
(
|
||||
"Referenced state does not exist for requisite [require: (id: Z)]"
|
||||
" in state [echo H] in SLS [requisite]"
|
||||
),
|
||||
]
|
||||
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
result = normalize_ret(ret.raw)
|
||||
assert result == expected_result
|
||||
assert ret.failed
|
||||
assert ret.errors == errmsgs
|
||||
|
||||
|
||||
def test_requisites_require_ordering_and_errors_1(state, state_tree):
|
||||
"""
|
||||
Call sls file containing several require_in and require.
|
||||
|
||||
Ensure that some of them are failing and that the order is right.
|
||||
Ensure there are errors due to requisites.
|
||||
"""
|
||||
expected_result = {
|
||||
"cmd_|-A_|-echo A fifth_|-run": {
|
||||
"__run_num__": 4,
|
||||
"comment": 'Command "echo A fifth" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-B_|-echo B second_|-run": {
|
||||
"__run_num__": 1,
|
||||
"comment": 'Command "echo B second" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-C_|-echo C third_|-run": {
|
||||
"__run_num__": 2,
|
||||
"comment": 'Command "echo C third" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-D_|-echo D first_|-run": {
|
||||
"__run_num__": 0,
|
||||
"comment": 'Command "echo D first" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-E_|-echo E fourth_|-run": {
|
||||
"__run_num__": 3,
|
||||
"comment": 'Command "echo E fourth" run',
|
||||
"result": True,
|
||||
"changes": True,
|
||||
},
|
||||
"cmd_|-F_|-echo F_|-run": {
|
||||
"__run_num__": 5,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " require:\n"
|
||||
+ " foobar: A\n",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
"cmd_|-G_|-echo G_|-run": {
|
||||
"__run_num__": 6,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " require:\n"
|
||||
+ " cmd: Z\n",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
"cmd_|-H_|-echo H_|-run": {
|
||||
"__run_num__": 7,
|
||||
"comment": "The following requisites were not found:\n"
|
||||
+ " require:\n"
|
||||
+ " cmd: Z\n",
|
||||
"result": False,
|
||||
"changes": False,
|
||||
},
|
||||
}
|
||||
sls_contents = """
|
||||
# Complex require/require_in graph
|
||||
#
|
||||
|
@ -286,29 +193,44 @@ def test_requisites_require_ordering_and_errors_1(state, state_tree):
|
|||
- require_in:
|
||||
- cmd: A
|
||||
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail with "Referenced state does not exist for requisite"
|
||||
F:
|
||||
cmd.run:
|
||||
- name: echo F
|
||||
- require:
|
||||
- foobar: A
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail with "Referenced state does not exist for requisite"
|
||||
G:
|
||||
cmd.run:
|
||||
- name: echo G
|
||||
- require:
|
||||
- cmd: Z
|
||||
# will fail with "The following requisites were not found"
|
||||
# will fail with "Referenced state does not exist for requisite"
|
||||
H:
|
||||
cmd.run:
|
||||
- name: echo H
|
||||
- require:
|
||||
- cmd: Z
|
||||
"""
|
||||
errmsgs = [
|
||||
(
|
||||
"Referenced state does not exist for requisite [require: (foobar: A)]"
|
||||
" in state [echo F] in SLS [requisite]"
|
||||
),
|
||||
(
|
||||
"Referenced state does not exist for requisite [require: (cmd: Z)]"
|
||||
" in state [echo G] in SLS [requisite]"
|
||||
),
|
||||
(
|
||||
"Referenced state does not exist for requisite [require: (cmd: Z)]"
|
||||
" in state [echo H] in SLS [requisite]"
|
||||
),
|
||||
]
|
||||
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
result = normalize_ret(ret.raw)
|
||||
assert result == expected_result
|
||||
assert ret.failed
|
||||
assert ret.errors == errmsgs
|
||||
|
||||
|
||||
def test_requisites_require_ordering_and_errors_2(state, state_tree):
|
||||
|
@ -425,11 +347,13 @@ def test_requisites_require_ordering_and_errors_5(state, state_tree):
|
|||
- require:
|
||||
- cmd: A
|
||||
"""
|
||||
# issue #8235
|
||||
# FIXME: Why is require enforcing list syntax while require_in does not?
|
||||
# And why preventing it?
|
||||
# Currently this state fails, should return C/B/A
|
||||
errmsg = 'A recursive requisite was found, SLS "requisite" ID "B" ID "A"'
|
||||
errmsg = (
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, "
|
||||
"'require', {'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), "
|
||||
"({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'require', "
|
||||
"{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})"
|
||||
)
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert ret.failed
|
||||
|
@ -438,9 +362,9 @@ def test_requisites_require_ordering_and_errors_5(state, state_tree):
|
|||
|
||||
def test_requisites_require_any(state, state_tree):
|
||||
"""
|
||||
Call sls file containing several require_in and require.
|
||||
Call sls file containing require_any.
|
||||
|
||||
Ensure that some of them are failing and that the order is right.
|
||||
Ensure that the order is right.
|
||||
"""
|
||||
sls_contents = """
|
||||
# Complex require/require_in graph
|
||||
|
@ -512,9 +436,9 @@ def test_requisites_require_any(state, state_tree):
|
|||
|
||||
def test_requisites_require_any_fail(state, state_tree):
|
||||
"""
|
||||
Call sls file containing several require_in and require.
|
||||
Call sls file containing require_any.
|
||||
|
||||
Ensure that some of them are failing and that the order is right.
|
||||
Ensure that the order is right.
|
||||
"""
|
||||
sls_contents = """
|
||||
# D should fail since both E & F fail
|
||||
|
|
|
@ -130,7 +130,11 @@ def test_catch_recurse(state, state_tree):
|
|||
ret = state.sls("recurse-fail")
|
||||
assert ret.failed
|
||||
assert (
|
||||
'A recursive requisite was found, SLS "recurse-fail" ID "/etc/mysql/my.cnf" ID "mysql"'
|
||||
"Recursive requisites were found: "
|
||||
"({'SLS': 'recurse-fail', 'ID': '/etc/mysql/my.cnf'}, "
|
||||
"'require', {'SLS': 'recurse-fail', 'ID': 'mysql'}), "
|
||||
"({'SLS': 'recurse-fail', 'ID': 'mysql'}, "
|
||||
"'require', {'SLS': 'recurse-fail', 'ID': '/etc/mysql/my.cnf'})"
|
||||
in ret.errors
|
||||
)
|
||||
|
||||
|
|
|
@ -367,11 +367,10 @@ def test_file_replace_prerequired_issues_55775(modules, state_tree, tmp_path):
|
|||
test changes:
|
||||
test.succeed_with_changes:
|
||||
- name: changes
|
||||
- require:
|
||||
- test: test no changes
|
||||
"""
|
||||
with pytest.helpers.temp_file("file-replace.sls", sls_contents, state_tree):
|
||||
ret = modules.state.sls("file-replace")
|
||||
assert not ret.failed
|
||||
for state_run in ret:
|
||||
assert state_run.result is True
|
||||
|
||||
|
|
|
@ -91,16 +91,16 @@ class MockState:
|
|||
Mock verify_high method
|
||||
"""
|
||||
if self.flag:
|
||||
return True
|
||||
return ["verify_high_error"]
|
||||
else:
|
||||
return -1
|
||||
return []
|
||||
|
||||
@staticmethod
|
||||
def compile_high_data(data):
|
||||
"""
|
||||
Mock compile_high_data
|
||||
"""
|
||||
return [{"__id__": "ABC"}]
|
||||
return [{"__id__": "ABC"}], []
|
||||
|
||||
@staticmethod
|
||||
def call_chunk(data, data1, data2):
|
||||
|
@ -123,6 +123,9 @@ class MockState:
|
|||
"""
|
||||
return True
|
||||
|
||||
def order_chunks(self, data):
|
||||
return data, []
|
||||
|
||||
def requisite_in(self, data): # pylint: disable=unused-argument
|
||||
return data, []
|
||||
|
||||
|
@ -143,9 +146,9 @@ class MockState:
|
|||
Mock render_state method
|
||||
"""
|
||||
if self.flag:
|
||||
return {}, True
|
||||
return {}, ["render_state_error"]
|
||||
else:
|
||||
return {}, False
|
||||
return {}, []
|
||||
|
||||
@staticmethod
|
||||
def get_top():
|
||||
|
@ -210,9 +213,9 @@ class MockState:
|
|||
Mock render_highstate method
|
||||
"""
|
||||
if self.flag:
|
||||
return ["a", "b"], True
|
||||
return ["a", "b"], ["render_highstate_error"]
|
||||
else:
|
||||
return ["a", "b"], False
|
||||
return ["a", "b"], []
|
||||
|
||||
@staticmethod
|
||||
def call_highstate(
|
||||
|
@ -612,9 +615,13 @@ def test_sls_id():
|
|||
with patch.object(salt.utils.args, "test_mode", mock):
|
||||
MockState.State.flag = True
|
||||
MockState.HighState.flag = True
|
||||
assert state.sls_id("apache", "http") == 2
|
||||
assert state.sls_id("apache", "http") == [
|
||||
"render_highstate_error",
|
||||
"verify_high_error",
|
||||
]
|
||||
|
||||
MockState.State.flag = False
|
||||
MockState.HighState.flag = False
|
||||
assert state.sls_id("ABC", "http") == {"": "ABC"}
|
||||
pytest.raises(SaltInvocationError, state.sls_id, "DEF", "http")
|
||||
|
||||
|
@ -632,9 +639,13 @@ def test_show_low_sls():
|
|||
with patch.object(salt.utils.state, "get_sls_opts", mock):
|
||||
MockState.State.flag = True
|
||||
MockState.HighState.flag = True
|
||||
assert state.show_low_sls("foo") == 2
|
||||
assert state.show_low_sls("foo") == [
|
||||
"render_highstate_error",
|
||||
"verify_high_error",
|
||||
]
|
||||
|
||||
MockState.State.flag = False
|
||||
MockState.HighState.flag = False
|
||||
assert state.show_low_sls("foo") == [{"__id__": "ABC"}]
|
||||
|
||||
|
||||
|
@ -656,7 +667,7 @@ def test_show_sls():
|
|||
)
|
||||
|
||||
MockState.State.flag = True
|
||||
assert state.show_sls("foo") == 2
|
||||
assert state.show_sls("foo") == ["verify_high_error"]
|
||||
|
||||
MockState.State.flag = False
|
||||
assert state.show_sls("foo") == ["a", "b"]
|
||||
|
|
|
@ -289,6 +289,7 @@ def test_sls():
|
|||
) as _create_and_execute_salt_state:
|
||||
SSHHighState.return_value = SSHHighState
|
||||
SSHHighState.render_highstate.return_value = (None, [])
|
||||
SSHHighState.state.compile_high_data.return_value = ([], [])
|
||||
SSHHighState.state.reconcile_extend.return_value = (None, [])
|
||||
SSHHighState.state.requisite_in.return_value = (None, [])
|
||||
SSHHighState.state.verify_high.return_value = []
|
||||
|
|
|
@ -166,7 +166,7 @@ def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path):
|
|||
}
|
||||
},
|
||||
[
|
||||
"ID '1234' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a int. It may need to be quoted"
|
||||
"ID '1234' in SLS '/srv/reactor/start.sls' is not formed as a string, but is type int. It may need to be quoted."
|
||||
],
|
||||
),
|
||||
(
|
||||
|
@ -177,7 +177,7 @@ def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path):
|
|||
}
|
||||
},
|
||||
[
|
||||
"ID 'b'test'' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bytes. It may need to be quoted"
|
||||
"ID 'b'test'' in SLS '/srv/reactor/start.sls' is not formed as a string, but is type bytes. It may need to be quoted."
|
||||
],
|
||||
),
|
||||
(
|
||||
|
@ -188,7 +188,7 @@ def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path):
|
|||
}
|
||||
},
|
||||
[
|
||||
"ID 'True' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bool. It may need to be quoted"
|
||||
"ID 'True' in SLS '/srv/reactor/start.sls' is not formed as a string, but is type bool. It may need to be quoted."
|
||||
],
|
||||
),
|
||||
(
|
||||
|
@ -463,7 +463,7 @@ def test_compiler_verify_high_short_sls(minion_opts, tmp_path, high, exp):
|
|||
),
|
||||
},
|
||||
[
|
||||
"Requisite declaration ('local', 'add_test_1') in SLS /srv/reactor/start.sls is not formed as a single key dictionary"
|
||||
"Requisite declaration ('local', 'add_test_1') in state add_test_2 in SLS /srv/reactor/start.sls is not formed as a single key dictionary"
|
||||
],
|
||||
),
|
||||
(
|
||||
|
@ -529,69 +529,8 @@ def test_compiler_verify_high_short_sls(minion_opts, tmp_path, high, exp):
|
|||
]
|
||||
),
|
||||
},
|
||||
["Illegal requisite \"['add_test_1']\", is SLS /srv/reactor/start.sls\n"],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"require",
|
||||
[OrderedDict([("local", "add_test_2")])],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[
|
||||
'A recursive requisite was found, SLS "/srv/reactor/start.sls" ID "add_test_2" ID "add_test_2"'
|
||||
'Illegal requisite "[\'add_test_1\']" in SLS "/srv/reactor/start.sls", please check your syntax.\n'
|
||||
],
|
||||
),
|
||||
(
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
|
@ -54,7 +55,7 @@ def test_render_error_on_invalid_requisite(minion_opts):
|
|||
exception when a requisite cannot be resolved
|
||||
"""
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
high_data = {
|
||||
high_data: dict[str, Any] = {
|
||||
"git": salt.state.HashableOrderedDict(
|
||||
[
|
||||
(
|
||||
|
@ -88,12 +89,16 @@ def test_render_error_on_invalid_requisite(minion_opts):
|
|||
]
|
||||
)
|
||||
}
|
||||
expected_result = [
|
||||
"Requisite [require: file] in state [git] in SLS"
|
||||
" [issue_35226] must have a string as the value"
|
||||
]
|
||||
minion_opts["pillar"] = {
|
||||
"git": salt.state.HashableOrderedDict([("test1", "test")])
|
||||
}
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
with pytest.raises(salt.exceptions.SaltRenderError):
|
||||
state_obj.call_high(high_data)
|
||||
return_result = state_obj.call_high(high_data)
|
||||
assert expected_result == return_result
|
||||
|
||||
|
||||
def test_verify_onlyif_parse(minion_opts):
|
||||
|
@ -756,6 +761,7 @@ def test_render_requisite_require_disabled(minion_opts):
|
|||
minion_opts["disabled_requisites"] = ["require"]
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
ret = state_obj.call_high(high_data)
|
||||
assert isinstance(ret, dict)
|
||||
run_num = ret["test_|-step_one_|-step_one_|-succeed_with_changes"][
|
||||
"__run_num__"
|
||||
]
|
||||
|
@ -804,6 +810,7 @@ def test_render_requisite_require_in_disabled(minion_opts):
|
|||
minion_opts["disabled_requisites"] = ["require_in"]
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
ret = state_obj.call_high(high_data)
|
||||
assert isinstance(ret, dict)
|
||||
run_num = ret["test_|-step_one_|-step_one_|-succeed_with_changes"][
|
||||
"__run_num__"
|
||||
]
|
||||
|
@ -846,7 +853,7 @@ def test_call_chunk_sub_state_run(minion_opts):
|
|||
with patch("salt.state.State.call", return_value=mock_call_return):
|
||||
minion_opts["disabled_requisites"] = ["require"]
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
ret = state_obj.call_chunk(low_data, {}, {})
|
||||
ret = state_obj.call_chunk(low_data, {}, [])
|
||||
sub_state = ret.get(expected_sub_state_tag)
|
||||
assert sub_state
|
||||
assert sub_state["__run_num__"] == 1
|
||||
|
@ -855,128 +862,6 @@ def test_call_chunk_sub_state_run(minion_opts):
|
|||
assert sub_state["__sls__"] == "external"
|
||||
|
||||
|
||||
def test_aggregate_requisites(minion_opts):
|
||||
"""
|
||||
Test to ensure that the requisites are included in the aggregated low state.
|
||||
"""
|
||||
# The low that is returned from _mod_aggregrate
|
||||
low = {
|
||||
"state": "pkg",
|
||||
"name": "other_pkgs",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__id__": "other_pkgs",
|
||||
"pkgs": ["byobu", "vim", "tmux", "google-cloud-sdk"],
|
||||
"aggregate": True,
|
||||
"order": 10002,
|
||||
"fun": "installed",
|
||||
"__agg__": True,
|
||||
}
|
||||
|
||||
# Chunks that have been processed through the pkg mod_aggregate function
|
||||
chunks = [
|
||||
{
|
||||
"state": "file",
|
||||
"name": "/tmp/install-vim",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__id__": "/tmp/install-vim",
|
||||
"order": 10000,
|
||||
"fun": "managed",
|
||||
},
|
||||
{
|
||||
"state": "file",
|
||||
"name": "/tmp/install-tmux",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__id__": "/tmp/install-tmux",
|
||||
"order": 10001,
|
||||
"fun": "managed",
|
||||
},
|
||||
{
|
||||
"state": "pkg",
|
||||
"name": "other_pkgs",
|
||||
"__sls__": "47628",
|
||||
"__env __": "base",
|
||||
"__id__": "other_pkgs",
|
||||
"pkgs": ["byobu"],
|
||||
"aggregate": True,
|
||||
"order": 10002,
|
||||
"fun": "installed",
|
||||
},
|
||||
{
|
||||
"state": "pkg",
|
||||
"name": "bc",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__id__": "bc",
|
||||
"hold": True,
|
||||
"__agg__": True,
|
||||
"order": 10003,
|
||||
"fun": "installed",
|
||||
},
|
||||
{
|
||||
"state": "pkg",
|
||||
"name": "vim",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__agg__": True,
|
||||
"__id__": "vim",
|
||||
"require": ["/tmp/install-vim"],
|
||||
"order": 10004,
|
||||
"fun": "installed",
|
||||
},
|
||||
{
|
||||
"state": "pkg",
|
||||
"name": "tmux",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__agg__": True,
|
||||
"__id__": "tmux",
|
||||
"require": ["/tmp/install-tmux"],
|
||||
"order": 10005,
|
||||
"fun": "installed",
|
||||
},
|
||||
{
|
||||
"state": "pkgrepo",
|
||||
"name": "deb https://packages.cloud.google.com/apt cloud-sdk main",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__id__": "google-cloud-repo",
|
||||
"humanname": "Google Cloud SDK",
|
||||
"file": "/etc/apt/sources.list.d/google-cloud-sdk.list",
|
||||
"key_url": "https://packages.cloud.google.com/apt/doc/apt-key.gpg",
|
||||
"order": 10006,
|
||||
"fun": "managed",
|
||||
},
|
||||
{
|
||||
"state": "pkg",
|
||||
"name": "google-cloud-sdk",
|
||||
"__sls__": "47628",
|
||||
"__env__": "base",
|
||||
"__agg__": True,
|
||||
"__id__": "google-cloud-sdk",
|
||||
"require": ["google-cloud-repo"],
|
||||
"order": 10007,
|
||||
"fun": "installed",
|
||||
},
|
||||
]
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
state_obj = salt.state.State(minion_opts)
|
||||
low_ret = state_obj._aggregate_requisites(low, chunks)
|
||||
|
||||
# Ensure the low returned contains require
|
||||
assert "require" in low_ret
|
||||
|
||||
# Ensure all the requires from pkg states are in low
|
||||
assert low_ret["require"] == [
|
||||
"/tmp/install-vim",
|
||||
"/tmp/install-tmux",
|
||||
"google-cloud-repo",
|
||||
]
|
||||
|
||||
|
||||
def test_mod_aggregate(minion_opts):
|
||||
"""
|
||||
Test to ensure that the requisites are included in the aggregated low state.
|
||||
|
@ -1030,6 +915,16 @@ def test_mod_aggregate(minion_opts):
|
|||
"aggregate": True,
|
||||
"fun": "installed",
|
||||
},
|
||||
{
|
||||
"state": "pkg",
|
||||
"name": "hello",
|
||||
"__sls__": "test.62439",
|
||||
"__env__": "base",
|
||||
"__id__": "hello",
|
||||
"order": 10003,
|
||||
"aggregate": True,
|
||||
"fun": "installed",
|
||||
},
|
||||
]
|
||||
|
||||
running = {}
|
||||
|
@ -1044,7 +939,7 @@ def test_mod_aggregate(minion_opts):
|
|||
"order": 10002,
|
||||
"fun": "installed",
|
||||
"__agg__": True,
|
||||
"pkgs": ["figlet", "sl"],
|
||||
"pkgs": ["sl", "hello"],
|
||||
}
|
||||
|
||||
with patch("salt.state.State._gather_pillar"):
|
||||
|
@ -1053,7 +948,8 @@ def test_mod_aggregate(minion_opts):
|
|||
state_obj.states,
|
||||
{"pkg.mod_aggregate": MagicMock(return_value=mock_pkg_mod_aggregate)},
|
||||
):
|
||||
low_ret = state_obj._mod_aggregate(low, running, chunks)
|
||||
state_obj.order_chunks(chunks)
|
||||
low_ret = state_obj._mod_aggregate(low, running)
|
||||
|
||||
# Ensure the low returned contains require
|
||||
assert "require_in" in low_ret
|
||||
|
@ -1068,7 +964,7 @@ def test_mod_aggregate(minion_opts):
|
|||
assert "require" in low_ret
|
||||
|
||||
# Ensure pkgs were aggregated
|
||||
assert low_ret["pkgs"] == ["figlet", "sl"]
|
||||
assert low_ret["pkgs"] == ["sl", "hello"]
|
||||
|
||||
|
||||
def test_mod_aggregate_order(minion_opts):
|
||||
|
|
|
@ -545,7 +545,7 @@ def test_mod_aggregate():
|
|||
}
|
||||
|
||||
expected = {
|
||||
"pkgs": ["byobu", "byobu", "vim", "tmux", "google-cloud-sdk"],
|
||||
"pkgs": ["byobu", "vim", "tmux", "google-cloud-sdk"],
|
||||
"name": "other_pkgs",
|
||||
"fun": "installed",
|
||||
"aggregate": True,
|
||||
|
|
485
tests/pytests/unit/utils/requisite/test_dependency_graph.py
Normal file
485
tests/pytests/unit/utils/requisite/test_dependency_graph.py
Normal file
|
@ -0,0 +1,485 @@
|
|||
"""
|
||||
Test functions in state.py that are not a part of a class
|
||||
"""
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.utils.requisite
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.core_test,
|
||||
]
|
||||
|
||||
|
||||
def test_ordering():
|
||||
"""
|
||||
Testing that ordering chunks results in the expected order honoring
|
||||
requistes and order
|
||||
"""
|
||||
sls = "test"
|
||||
env = "base"
|
||||
chunks = [
|
||||
{
|
||||
"__id__": "success-6",
|
||||
"name": "success-6",
|
||||
"state": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "fail-0",
|
||||
"name": "fail-0",
|
||||
"state": "test",
|
||||
"fun": "fail_without_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "fail-1",
|
||||
"name": "fail-1",
|
||||
"state": "test",
|
||||
"fun": "fail_without_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "req-fails",
|
||||
"name": "req-fails",
|
||||
"state": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"require": ["fail-0", "fail-1"],
|
||||
},
|
||||
{
|
||||
"__id__": "success-4",
|
||||
"name": "success-4",
|
||||
"state": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"order": 4,
|
||||
},
|
||||
{
|
||||
"__id__": "success-1",
|
||||
"name": "success-1",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
"order": 1,
|
||||
},
|
||||
{
|
||||
"__id__": "success-2",
|
||||
"name": "success-2",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
"order": 2,
|
||||
},
|
||||
{
|
||||
"__id__": "success-d",
|
||||
"name": "success-d",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "success-c",
|
||||
"name": "success-c",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "success-b",
|
||||
"name": "success-b",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "success-a",
|
||||
"name": "success-a",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
},
|
||||
{
|
||||
"__id__": "success-3",
|
||||
"name": "success-3",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
"order": 3,
|
||||
"require": [{"test": "success-a"}],
|
||||
"watch": [{"test": "success-c"}],
|
||||
"onchanges": [{"test": "success-b"}],
|
||||
"listen": [{"test": "success-d"}],
|
||||
},
|
||||
{
|
||||
"__id__": "success-5",
|
||||
"name": "success-5",
|
||||
"state": "test",
|
||||
"fun": "succeed_without_changes",
|
||||
"listen": [{"test": "success-6"}],
|
||||
},
|
||||
]
|
||||
depend_graph = salt.utils.requisite.DependencyGraph()
|
||||
for low in chunks:
|
||||
low.update(
|
||||
{
|
||||
"__env__": env,
|
||||
"__sls__": sls,
|
||||
}
|
||||
)
|
||||
depend_graph.add_chunk(low, allow_aggregate=False)
|
||||
for low in chunks:
|
||||
depend_graph.add_requisites(low, [])
|
||||
ordered_chunk_ids = [
|
||||
chunk["__id__"] for chunk in depend_graph.aggregate_and_order_chunks(100)
|
||||
]
|
||||
expected_order = [
|
||||
"success-1",
|
||||
"success-2",
|
||||
"success-a",
|
||||
"success-b",
|
||||
"success-c",
|
||||
"success-3",
|
||||
"success-4",
|
||||
"fail-0",
|
||||
"fail-1",
|
||||
"req-fails",
|
||||
"success-5",
|
||||
"success-6",
|
||||
"success-d",
|
||||
]
|
||||
assert expected_order == ordered_chunk_ids
|
||||
|
||||
|
||||
def test_find_cycle_edges():
|
||||
sls = "test"
|
||||
env = "base"
|
||||
chunks = [
|
||||
{
|
||||
"__id__": "state-1",
|
||||
"name": "state-1",
|
||||
"state": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"require": [{"test": "state-2"}],
|
||||
},
|
||||
{
|
||||
"__id__": "state-2",
|
||||
"name": "state-2",
|
||||
"state": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"require": [{"test": "state-3"}],
|
||||
},
|
||||
{
|
||||
"__id__": "state-3",
|
||||
"name": "state-3",
|
||||
"state": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"require": [{"test": "state-1"}],
|
||||
},
|
||||
]
|
||||
depend_graph = salt.utils.requisite.DependencyGraph()
|
||||
for low in chunks:
|
||||
low.update(
|
||||
{
|
||||
"__env__": env,
|
||||
"__sls__": sls,
|
||||
}
|
||||
)
|
||||
depend_graph.add_chunk(low, allow_aggregate=False)
|
||||
for low in chunks:
|
||||
depend_graph.add_requisites(low, [])
|
||||
expected_cycle_edges = [
|
||||
(
|
||||
{
|
||||
"__env__": "base",
|
||||
"__id__": "state-3",
|
||||
"__sls__": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"name": "state-3",
|
||||
"require": [{"test": "state-1"}],
|
||||
"state": "test",
|
||||
},
|
||||
"require",
|
||||
{
|
||||
"__env__": "base",
|
||||
"__id__": "state-1",
|
||||
"__sls__": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"name": "state-1",
|
||||
"require": [{"test": "state-2"}],
|
||||
"state": "test",
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"__env__": "base",
|
||||
"__id__": "state-2",
|
||||
"__sls__": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"name": "state-2",
|
||||
"require": [{"test": "state-3"}],
|
||||
"state": "test",
|
||||
},
|
||||
"require",
|
||||
{
|
||||
"__env__": "base",
|
||||
"__id__": "state-3",
|
||||
"__sls__": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"name": "state-3",
|
||||
"require": [{"test": "state-1"}],
|
||||
"state": "test",
|
||||
},
|
||||
),
|
||||
(
|
||||
{
|
||||
"__env__": "base",
|
||||
"__id__": "state-1",
|
||||
"__sls__": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"name": "state-1",
|
||||
"require": [{"test": "state-2"}],
|
||||
"state": "test",
|
||||
},
|
||||
"require",
|
||||
{
|
||||
"__env__": "base",
|
||||
"__id__": "state-2",
|
||||
"__sls__": "test",
|
||||
"fun": "succeed_with_changes",
|
||||
"name": "state-2",
|
||||
"require": [{"test": "state-3"}],
|
||||
"state": "test",
|
||||
},
|
||||
),
|
||||
]
|
||||
cycle_edges = depend_graph.find_cycle_edges()
|
||||
assert expected_cycle_edges == cycle_edges
|
||||
|
||||
|
||||
def test_get_aggregate_chunks():
|
||||
sls = "test"
|
||||
env = "base"
|
||||
chunks = [
|
||||
{
|
||||
"__id__": "packages-1",
|
||||
"name": "packages-1",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["hello"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-2",
|
||||
"name": "packages-2",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["cowsay", "fortune-mod"],
|
||||
"require": ["requirement"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-3",
|
||||
"name": "packages-3",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["figlet"],
|
||||
"require": ["requirement"],
|
||||
},
|
||||
{
|
||||
"__id__": "requirement",
|
||||
"name": "requirement",
|
||||
"state": "test",
|
||||
"fun": "nop",
|
||||
},
|
||||
{
|
||||
"__id__": "packages-4",
|
||||
"name": "packages-4",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["cowsay"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-5",
|
||||
"name": "packages-5",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["sl"],
|
||||
"require": ["packages-4"],
|
||||
},
|
||||
]
|
||||
depend_graph = salt.utils.requisite.DependencyGraph()
|
||||
for low in chunks:
|
||||
low.update(
|
||||
{
|
||||
"__env__": env,
|
||||
"__sls__": sls,
|
||||
}
|
||||
)
|
||||
depend_graph.add_chunk(low, allow_aggregate=True)
|
||||
for low in chunks:
|
||||
depend_graph.add_requisites(low, [])
|
||||
depend_graph.aggregate_and_order_chunks(100)
|
||||
expected_aggregates = [
|
||||
(chunks[0], ["packages-1", "packages-4", "packages-2", "packages-3"]),
|
||||
(chunks[1], ["packages-1", "packages-4", "packages-2", "packages-3"]),
|
||||
(chunks[2], ["packages-1", "packages-4", "packages-2", "packages-3"]),
|
||||
(chunks[3], []),
|
||||
(chunks[4], ["packages-1", "packages-4", "packages-2", "packages-3"]),
|
||||
(chunks[5], []),
|
||||
]
|
||||
for low, expected_aggregate_ids in expected_aggregates:
|
||||
aggregated_ids = [
|
||||
chunk["__id__"] for chunk in depend_graph.get_aggregate_chunks(low)
|
||||
]
|
||||
assert expected_aggregate_ids == aggregated_ids
|
||||
|
||||
|
||||
def test_get_dependencies():
|
||||
sls = "test"
|
||||
env = "base"
|
||||
chunks = [
|
||||
{
|
||||
"__id__": "packages-1",
|
||||
"name": "packages-1",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["hello"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-2",
|
||||
"name": "packages-2",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["cowsay", "fortune-mod"],
|
||||
"require": ["requirement"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-3",
|
||||
"name": "packages-3",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["figlet"],
|
||||
"require": ["requirement"],
|
||||
},
|
||||
{
|
||||
"__id__": "requirement",
|
||||
"name": "requirement",
|
||||
"state": "test",
|
||||
"fun": "nop",
|
||||
},
|
||||
{
|
||||
"__id__": "packages-4",
|
||||
"name": "packages-4",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["cowsay"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-5",
|
||||
"name": "packages-5",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["sl"],
|
||||
"require": ["packages-4"],
|
||||
},
|
||||
]
|
||||
depend_graph = salt.utils.requisite.DependencyGraph()
|
||||
for low in chunks:
|
||||
low.update(
|
||||
{
|
||||
"__env__": env,
|
||||
"__sls__": sls,
|
||||
}
|
||||
)
|
||||
depend_graph.add_chunk(low, allow_aggregate=False)
|
||||
for low in chunks:
|
||||
depend_graph.add_requisites(low, [])
|
||||
depend_graph.aggregate_and_order_chunks(100)
|
||||
expected_aggregates = [
|
||||
(chunks[0], []),
|
||||
(chunks[1], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]),
|
||||
(chunks[2], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]),
|
||||
(chunks[3], []),
|
||||
(chunks[4], []),
|
||||
(chunks[5], [(salt.utils.requisite.RequisiteType.REQUIRE, "packages-4")]),
|
||||
]
|
||||
for low, expected_dependency_tuples in expected_aggregates:
|
||||
depend_tuples = [
|
||||
(req_type, chunk["__id__"])
|
||||
for (req_type, chunk) in depend_graph.get_dependencies(low)
|
||||
]
|
||||
assert expected_dependency_tuples == depend_tuples
|
||||
|
||||
|
||||
def test_get_dependencies_when_aggregated():
|
||||
sls = "test"
|
||||
env = "base"
|
||||
chunks = [
|
||||
{
|
||||
"__id__": "packages-1",
|
||||
"name": "packages-1",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["hello"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-2",
|
||||
"name": "packages-2",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["cowsay", "fortune-mod"],
|
||||
"require": ["requirement"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-3",
|
||||
"name": "packages-3",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["figlet"],
|
||||
"require": ["requirement"],
|
||||
},
|
||||
{
|
||||
"__id__": "requirement",
|
||||
"name": "requirement",
|
||||
"state": "test",
|
||||
"fun": "nop",
|
||||
},
|
||||
{
|
||||
"__id__": "packages-4",
|
||||
"name": "packages-4",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["cowsay"],
|
||||
},
|
||||
{
|
||||
"__id__": "packages-5",
|
||||
"name": "packages-5",
|
||||
"state": "pkg",
|
||||
"fun": "installed",
|
||||
"pkgs": ["sl"],
|
||||
"require": ["packages-4"],
|
||||
},
|
||||
]
|
||||
depend_graph = salt.utils.requisite.DependencyGraph()
|
||||
for low in chunks:
|
||||
low.update(
|
||||
{
|
||||
"__env__": env,
|
||||
"__sls__": sls,
|
||||
}
|
||||
)
|
||||
depend_graph.add_chunk(low, allow_aggregate=True)
|
||||
for low in chunks:
|
||||
depend_graph.add_requisites(low, [])
|
||||
depend_graph.aggregate_and_order_chunks(100)
|
||||
expected_aggregates = [
|
||||
(chunks[0], []),
|
||||
(chunks[1], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]),
|
||||
(chunks[2], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]),
|
||||
(chunks[3], []),
|
||||
(chunks[4], []),
|
||||
(
|
||||
chunks[5],
|
||||
[
|
||||
(salt.utils.requisite.RequisiteType.REQUIRE, "packages-4"),
|
||||
(salt.utils.requisite.RequisiteType.REQUIRE, "packages-1"),
|
||||
(salt.utils.requisite.RequisiteType.REQUIRE, "packages-4"),
|
||||
(salt.utils.requisite.RequisiteType.REQUIRE, "packages-2"),
|
||||
(salt.utils.requisite.RequisiteType.REQUIRE, "packages-3"),
|
||||
],
|
||||
),
|
||||
]
|
||||
for low, expected_dependency_tuples in expected_aggregates:
|
||||
depend_tuples = [
|
||||
(req_type, chunk["__id__"])
|
||||
for (req_type, chunk) in depend_graph.get_dependencies(low)
|
||||
]
|
||||
assert expected_dependency_tuples == depend_tuples
|
|
@ -165,7 +165,8 @@ def test_query_error_handling():
|
|||
ret = http.query("http://127.0.0.1:0")
|
||||
assert isinstance(ret, dict)
|
||||
assert isinstance(ret.get("error", None), str)
|
||||
ret = http.query("http://myfoobardomainthatnotexist")
|
||||
# use RFC6761 invalid domain that does not exist
|
||||
ret = http.query("http://myfoobardomainthatnotexist.invalid")
|
||||
assert isinstance(ret, dict)
|
||||
assert isinstance(ret.get("error", None), str)
|
||||
|
||||
|
|
|
@ -58,7 +58,6 @@ def test_get_ext_tops(version):
|
|||
python3 = False
|
||||
if tuple(version) >= (3, 0):
|
||||
python3 = True
|
||||
|
||||
cfg = {
|
||||
"namespace": {
|
||||
"path": "/foo",
|
||||
|
@ -68,6 +67,7 @@ def test_get_ext_tops(version):
|
|||
"yaml": "/yaml/",
|
||||
"tornado": "/tornado/tornado.py",
|
||||
"msgpack": "msgpack.py",
|
||||
"networkx": "/networkx/networkx.py",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,6 +39,15 @@ def patch_if(condition, *args, **kwargs):
|
|||
return inner
|
||||
|
||||
|
||||
class FakeSaltSystemExit(Exception):
|
||||
"""
|
||||
Fake SaltSystemExit so the process does not actually die
|
||||
"""
|
||||
|
||||
def __init__(self, code=-1, msg=None):
|
||||
super().__init__(msg or code)
|
||||
|
||||
|
||||
class SSHThinTestCase(TestCase):
|
||||
"""
|
||||
TestCase for SaltSSH-related parts.
|
||||
|
@ -69,6 +78,7 @@ class SSHThinTestCase(TestCase):
|
|||
"yaml": os.path.join(lib_root, "yaml"),
|
||||
"tornado": os.path.join(lib_root, "tornado"),
|
||||
"msgpack": os.path.join(lib_root, "msgpack"),
|
||||
"networkx": os.path.join(lib_root, "networkx"),
|
||||
}
|
||||
|
||||
code_dir = pathlib.Path(RUNTIME_VARS.CODE_DIR).resolve()
|
||||
|
@ -78,6 +88,7 @@ class SSHThinTestCase(TestCase):
|
|||
"yaml": str(code_dir / "yaml"),
|
||||
"tornado": str(code_dir / "tornado"),
|
||||
"msgpack": str(code_dir / "msgpack"),
|
||||
"networkx": str(code_dir / "networkx"),
|
||||
"certifi": str(code_dir / "certifi"),
|
||||
"singledispatch": str(code_dir / "singledispatch.py"),
|
||||
"looseversion": str(code_dir / "looseversion.py"),
|
||||
|
@ -164,7 +175,7 @@ class SSHThinTestCase(TestCase):
|
|||
self.assertIn("Missing dependencies", thin.log.error.call_args[0][0])
|
||||
self.assertIn("jinja2, yaml, tornado, msgpack", thin.log.error.call_args[0][0])
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.path.isfile", MagicMock(return_value=False))
|
||||
def test_get_ext_tops_cfg_missing_interpreter(self):
|
||||
|
@ -178,7 +189,7 @@ class SSHThinTestCase(TestCase):
|
|||
thin.get_ext_tops(cfg)
|
||||
self.assertIn("missing specific locked Python version", str(err.value))
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.path.isfile", MagicMock(return_value=False))
|
||||
def test_get_ext_tops_cfg_wrong_interpreter(self):
|
||||
|
@ -196,7 +207,7 @@ class SSHThinTestCase(TestCase):
|
|||
str(err.value),
|
||||
)
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.path.isfile", MagicMock(return_value=False))
|
||||
def test_get_ext_tops_cfg_interpreter(self):
|
||||
|
@ -271,7 +282,7 @@ class SSHThinTestCase(TestCase):
|
|||
"configured with not a file or does not exist", messages["jinja2"]
|
||||
)
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.path.isfile", MagicMock(return_value=True))
|
||||
def test_get_ext_tops_config_pass(self):
|
||||
|
@ -289,6 +300,7 @@ class SSHThinTestCase(TestCase):
|
|||
"yaml": "/yaml/",
|
||||
"tornado": "/tornado/tornado.py",
|
||||
"msgpack": "msgpack.py",
|
||||
"networkx": "/networkx/networkx.py",
|
||||
"distro": "distro.py",
|
||||
},
|
||||
}
|
||||
|
@ -302,6 +314,7 @@ class SSHThinTestCase(TestCase):
|
|||
"/jinja/foo.py",
|
||||
"/yaml/",
|
||||
"msgpack.py",
|
||||
"/networkx/networkx.py",
|
||||
"distro.py",
|
||||
]
|
||||
)
|
||||
|
@ -407,6 +420,10 @@ class SSHThinTestCase(TestCase):
|
|||
"salt.utils.thin.msgpack",
|
||||
type("msgpack", (), {"__file__": "/site-packages/msgpack"}),
|
||||
)
|
||||
@patch(
|
||||
"salt.utils.thin.networkx",
|
||||
type("networkx", (), {"__file__": "/site-packages/networkx"}),
|
||||
)
|
||||
@patch(
|
||||
"salt.utils.thin.certifi",
|
||||
type("certifi", (), {"__file__": "/site-packages/certifi"}),
|
||||
|
@ -465,6 +482,7 @@ class SSHThinTestCase(TestCase):
|
|||
"yaml",
|
||||
"tornado",
|
||||
"msgpack",
|
||||
"networkx",
|
||||
"certifi",
|
||||
"sdp",
|
||||
"sdp_hlp",
|
||||
|
@ -512,6 +530,10 @@ class SSHThinTestCase(TestCase):
|
|||
"salt.utils.thin.msgpack",
|
||||
type("msgpack", (), {"__file__": "/site-packages/msgpack"}),
|
||||
)
|
||||
@patch(
|
||||
"salt.utils.thin.networkx",
|
||||
type("networkx", (), {"__file__": "/site-packages/networkx"}),
|
||||
)
|
||||
@patch(
|
||||
"salt.utils.thin.certifi",
|
||||
type("certifi", (), {"__file__": "/site-packages/certifi"}),
|
||||
|
@ -570,6 +592,7 @@ class SSHThinTestCase(TestCase):
|
|||
"yaml",
|
||||
"tornado",
|
||||
"msgpack",
|
||||
"networkx",
|
||||
"certifi",
|
||||
"sdp",
|
||||
"sdp_hlp",
|
||||
|
@ -627,6 +650,10 @@ class SSHThinTestCase(TestCase):
|
|||
"salt.utils.thin.msgpack",
|
||||
type("msgpack", (), {"__file__": "/site-packages/msgpack"}),
|
||||
)
|
||||
@patch(
|
||||
"salt.utils.thin.networkx",
|
||||
type("networkx", (), {"__file__": "/site-packages/networkx"}),
|
||||
)
|
||||
@patch(
|
||||
"salt.utils.thin.certifi",
|
||||
type("certifi", (), {"__file__": "/site-packages/certifi"}),
|
||||
|
@ -685,6 +712,7 @@ class SSHThinTestCase(TestCase):
|
|||
"yaml",
|
||||
"tornado",
|
||||
"msgpack",
|
||||
"networkx",
|
||||
"certifi",
|
||||
"sdp",
|
||||
"sdp_hlp",
|
||||
|
@ -754,7 +782,7 @@ class SSHThinTestCase(TestCase):
|
|||
assert form == "sha256"
|
||||
|
||||
@patch("salt.utils.thin.sys.version_info", (2, 5))
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
def test_gen_thin_fails_ancient_python_version(self):
|
||||
"""
|
||||
Test thin.gen_thin function raises an exception
|
||||
|
@ -770,7 +798,7 @@ class SSHThinTestCase(TestCase):
|
|||
str(err.value),
|
||||
)
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.makedirs", MagicMock())
|
||||
@patch("salt.utils.files.fopen", MagicMock())
|
||||
|
@ -826,7 +854,7 @@ class SSHThinTestCase(TestCase):
|
|||
thin.zipfile.ZipFile.assert_not_called()
|
||||
thin.tarfile.open.assert_called()
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.makedirs", MagicMock())
|
||||
@patch("salt.utils.files.fopen", MagicMock())
|
||||
|
@ -880,7 +908,7 @@ class SSHThinTestCase(TestCase):
|
|||
self.assertEqual(name, fname)
|
||||
thin.tarfile.open().close.assert_called()
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.makedirs", MagicMock())
|
||||
@patch("salt.utils.files.fopen", MagicMock())
|
||||
|
@ -948,7 +976,7 @@ class SSHThinTestCase(TestCase):
|
|||
files.pop(files.index(arcname))
|
||||
self.assertFalse(files)
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.makedirs", MagicMock())
|
||||
@patch("salt.utils.files.fopen", MagicMock())
|
||||
|
@ -1076,7 +1104,7 @@ class SSHThinTestCase(TestCase):
|
|||
for t_line in ["second-system-effect:2:7", "solar-interference:2:6"]:
|
||||
self.assertIn(t_line, out)
|
||||
|
||||
@patch("salt.exceptions.SaltSystemExit", Exception)
|
||||
@patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit)
|
||||
@patch("salt.utils.thin.log", MagicMock())
|
||||
@patch("salt.utils.thin.os.makedirs", MagicMock())
|
||||
@patch("salt.utils.files.fopen", MagicMock())
|
||||
|
@ -1148,6 +1176,7 @@ class SSHThinTestCase(TestCase):
|
|||
(bts("yaml/__init__.py"), bts("")),
|
||||
(bts("tornado/__init__.py"), bts("")),
|
||||
(bts("msgpack/__init__.py"), bts("")),
|
||||
(bts("networkx/__init__.py"), bts("")),
|
||||
(bts("certifi/__init__.py"), bts("")),
|
||||
(bts("singledispatch.py"), bts("")),
|
||||
(bts(""), bts("")),
|
||||
|
@ -1190,6 +1219,7 @@ class SSHThinTestCase(TestCase):
|
|||
side_effect=[
|
||||
(bts("tornado/__init__.py"), bts("")),
|
||||
(bts("msgpack/__init__.py"), bts("")),
|
||||
(bts("networkx/__init__.py"), bts("")),
|
||||
(bts("certifi/__init__.py"), bts("")),
|
||||
(bts("singledispatch.py"), bts("")),
|
||||
(bts(""), bts("")),
|
||||
|
@ -1235,6 +1265,7 @@ class SSHThinTestCase(TestCase):
|
|||
(bts(self.fake_libs["yaml"]), bts("")),
|
||||
(bts(self.fake_libs["tornado"]), bts("")),
|
||||
(bts(self.fake_libs["msgpack"]), bts("")),
|
||||
(bts(self.fake_libs["networkx"]), bts("")),
|
||||
(bts(""), bts("")),
|
||||
(bts(""), bts("")),
|
||||
(bts(""), bts("")),
|
||||
|
@ -1263,6 +1294,7 @@ class SSHThinTestCase(TestCase):
|
|||
os.path.join("yaml", "__init__.py"),
|
||||
os.path.join("tornado", "__init__.py"),
|
||||
os.path.join("msgpack", "__init__.py"),
|
||||
os.path.join("networkx", "__init__.py"),
|
||||
]
|
||||
)
|
||||
|
||||
|
@ -1363,6 +1395,7 @@ class SSHThinTestCase(TestCase):
|
|||
os.path.join("yaml", "__init__.py"),
|
||||
os.path.join("tornado", "__init__.py"),
|
||||
os.path.join("msgpack", "__init__.py"),
|
||||
os.path.join("networkx", "__init__.py"),
|
||||
]
|
||||
)
|
||||
with patch_tops_py:
|
||||
|
|
Loading…
Add table
Reference in a new issue