From 66caa583465c03e3c10c7b6618c88456f1311531 Mon Sep 17 00:00:00 2001
From: bdrx312
Date: Sat, 20 Apr 2024 13:59:16 -0400
Subject: [PATCH] Fix issues with requisites and aggregate

Add networkx to the install requirements.
Fix aggregate to work properly with requisites.
Fix requisite checking so it is no longer exponential.
Fix pkg aggregate to work when multiple states specify the same package.
Add some type hints to state.py to make the code easier to follow.
Fix the case of a duplicate package in pkg aggregate.
---
 .pre-commit-config.yaml | 2 +-
 .pylintrc | 3 +-
 changelog/47154.fixed.md | 1 +
 changelog/59123.fixed.md | 1 +
 changelog/62439.fixed.md | 1 +
 changelog/65304.fixed.md | 2 +-
 changelog/8210.fixed.md | 1 +
 requirements/base.txt | 3 +-
 requirements/static/ci/py3.10/darwin.txt | 4 +
 requirements/static/ci/py3.10/freebsd.txt | 4 +
 requirements/static/ci/py3.10/linux.txt | 4 +
 requirements/static/ci/py3.10/windows.txt | 4 +
 requirements/static/ci/py3.11/darwin.txt | 4 +
 requirements/static/ci/py3.11/freebsd.txt | 4 +
 requirements/static/ci/py3.11/linux.txt | 4 +
 requirements/static/ci/py3.11/windows.txt | 4 +
 requirements/static/ci/py3.12/cloud.txt | 5 +
 requirements/static/ci/py3.12/darwin.txt | 4 +
 requirements/static/ci/py3.12/docs.txt | 4 +
 requirements/static/ci/py3.12/freebsd.txt | 4 +
 requirements/static/ci/py3.12/lint.txt | 5 +
 requirements/static/ci/py3.12/linux.txt | 4 +
 requirements/static/ci/py3.12/windows.txt | 4 +
 requirements/static/ci/py3.8/freebsd.txt | 4 +
 requirements/static/ci/py3.8/linux.txt | 4 +
 requirements/static/ci/py3.8/windows.txt | 4 +
 requirements/static/ci/py3.9/darwin.txt | 4 +
 requirements/static/ci/py3.9/freebsd.txt | 4 +
 requirements/static/ci/py3.9/linux.txt | 4 +
 requirements/static/ci/py3.9/windows.txt | 4 +
 requirements/static/pkg/py3.10/darwin.txt | 2 +
 requirements/static/pkg/py3.10/freebsd.txt | 2 +
 requirements/static/pkg/py3.10/linux.txt | 2 +
 requirements/static/pkg/py3.10/windows.txt | 2 +
 requirements/static/pkg/py3.11/darwin.txt | 2 +
 requirements/static/pkg/py3.11/freebsd.txt | 2 +
 requirements/static/pkg/py3.11/linux.txt | 2 +
 requirements/static/pkg/py3.11/windows.txt | 2 +
 requirements/static/pkg/py3.12/darwin.txt | 2 +
 requirements/static/pkg/py3.12/freebsd.txt | 2 +
 requirements/static/pkg/py3.12/linux.txt | 2 +
 requirements/static/pkg/py3.12/windows.txt | 2 +
 requirements/static/pkg/py3.8/freebsd.txt | 2 +
 requirements/static/pkg/py3.8/linux.txt | 2 +
 requirements/static/pkg/py3.8/windows.txt | 2 +
 requirements/static/pkg/py3.9/darwin.txt | 2 +
 requirements/static/pkg/py3.9/freebsd.txt | 2 +
 requirements/static/pkg/py3.9/linux.txt | 2 +
 requirements/static/pkg/py3.9/windows.txt | 2 +
 salt/client/ssh/wrapper/state.py | 49 +-
 salt/modules/chroot.py | 4 +-
 salt/modules/state.py | 26 +-
 salt/state.py | 1733 ++++++-----------
 salt/states/pkg.py | 75 +-
 salt/states/test.py | 2 +-
 salt/thorium/__init__.py | 5 +-
 salt/utils/event.py | 18 +-
 salt/utils/reactor.py | 11 +-
 salt/utils/requisite.py | 469 +++++
 salt/utils/thin.py | 6 +-
 .../state/requisites/test_aggregate.py | 123 ++
 .../modules/state/requisites/test_mixed.py | 134 +-
 .../state/requisites/test_onchanges.py | 21 +-
 .../modules/state/requisites/test_prereq.py | 129 +-
 .../modules/state/requisites/test_require.py | 174 +-
 .../functional/modules/state/test_state.py | 6 +-
 .../functional/states/file/test_replace.py | 3 +-
 .../pytests/unit/modules/state/test_state.py | 31 +-
 tests/pytests/unit/modules/test_chroot.py | 1 +
 .../unit/state/test_reactor_compiler.py | 71 +-
 .../pytests/unit/state/test_state_compiler.py | 154 +-
 tests/pytests/unit/states/test_pkg.py | 2 +-
 .../utils/requisite/test_dependency_graph.py | 485 +++++
 tests/pytests/unit/utils/test_http.py | 3 +-
 tests/pytests/unit/utils/test_thin.py | 2 +-
 tests/unit/utils/test_thin.py | 53 +-
 76 files changed, 2307 insertions(+), 1625 deletions(-)
 create mode 100644 changelog/47154.fixed.md
 create mode 100644 changelog/59123.fixed.md
 create mode 100644 changelog/62439.fixed.md
 create mode 100644 changelog/8210.fixed.md
 create mode 100644 salt/utils/requisite.py
 create mode 100644 tests/pytests/functional/modules/state/requisites/test_aggregate.py
 create mode 100644 tests/pytests/unit/utils/requisite/test_dependency_graph.py

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4e38a366de2..9ce4fc38b3d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1402,7 +1402,7 @@ repos:
     hooks:
       - id: pyupgrade
         name: Upgrade code for Py3.8+
-        args: [--py38-plus, --keep-mock]
+        args: [--py38-plus, --keep-mock, --keep-runtime-typing]
         exclude: >
           (?x)^(
             salt/client/ssh/ssh_py_shim.py
diff --git a/.pylintrc b/.pylintrc
index 1d8f8e51270..1809ffc356d 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -761,4 +761,5 @@ allowed-3rd-party-modules=msgpack,
     pytestskipmarkers,
     cryptography,
     aiohttp,
-    pytest_timeout
+    pytest_timeout,
+    networkx
diff --git a/changelog/47154.fixed.md b/changelog/47154.fixed.md
new file mode 100644
index 00000000000..55aafee0fc7
--- /dev/null
+++ b/changelog/47154.fixed.md
@@ -0,0 +1 @@
+Fixed erroneous recursive requisite error when a prereq is used in combination with onchanges_any.
diff --git a/changelog/59123.fixed.md b/changelog/59123.fixed.md
new file mode 100644
index 00000000000..d5a8b1803a6
--- /dev/null
+++ b/changelog/59123.fixed.md
@@ -0,0 +1 @@
+Fixed dependency resolution to not be quadratic.
diff --git a/changelog/62439.fixed.md b/changelog/62439.fixed.md
new file mode 100644
index 00000000000..064efeb3d1b
--- /dev/null
+++ b/changelog/62439.fixed.md
@@ -0,0 +1 @@
+Fixed performance when state_aggregate is enabled.
diff --git a/changelog/65304.fixed.md b/changelog/65304.fixed.md
index dd162cee714..fa80712df85 100644
--- a/changelog/65304.fixed.md
+++ b/changelog/65304.fixed.md
@@ -1 +1 @@
-pkg.installed state aggregate does not honors requires requisite
+Fixed aggregation to correctly honor requisites.
diff --git a/changelog/8210.fixed.md b/changelog/8210.fixed.md
new file mode 100644
index 00000000000..009dac80145
--- /dev/null
+++ b/changelog/8210.fixed.md
@@ -0,0 +1 @@
+Fixed recursive prereq requisites to report a recursive requisite error.
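The core idea of the fix: requisite resolution now builds a directed graph (one node per low chunk, one edge per requisite) via the new salt/utils/requisite.py, so ordering becomes a topological sort and a recursive requisite surfaces as a graph cycle rather than being hunted for with nested scans. A minimal sketch of that idea in plain networkx follows; the names here (dag, order_key) are illustrative only, not the DependencyGraph API added by this patch:

    import networkx as nx

    dag = nx.DiGraph()
    # One node per low chunk, keyed by its state tag; an edge A -> B means
    # "A must run before B" (for example, B requires A).
    dag.add_node("pkg_|-nginx_|-nginx_|-installed", order=10000)
    dag.add_node("service_|-nginx_|-nginx_|-running", order=10001)
    dag.add_edge(
        "pkg_|-nginx_|-nginx_|-installed",
        "service_|-nginx_|-nginx_|-running",
    )

    def order_key(tag):
        # Ties in the topological order fall back to the explicit `order`
        # option, as in aggregate_and_order_chunks referenced below.
        return dag.nodes[tag]["order"]

    try:
        ordered = list(nx.lexicographical_topological_sort(dag, key=order_key))
    except nx.NetworkXUnfeasible:
        # An unfeasible sort means a cycle, i.e. a recursive requisite.
        print(f"Recursive requisites were found: {nx.find_cycle(dag)}")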
diff --git a/requirements/base.txt b/requirements/base.txt index e6ab6f051c4..4ff5f023ead 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -5,6 +5,7 @@ jmespath msgpack>=1.0.0 PyYAML MarkupSafe +networkx requests>=2.31.0 ; python_version < '3.8' requests>=2.32.0 ; python_version >= '3.8' distro>=1.0.1 @@ -25,7 +26,7 @@ pyopenssl>=24.0.0 python-dateutil>=2.8.1 python-gnupg>=0.4.7 cherrypy>=18.6.1 -importlib-metadata>=3.3.0 +importlib-metadata>=4.3.0 cryptography>=42.0.0 # From old requirements/static/pkg/linux.in diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index cbf72ae9d3a..03aed248703 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -282,6 +282,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index e6a7f7cc26a..b94527e3e26 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -285,6 +285,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 34906f5148c..8387571e9b0 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -310,6 +310,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 496f104398f..db35eb82e9a 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -253,6 +253,10 @@ multidict==6.0.4 # -c requirements/static/ci/../pkg/py3.10/windows.txt # aiohttp # yarl +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/base.txt packaging==23.1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index ff4f6b7e032..9fcbd9a2e7a 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -275,6 +275,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 6e324cc5f4c..d96697d24cb 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -278,6 +278,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 5a2f15856d0..eab77cd9053 100644 --- 
a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -301,6 +301,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 2b77f8cb01a..91536cbeb92 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -246,6 +246,10 @@ multidict==6.0.4 # -c requirements/static/ci/../pkg/py3.11/windows.txt # aiohttp # yarl +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt packaging==23.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 41f14034d18..2de251e8d23 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -385,6 +385,11 @@ netutils==1.6.0 # via # -c requirements/static/ci/py3.12/linux.txt # napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via # -c requirements/static/ci/py3.12/linux.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index f77e05e5ddf..d4983cb783f 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -275,6 +275,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt index acfaab6194d..1ae325868d1 100644 --- a/requirements/static/ci/py3.12/docs.txt +++ b/requirements/static/ci/py3.12/docs.txt @@ -158,6 +158,10 @@ multidict==6.0.4 # yarl myst-docutils[linkify]==1.0.0 # via -r requirements/static/ci/docs.in +networkx==3.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt packaging==23.1 # via # -c requirements/static/ci/py3.12/linux.txt diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index a293d6e0945..0f0d78e6bc9 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -278,6 +278,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index 2196c275bf3..0679ee0853f 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -412,6 +412,11 @@ netutils==1.6.0 # via # -c requirements/static/ci/py3.12/linux.txt # napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via # -c requirements/static/ci/py3.12/linux.txt diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 6e9d2f93e15..ce21cc7cb66 100644 --- 
a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -301,6 +301,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index e4380533d61..e2fdcb1a273 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -246,6 +246,10 @@ multidict==6.0.4 # -c requirements/static/ci/../pkg/py3.12/windows.txt # aiohttp # yarl +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt packaging==23.1 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 94c0f247d1a..c5b479d2591 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -289,6 +289,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.1 + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 55faa78e308..08e036ae3e3 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -308,6 +308,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.1 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 2099586ca2c..a33dc7402ab 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -257,6 +257,10 @@ multidict==6.0.4 # -c requirements/static/ci/../pkg/py3.8/windows.txt # aiohttp # yarl +networkx==3.1 + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/base.txt packaging==23.1 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index a364e46cc2a..cb5d73f35b1 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -282,6 +282,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index d4a44882aea..79a3940ab54 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -285,6 +285,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index dd4d0c1630a..7595f06d255 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -304,6 +304,10 @@ netmiko==4.2.0 # via napalm netutils==1.6.0 # via napalm +networkx==3.2.1 + # via + # -c 
requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/base.txt ntc-templates==4.0.1 # via netmiko oscrypto==1.3.0 diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 5c610222c6b..da52e59971b 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -253,6 +253,10 @@ multidict==6.0.4 # -c requirements/static/ci/../pkg/py3.9/windows.txt # aiohttp # yarl +networkx==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/base.txt packaging==23.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index d5b06ea94c7..ee2d844c2d0 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -83,6 +83,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index 3fd122e574d..a0a621cb63c 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -83,6 +83,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 7f7f059baf3..d5733c9f873 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -83,6 +83,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index b44be8971f8..841a6d45d02 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -91,6 +91,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt index db6c4d08a5a..043922de1af 100644 --- a/requirements/static/pkg/py3.11/darwin.txt +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -81,6 +81,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt index 99d25ff7113..110689c48cf 100644 --- a/requirements/static/pkg/py3.11/freebsd.txt +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -81,6 +81,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt index 4f25b2b1030..60448507aea 100644 --- a/requirements/static/pkg/py3.11/linux.txt +++ b/requirements/static/pkg/py3.11/linux.txt @@ -81,6 +81,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.11/windows.txt 
b/requirements/static/pkg/py3.11/windows.txt index 6815187d2a4..34a3a1f3abc 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -89,6 +89,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt index 51020f0170a..1a39d942391 100644 --- a/requirements/static/pkg/py3.12/darwin.txt +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -81,6 +81,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt index 95bedeebbe5..33290ea6c61 100644 --- a/requirements/static/pkg/py3.12/freebsd.txt +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -81,6 +81,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt index 1da1d6f25a0..b0dfe85f82b 100644 --- a/requirements/static/pkg/py3.12/linux.txt +++ b/requirements/static/pkg/py3.12/linux.txt @@ -81,6 +81,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt index 3b5b7009c04..a817b1b4177 100644 --- a/requirements/static/pkg/py3.12/windows.txt +++ b/requirements/static/pkg/py3.12/windows.txt @@ -89,6 +89,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index 1097495bedd..642cb950830 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -85,6 +85,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index e144ef6b5a0..057d63e2b00 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -85,6 +85,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 11d7cf6d112..194d3c1de41 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -93,6 +93,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 47ef9bfda36..a271829cf22 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -83,6 +83,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt 
portend==3.1.0 diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index bde4cdef69b..35f4d9edecb 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -83,6 +83,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 44e4bb6ef18..305c81719b5 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -83,6 +83,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index b85fe898451..a3a4950f893 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -91,6 +91,8 @@ multidict==6.0.4 # via # aiohttp # yarl +networkx==3.2.1 + # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 diff --git a/salt/client/ssh/wrapper/state.py b/salt/client/ssh/wrapper/state.py index 79667a4dede..e545584f642 100644 --- a/salt/client/ssh/wrapper/state.py +++ b/salt/client/ssh/wrapper/state.py @@ -199,7 +199,10 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs): __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors # Compile and verify the raw chunks - chunks = st_.state.compile_high_data(high_data) + chunks, errors = st_.state.compile_high_data(high_data) + if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR + return errors file_refs = salt.client.ssh.state.lowstate_file_refs( chunks, _merge_extra_filerefs( @@ -430,7 +433,9 @@ def high(data, **kwargs): # Ensure other wrappers use the correct pillar __pillar__.update(pillar) st_.push_active() - chunks = st_.state.compile_high_data(data) + chunks, errors = st_.state.compile_high_data(data) + if errors: + return errors file_refs = salt.client.ssh.state.lowstate_file_refs( chunks, _merge_extra_filerefs( @@ -676,9 +681,9 @@ def highstate(test=None, **kwargs): # Ensure other wrappers use the correct pillar __pillar__.update(pillar) st_.push_active() - chunks = st_.compile_low_chunks(context=__context__.value()) + chunks_or_errors = st_.compile_low_chunks(context=__context__.value()) file_refs = salt.client.ssh.state.lowstate_file_refs( - chunks, + chunks_or_errors, _merge_extra_filerefs( kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", ""), @@ -686,19 +691,19 @@ def highstate(test=None, **kwargs): ), ) # Check for errors - for chunk in chunks: + for chunk in chunks_or_errors: if not isinstance(chunk, dict): __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR - return chunks + return chunks_or_errors roster = salt.roster.Roster(opts, opts.get("roster", "flat")) roster_grains = roster.opts["grains"] # Create the tar containing the state pkg and relevant files. 
- _cleanup_slsmod_low_data(chunks) + _cleanup_slsmod_low_data(chunks_or_errors) trans_tar = salt.client.ssh.state.prep_trans_tar( __context__["fileclient"], - chunks, + chunks_or_errors, file_refs, pillar, st_kwargs["id_"], @@ -767,14 +772,14 @@ def top(topfn, test=None, **kwargs): __pillar__.update(pillar) st_.opts["state_top"] = os.path.join("salt://", topfn) st_.push_active() - chunks = st_.compile_low_chunks(context=__context__.value()) + chunks_or_errors = st_.compile_low_chunks(context=__context__.value()) # Check for errors - for chunk in chunks: + for chunk in chunks_or_errors: if not isinstance(chunk, dict): __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR - return chunks + return chunks_or_errors file_refs = salt.client.ssh.state.lowstate_file_refs( - chunks, + chunks_or_errors, _merge_extra_filerefs( kwargs.get("extra_filerefs", ""), opts.get("extra_filerefs", ""), @@ -786,10 +791,10 @@ def top(topfn, test=None, **kwargs): roster_grains = roster.opts["grains"] # Create the tar containing the state pkg and relevant files. - _cleanup_slsmod_low_data(chunks) + _cleanup_slsmod_low_data(chunks_or_errors) trans_tar = salt.client.ssh.state.prep_trans_tar( __context__["fileclient"], - chunks, + chunks_or_errors, file_refs, pillar, st_kwargs["id_"], @@ -888,9 +893,9 @@ def show_lowstate(**kwargs): err += st_.opts["pillar"]["_errors"] return err st_.push_active() - chunks = st_.compile_low_chunks(context=__context__.value()) - _cleanup_slsmod_low_data(chunks) - return chunks + chunks_or_errors = st_.compile_low_chunks(context=__context__.value()) + _cleanup_slsmod_low_data(chunks_or_errors) + return chunks_or_errors def sls_id(id_, mods, test=None, queue=False, **kwargs): @@ -977,7 +982,10 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs): if errors: __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors - chunks = st_.state.compile_high_data(high_) + chunks, errors = st_.state.compile_high_data(high_) + if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR + return errors chunk = [x for x in chunks if x.get("__id__", "") == id_] if not chunk: @@ -1108,7 +1116,10 @@ def show_low_sls(mods, saltenv="base", test=None, **kwargs): if errors: __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors - ret = st_.state.compile_high_data(high_data) + ret, errors = st_.state.compile_high_data(high_data) + if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR + return errors _cleanup_slsmod_low_data(ret) return ret diff --git a/salt/modules/chroot.py b/salt/modules/chroot.py index 5be402f1beb..01f61aa14d5 100644 --- a/salt/modules/chroot.py +++ b/salt/modules/chroot.py @@ -347,7 +347,9 @@ def sls(root, mods, saltenv="base", test=None, exclude=None, **kwargs): high_data = st_.state.apply_exclude(high_data) # Compile and verify the raw chunks - chunks = st_.state.compile_high_data(high_data) + chunks, errors = st_.state.compile_high_data(high_data) + if errors: + return errors file_refs = salt.client.ssh.state.lowstate_file_refs( chunks, salt.client.ssh.wrapper.state._merge_extra_filerefs( diff --git a/salt/modules/state.py b/salt/modules/state.py index d35273a5270..e401fdd1d89 100644 --- a/salt/modules/state.py +++ b/salt/modules/state.py @@ -1797,7 +1797,11 @@ def show_states(queue=None, **kwargs): if not isinstance(s, dict): _set_retcode(result) return result - states[s["__sls__"]] = True + # The isinstance check ensures s is a dict, + # so 
disable the error pylint incorrectly gives: + # [E1126(invalid-sequence-index), show_states] + # Sequence index is not an int, slice, or instance with __index__ + states[s["__sls__"]] = True # pylint: disable=E1126 finally: st_.pop_active() @@ -1915,7 +1919,9 @@ def sls_id(id_, mods, test=None, queue=None, state_events=None, **kwargs): if errors: __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors - chunks = st_.state.compile_high_data(high_) + chunks, errors = st_.state.compile_high_data(high_) + if errors: + return errors ret = {} for chunk in chunks: if chunk.get("__id__", "") == id_: @@ -2023,7 +2029,9 @@ def show_low_sls(mods, test=None, queue=None, **kwargs): if errors: __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors - ret = st_.state.compile_high_data(high_) + ret, errors = st_.state.compile_high_data(high_) + if errors: + return errors # Work around Windows multiprocessing bug, set __opts__['test'] back to # value from before this function was run. __opts__["test"] = orig_test @@ -2378,15 +2386,19 @@ def pkg(pkg_path, pkg_sum, hash_type, test=None, **kwargs): continue popts["file_roots"][fn_] = [full] st_ = salt.state.State(popts, pillar_override=pillar_override) - snapper_pre = _snapper_pre(popts, kwargs.get("__pub_jid", "called localy")) - ret = st_.call_chunks(lowstate) - ret = st_.call_listen(lowstate, ret) + snapper_pre = _snapper_pre(popts, kwargs.get("__pub_jid", "called locally")) + chunks, errors = st_.order_chunks(lowstate) + if errors: + ret = errors + else: + ret = st_.call_chunks(chunks) + ret = st_.call_listen(chunks, ret) try: shutil.rmtree(root) except OSError: pass _set_retcode(ret) - _snapper_post(popts, kwargs.get("__pub_jid", "called localy"), snapper_pre) + _snapper_post(popts, kwargs.get("__pub_jid", "called locally"), snapper_pre) return ret diff --git a/salt/state.py b/salt/state.py index f8821c49809..9865e602597 100644 --- a/salt/state.py +++ b/salt/state.py @@ -11,6 +11,8 @@ The data sent to the state calls is as follows: } """ +from __future__ import annotations + import copy import datetime import fnmatch @@ -23,6 +25,10 @@ import re import site import time import traceback +from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence +from typing import Any, Optional, Union + +import networkx as nx import salt.channel.client import salt.fileclient @@ -52,27 +58,25 @@ from salt.serializers.msgpack import deserialize as msgpack_deserialize from salt.serializers.msgpack import serialize as msgpack_serialize from salt.template import compile_template, compile_template_str from salt.utils.odict import DefaultOrderedDict, OrderedDict +from salt.utils.requisite import DependencyGraph, RequisiteType log = logging.getLogger(__name__) +# See https://docs.saltproject.io/en/latest/ref/states/layers.html for details on the naming. +# __exclude__ and __extend__ are list values; the other items are state items +# that have dict values. This could be cleaned up some by making +# exclude and extend into dicts instead of lists so the type of all the values are +# homogeneous. 
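+# For illustration only (hypothetical data, not part of this patch):
+# high data shaped like
+#     {"nginx": {"__sls__": "web",
+#                "pkg": [{"name": "nginx"}, "installed",
+#                        {"require": [{"file": "deploy_repo"}]}]}}
+# compiles down to a LowChunk like
+#     {"state": "pkg", "__id__": "nginx", "name": "nginx",
+#      "fun": "installed", "require": [{"file": "deploy_repo"}],
+#      "order": 10000}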
+HighData = dict[str, Union[Mapping[str, Any], list[Union[Mapping[str, Any], str]]]] +LowChunk = dict[str, Any] + # These are keywords passed to state module functions which are to be used # by salt in this state module and not on the actual state module function STATE_REQUISITE_KEYWORDS = frozenset( - [ - "onchanges", - "onchanges_any", - "onfail", - "onfail_any", - "onfail_all", + [req_type.value for req_type in RequisiteType] + + [ "onfail_stop", - "prereq", - "prerequired", - "watch", - "watch_any", - "require", - "require_any", - "listen", ] ) STATE_REQUISITE_IN_KEYWORDS = frozenset( @@ -93,7 +97,6 @@ STATE_RUNTIME_KEYWORDS = frozenset( "parallel", "prereq", "prereq_in", - "prerequired", "reload_modules", "reload_grains", "reload_pillar", @@ -118,7 +121,7 @@ STATE_RUNTIME_KEYWORDS = frozenset( "__pub_pid", "__pub_tgt_type", "__prereq__", - "__prerequired__", + "__prerequiring__", "__umask__", ] ) @@ -129,11 +132,11 @@ STATE_INTERNAL_KEYWORDS = STATE_REQUISITE_KEYWORDS.union( class HashableOrderedDict(OrderedDict): - def __hash__(self): + def __hash__(self) -> int: return id(self) -def split_low_tag(tag): +def split_low_tag(tag: str) -> dict[str, Any]: """ Take a low tag and split it back into the low dict that it came from """ @@ -142,21 +145,14 @@ def split_low_tag(tag): return {"state": state, "__id__": id_, "name": name, "fun": fun} -def _gen_tag(low): +def _gen_tag(low: LowChunk) -> str: """ Generate the running dict tag string from the low data structure """ return "{0[state]}_|-{0[__id__]}_|-{0[name]}_|-{0[fun]}".format(low) -def _clean_tag(tag): - """ - Make tag name safe for filenames - """ - return salt.utils.files.safe_filename_leaf(tag) - - -def _l_tag(name, id_): +def _l_tag(name: str, id_: str) -> str: low = { "name": f"listen_{name}", "__id__": f"listen_{id_}", @@ -166,7 +162,7 @@ def _l_tag(name, id_): return _gen_tag(low) -def _calculate_fake_duration(): +def _calculate_fake_duration() -> tuple[str, float]: """ Generate a NULL duration for when states do not run but we want the results to be consistent. @@ -196,17 +192,7 @@ def get_accumulator_dir(cachedir): return fn_ -def trim_req(req): - """ - Trim any function off of a requisite - """ - reqfirst = next(iter(req)) - if "." 
in reqfirst: - return {reqfirst.split(".")[0]: req[reqfirst]} - return req - - -def state_args(id_, state, high): +def state_args(id_: Hashable, state: Hashable, high: HighData) -> set[Any]: """ Return a set of the arguments passed to the named state """ @@ -224,7 +210,9 @@ def state_args(id_, state, high): return args -def find_name(name, state, high, strict=False): +def find_name( + name: str, state: str, high: HighData, strict: bool = False +) -> list[tuple[str, dict[str, str]]]: """ Scan high data for the id referencing the given name and return a list of (IDs, state) tuples that match @@ -258,9 +246,10 @@ def find_name(name, state, high, strict=False): return ext_id -def find_sls_ids(sls, high): +def find_sls_ids(sls: Any, high: HighData) -> list[tuple[str, str]]: """ - Scan for all ids in the given sls and return them in a dict; {name: state} + Scan for all ids in the given sls and return them in a list + of (ID, state) tuples that match """ ret = [] for nid, item in high.items(): @@ -281,7 +270,7 @@ def find_sls_ids(sls, high): return ret -def format_log(ret): +def format_log(ret: Any) -> None: """ Format the state into a log message """ @@ -334,7 +323,7 @@ def master_compile(master_opts, minion_opts, grains, id_, saltenv): return st_.compile_highstate() -def ishashable(obj): +def ishashable(obj: object) -> bool: try: hash(obj) except TypeError: @@ -342,7 +331,7 @@ def ishashable(obj): return True -def mock_ret(cdata): +def mock_ret(cdata: dict[str, Any]) -> dict[str, Any]: """ Returns a mocked return dict with information about the run, without executing the state function @@ -361,6 +350,170 @@ def mock_ret(cdata): } +def _apply_exclude(high: HighData) -> HighData: + """ + Read in the __exclude__ list and remove all excluded objects from the + high data + """ + if "__exclude__" not in high: + return high + ex_sls = set() + ex_id = set() + exclude = high.pop("__exclude__") + for exc in exclude: + if isinstance(exc, str): + # The exclude statement is a string, assume it is an sls + ex_sls.add(exc) + if isinstance(exc, dict): + # Explicitly declared exclude + if len(exc) != 1: + continue + key = next(iter(exc.keys())) + if key == "sls": + ex_sls.add(exc["sls"]) + elif key == "id": + ex_id.add(exc["id"]) + # Now the excludes have been simplified, use them + if ex_sls: + # There are sls excludes, find the associated ids + for name, body in high.items(): + if name.startswith("__"): + continue + sls = body.get("__sls__", "") + if not sls: + continue + for ex_ in ex_sls: + if fnmatch.fnmatch(sls, ex_): + ex_id.add(name) + for id_ in ex_id: + if id_ in high: + high.pop(id_) + return high + + +def _verify_high(high: dict) -> list[str]: + """ + Verify that the high data is viable and follows the data structure + """ + if not isinstance(high, dict): + return ["High data is not a dictionary and is invalid"] + errors = [] + for id_, body in high.items(): + if not isinstance(id_, str): + errors.append( + f"ID '{id_}' in SLS '{body['__sls__']}' is not formed as a string, " + f"but is type {type(id_).__name__}. It may need to be quoted." + ) + elif id_.startswith("__"): + continue + if not isinstance(body, dict): + err = f"The type {id_} in {body} is not formatted as a dictionary" + errors.append(err) + continue + for state in body: + if state.startswith("__"): + continue + if body[state] is None: + errors.append( + f"ID '{id_}' in SLS '{body['__sls__']}' contains a short declaration " + f"({state}) with a trailing colon. When not passing any " + "arguments to a state, the colon must be omitted." 
+ ) + continue + if not isinstance(body[state], list): + errors.append( + f"State '{id_}' in SLS '{body['__sls__']}' is not formed as a list" + ) + else: + fun_count = 0 + if "." in state: + # This should not happen usually since `_handle_state_decls` or + # `pad_funcs` is run on rendered templates + fun_count += 1 + for arg in body[state]: + if isinstance(arg, str): + fun_count += 1 + if " " in arg.strip(): + errors.append( + f'The function "{arg}" in state ' + f'"{id_}" in SLS "{body["__sls__"]}" has ' + "whitespace, a function with whitespace is " + "not supported, perhaps this is an argument" + ' that is missing a ":"' + ) + + elif isinstance(arg, dict): + # The arg is a dict, if the arg is require or + # watch, it must be a list. + argfirst = next(iter(arg)) + if argfirst == "names": + if not isinstance(arg[argfirst], list): + errors.append( + "The 'names' argument in state " + f"'{id_}' in SLS '{body['__sls__']}' needs to be " + "formed as a list" + ) + if argfirst in STATE_REQUISITE_KEYWORDS: + if not isinstance(arg[argfirst], list): + errors.append( + f"The {argfirst} statement in state '{id_}' in " + f"SLS '{body['__sls__']}' needs to be formed as a " + "list" + ) + # It is a list, verify that the members of the + # list are all single key dicts. + else: + for req in arg[argfirst]: + if isinstance(req, str): + req = {"id": req} + if not isinstance(req, dict) or len(req) != 1: + errors.append( + f"Requisite declaration {req} in " + f"state {id_} in SLS {body['__sls__']} " + "is not formed as a single key dictionary" + ) + continue + # req_key: the name or id of the required state; the requisite will match both + # req_val: the type of requirement i.e. id, sls, name of state module like file + req_key, req_val = next(iter(req.items())) + if "." in req_key: + errors.append( + f"Invalid requisite type '{req_key}' " + f"in state '{id_}', in SLS " + f"'{ body['__sls__']}'. Requisite types must " + "not contain dots, did you " + f"mean '{req_key[: req_key.find('.')]}'?" + ) + if not ishashable(req_val): + errors.append( + f'Illegal requisite "{req_val}" ' + f'in SLS "{body["__sls__"]}", ' + "please check your syntax.\n" + ) + continue + # Make sure that there is only one key in the + # dict + if len(list(arg)) != 1: + errors.append( + "Multiple dictionaries defined in " + f"argument of state '{id_}' in SLS '{body['__sls__']}'" + ) + if not fun_count: + errors.append( + f"No function declared in state '{id_}' in SLS " + f"'{body['__sls__']}'" + ) + elif fun_count > 1: + funs = [state.split(".", maxsplit=1)[1]] if "." in state else [] + funs.extend(arg for arg in body[state] if isinstance(arg, str)) + errors.append( + f"Too many functions declared in state '{id_}' in " + f"SLS '{body['__sls__']}'. Please choose one of " + "the following: " + ", ".join(funs) + ) + return errors + + class StateError(Exception): """ Custom exception class. 
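Both Compiler.verify_high and State.verify_high now delegate to the module-level _verify_high helper above, replacing the two duplicated copies removed further down. A small sketch of the validation it performs, assuming salt.state is importable (the expected output is taken from the helper's error strings):

    from salt.state import _verify_high

    bad_high = {
        "nginx": {
            "__sls__": "web",
            # `require` must be a list of single-key dicts; a bare dict
            # triggers a validation error
            "pkg.installed": [{"require": {"file": "deploy_repo"}}],
        }
    }
    for error in _verify_high(bad_high):
        print(error)
    # The require statement in state 'nginx' in SLS 'web' needs to be
    # formed as a list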
@@ -373,6 +526,7 @@ class Compiler: """ def __init__(self, opts, renderers): + self.dependency_dag = DependencyGraph() self.opts = opts self.rend = renderers @@ -445,215 +599,50 @@ class Compiler: """ Verify that the high data is viable and follows the data structure """ - errors = [] - if not isinstance(high, dict): - errors.append("High data is not a dictionary and is invalid") - reqs = HashableOrderedDict() - if not errors: - for name, body in high.items(): - try: - if name.startswith("__"): - continue - except (AttributeError, TypeError): - # Do not traceback on non string state ID - # handle the error properly - pass + return _verify_high(high) - if not isinstance(name, str): - errors.append( - "ID '{}' in SLS '{}' is not formed as a string, but is a {}. It may need to be quoted".format( - name, body["__sls__"], type(name).__name__ - ) - ) - if not isinstance(body, dict): - err = "The type {} in {} is not formatted as a dictionary".format( - name, body - ) - errors.append(err) - continue - for state in body: - if state.startswith("__"): - continue - if not isinstance(body[state], list): - errors.append( - "State '{}' in SLS '{}' is not formed as a list".format( - name, body["__sls__"] - ) - ) - else: - fun = 0 - if "." in state: - # This should not happen usually since `pad_funcs` - # is run on rendered templates - fun += 1 - for arg in body[state]: - if isinstance(arg, str): - fun += 1 - if " " in arg.strip(): - errors.append( - f'The function "{arg}" in state ' - f'"{name}" in SLS "{body["__sls__"]}" has ' - "whitespace, a function with whitespace is " - "not supported, perhaps this is an argument" - ' that is missing a ":"' - ) - elif isinstance(arg, dict): - # The arg is a dict, if the arg is require or - # watch, it must be a list. - # - # Add the requires to the reqs dict and check them - # all for recursive requisites. - argfirst = next(iter(arg)) - if argfirst in ( - "require", - "watch", - "prereq", - "onchanges", - ): - if not isinstance(arg[argfirst], list): - errors.append( - "The {} statement in state '{}' in SLS '{}' " - "needs to be formed as a list".format( - argfirst, name, body["__sls__"] - ) - ) - # It is a list, verify that the members of the - # list are all single key dicts. - else: - reqs[name] = {"state": state} - for req in arg[argfirst]: - if isinstance(req, str): - req = {"id": req} - if not isinstance(req, dict): - errors.append( - "Requisite declaration {} in SLS {} " - "is not formed as a single key " - "dictionary".format( - req, body["__sls__"] - ) - ) - continue - req_key = next(iter(req)) - req_val = req[req_key] - if "." in req_key: - errors.append( - "Invalid requisite type '{}' " - "in state '{}', in SLS " - "'{}'. 
Requisite types must " - "not contain dots, did you " - "mean '{}'?".format( - req_key, - name, - body["__sls__"], - req_key[: req_key.find(".")], - ) - ) - if not ishashable(req_val): - errors.append( - 'Illegal requisite "{}", is SLS {}\n'.format( - str(req_val), - body["__sls__"], - ) - ) - continue - - # Check for global recursive requisites - reqs[name][req_val] = req_key - # I am going beyond 80 chars on - # purpose, this is just too much - # of a pain to deal with otherwise - if req_val in reqs: - if name in reqs[req_val]: - if reqs[req_val][name] == state: - if ( - reqs[req_val]["state"] - == reqs[name][req_val] - ): - errors.append( - "A recursive requisite was" - ' found, SLS "{}" ID "{}"' - ' ID "{}"'.format( - body["__sls__"], - name, - req_val, - ) - ) - # Make sure that there is only one key in the - # dict - if len(list(arg)) != 1: - errors.append( - "Multiple dictionaries defined in argument " - "of state '{}' in SLS '{}'".format( - name, body["__sls__"] - ) - ) - if not fun: - if state == "require" or state == "watch": - continue - errors.append( - f"No function declared in state '{name}' in SLS " - f"'{body['__sls__']}'" - ) - elif fun > 1: - funs = ( - [state.split(".", maxsplit=1)[1]] - if "." in state - else [] - ) - funs.extend( - arg for arg in body[state] if isinstance(arg, str) - ) - errors.append( - f"Too many functions declared in state '{name}' in " - f"SLS '{body['__sls__']}'. Please choose one of " - "the following: " + ", ".join(funs) - ) - return errors - - def order_chunks(self, chunks): + def order_chunks( + self, chunks: Iterable[LowChunk] + ) -> tuple[list[LowChunk], list[str]]: """ Sort the chunk list verifying that the chunks follow the order specified in the order options. + + :return: a tuple of a list of the ordered chunks and a list of errors """ cap = 1 + errors = [] for chunk in chunks: - if "order" in chunk: + chunk["name"] = salt.utils.data.decode(chunk["name"]) + error = self.dependency_dag.add_requisites(chunk, []) + if error: + errors.append(error) + elif "order" in chunk: if not isinstance(chunk["order"], int): continue chunk_order = chunk["order"] if chunk_order > cap - 1 and chunk_order > 0: cap = chunk_order + 100 - for chunk in chunks: - if "order" not in chunk: - chunk["order"] = cap - continue - if not isinstance(chunk["order"], (int, float)): - if chunk["order"] == "last": - chunk["order"] = cap + 1000000 - elif chunk["order"] == "first": - chunk["order"] = 0 - else: - chunk["order"] = cap - if "name_order" in chunk: - chunk["order"] = chunk["order"] + chunk.pop("name_order") / 10000.0 - if chunk["order"] < 0: - chunk["order"] = cap + 1000000 + chunk["order"] - chunk["name"] = salt.utils.data.decode(chunk["name"]) - chunks.sort( - key=lambda chunk: ( - chunk["order"], - "{0[state]}{0[name]}{0[fun]}".format(chunk), - ) - ) - return chunks + try: + # Get nodes in topological order also sorted by order attribute + sorted_chunks = self.dependency_dag.aggregate_and_order_chunks(cap) + except nx.NetworkXUnfeasible: + sorted_chunks = [] + cycle_edges = self.dependency_dag.get_cycles_str() + errors.append(f"Recursive requisites were found: {cycle_edges}") + return sorted_chunks, errors - def compile_high_data(self, high): + def compile_high_data( + self, + high: dict[str, Any], + ) -> tuple[list[LowChunk], list[str]]: """ "Compile" the high data as it is retrieved from the CLI or YAML into the individual state executor structures """ + self.dependency_dag = DependencyGraph() chunks = [] for name, body in high.items(): if 
name.startswith("__"): @@ -698,50 +687,25 @@ class Compiler: name_order = name_order + 1 for fun in funcs: live["fun"] = fun + self.dependency_dag.add_chunk(live, False) chunks.append(live) + break else: live = copy.deepcopy(chunk) for fun in funcs: live["fun"] = fun + self.dependency_dag.add_chunk(live, False) chunks.append(live) - chunks = self.order_chunks(chunks) - return chunks + break + chunks, errors = self.order_chunks(chunks) + return chunks, errors def apply_exclude(self, high): """ Read in the __exclude__ list and remove all excluded objects from the high data """ - if "__exclude__" not in high: - return high - ex_sls = set() - ex_id = set() - exclude = high.pop("__exclude__") - for exc in exclude: - if isinstance(exc, str): - # The exclude statement is a string, assume it is an sls - ex_sls.add(exc) - if isinstance(exc, dict): - # Explicitly declared exclude - if len(exc) != 1: - continue - key = next(iter(exc.keys())) - if key == "sls": - ex_sls.add(exc["sls"]) - elif key == "id": - ex_id.add(exc["id"]) - # Now the excludes have been simplified, use them - if ex_sls: - # There are sls excludes, find the associtaed ids - for name, body in high.items(): - if name.startswith("__"): - continue - if body.get("__sls__", "") in ex_sls: - ex_id.add(name) - for id_ in ex_id: - if id_ in high: - high.pop(id_) - return high + return _apply_exclude(high) class State: @@ -813,7 +777,6 @@ class State: self.state_con = context self.state_con["fileclient"] = self.file_client self.load_modules() - self.active = set() self.mod_init = set() self.pre = {} self.__run_num = 0 @@ -822,6 +785,9 @@ class State: self.inject_globals = {} self.mocked = mocked self.global_state_conditions = None + self.dependency_dag = DependencyGraph() + # a mapping of state tag (unique id) to the return result dict + self.disabled_states: Optional[dict[str, dict[str, Any]]] = None def _match_global_state_conditions(self, full, state, name): """ @@ -926,72 +892,30 @@ class State: return self.mod_init.add(low["state"]) - def _aggregate_requisites(self, low, chunks): - """ - Aggregate the requisites - """ - requisites = {} - for chunk in chunks: - # if the state function in the chunk matches - # the state function in the low we're looking at - # and __agg__ is True, add the requisites from the - # chunk to those in the low. 
- if ( - chunk["state"] == low["state"] - and chunk.get("__agg__") - and low["name"] != chunk["name"] - ): - for req in frozenset.union( - *[STATE_REQUISITE_KEYWORDS, STATE_REQUISITE_IN_KEYWORDS] - ): - if req in chunk: - if req in requisites: - requisites[req].extend(chunk[req]) - else: - requisites[req] = chunk[req] - low.update(requisites) - return low - - def _mod_aggregate(self, low, running, chunks): + def _mod_aggregate( + self, low: LowChunk, running: dict[str, dict[str, Any]] + ) -> LowChunk: """ Execute the aggregation systems to runtime modify the low chunk """ - agg_opt = self.functions["config.option"]("state_aggregate") - if "aggregate" in low: - agg_opt = low["aggregate"] - if agg_opt is True: - agg_opt = [low["state"]] - elif not isinstance(agg_opt, list): - return low - if low["state"] in agg_opt and not low.get("__agg__"): - agg_fun = "{}.mod_aggregate".format(low["state"]) - if "loader_cache" not in self.state_con: - self.state_con["loader_cache"] = {} - if ( - agg_fun in self.state_con["loader_cache"] - and not self.state_con["loader_cache"][agg_fun] - ): - return low - if agg_fun in self.states: - self.state_con["loader_cache"][agg_fun] = True - try: - low["__agg__"] = True - # Aggregate the states *before* aggregating requisites otherwise there will never be requisites to aggregate - low = self.states[agg_fun](low, chunks, running) - low = self._aggregate_requisites(low, chunks) - except TypeError: - log.error("Failed to execute aggregate for state %s", low["state"]) - else: - self.state_con["loader_cache"][agg_fun] = False + if not low.get("__agg__") and ( + aggregate_chunks := self.dependency_dag.get_aggregate_chunks(low) + ): + agg_fun = f"{low['state']}.mod_aggregate" + try: + low["__agg__"] = True + low = self.states[agg_fun](low, aggregate_chunks, running) + except TypeError: + log.error("Failed to execute aggregate for state %s", low["state"]) return low - def _run_check(self, low_data): + def _run_check(self, low: LowChunk) -> dict[str, Any]: """ Check that unless doesn't return 0, and that onlyif returns a 0. 
""" ret = {"result": False, "comment": []} for key in ("__sls__", "__id__", "name"): - ret[key] = low_data.get(key) + ret[key] = low.get(key) cmd_opts = {} # Set arguments from cmd.run state as appropriate @@ -1005,31 +929,31 @@ class State: "timeout", "success_retcodes", ) - if "cmd_opts_exclude" in low_data: - if not isinstance(low_data["cmd_opts_exclude"], list): - cmd_opts_exclude = [low_data["cmd_opts_exclude"]] + if "cmd_opts_exclude" in low: + if not isinstance(low["cmd_opts_exclude"], list): + cmd_opts_exclude = [low["cmd_opts_exclude"]] else: - cmd_opts_exclude = low_data["cmd_opts_exclude"] + cmd_opts_exclude = low["cmd_opts_exclude"] else: cmd_opts_exclude = [] for run_cmd_arg in POSSIBLE_CMD_ARGS: if run_cmd_arg not in cmd_opts_exclude: - cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg) + cmd_opts[run_cmd_arg] = low.get(run_cmd_arg) - if "shell" in low_data and "shell" not in cmd_opts_exclude: - cmd_opts["shell"] = low_data["shell"] + if "shell" in low and "shell" not in cmd_opts_exclude: + cmd_opts["shell"] = low["shell"] elif "shell" in self.opts["grains"]: cmd_opts["shell"] = self.opts["grains"].get("shell") - if "onlyif" in low_data: - _ret = self._run_check_onlyif(low_data, cmd_opts) + if "onlyif" in low: + _ret = self._run_check_onlyif(low, cmd_opts) ret["result"] = _ret["result"] ret["comment"].append(_ret["comment"]) if "skip_watch" in _ret: ret["skip_watch"] = _ret["skip_watch"] - if "unless" in low_data: - _ret = self._run_check_unless(low_data, cmd_opts) + if "unless" in low: + _ret = self._run_check_unless(low, cmd_opts) # If either result is True, the returned result should be True ret["result"] = _ret["result"] or ret["result"] ret["comment"].append(_ret["comment"]) @@ -1037,8 +961,8 @@ class State: # If either result is True, the returned result should be True ret["skip_watch"] = _ret["skip_watch"] or ret["skip_watch"] - if "creates" in low_data: - _ret = self._run_check_creates(low_data) + if "creates" in low: + _ret = self._run_check_creates(low) ret["result"] = _ret["result"] or ret["result"] ret["comment"].append(_ret["comment"]) if "skip_watch" in _ret: @@ -1055,19 +979,19 @@ class State: self.format_slots(cdata) return self.functions[fun](*cdata["args"], **cdata["kwargs"]) - def _run_check_onlyif(self, low_data, cmd_opts): + def _run_check_onlyif(self, low: LowChunk, cmd_opts) -> dict[str, Any]: """ Make sure that all commands return True for the state to run. If any command returns False (non 0), the state will not run """ - ret = {"result": False} + ret: dict[str, Any] = {"result": False} for key in ("__sls__", "__id__", "name"): - ret[key] = low_data.get(key) + ret[key] = low.get(key) - if not isinstance(low_data["onlyif"], list): - low_data_onlyif = [low_data["onlyif"]] + if not isinstance(low["onlyif"], list): + low_onlyif = [low["onlyif"]] else: - low_data_onlyif = low_data["onlyif"] + low_onlyif = low["onlyif"] # If any are False the state will NOT run def _check_cmd(cmd): @@ -1085,7 +1009,7 @@ class State: ret.update({"comment": "onlyif condition is true", "result": False}) return True - for entry in low_data_onlyif: + for entry in low_onlyif: if isinstance(entry, str): try: cmd = self.functions["cmd.retcode"]( @@ -1132,19 +1056,19 @@ class State: return ret return ret - def _run_check_unless(self, low_data, cmd_opts): + def _run_check_unless(self, low: LowChunk, cmd_opts) -> dict[str, Any]: """ Check if any of the commands return False (non 0). If any are False the state will run. 
""" - ret = {"result": False} + ret: dict[str, Any] = {"result": False} for key in ("__sls__", "__id__", "name"): - ret[key] = low_data.get(key) + ret[key] = low.get(key) - if not isinstance(low_data["unless"], list): - low_data_unless = [low_data["unless"]] + if not isinstance(low["unless"], list): + low_unless = [low["unless"]] else: - low_data_unless = low_data["unless"] + low_unless = low["unless"] # If any are False the state will run def _check_cmd(cmd): @@ -1163,7 +1087,7 @@ class State: ret.update({"comment": "unless condition is false", "result": False}) return True - for entry in low_data_unless: + for entry in low_unless: if isinstance(entry, str): try: cmd = self.functions["cmd.retcode"]( @@ -1212,17 +1136,17 @@ class State: # No reason to stop, return ret return ret - def _run_check_cmd(self, low_data): + def _run_check_cmd(self, low: LowChunk) -> dict[str, Any]: """ Alter the way a successful state run is determined """ - ret = {"result": False} + ret: dict[str, Any] = {"result": False} for key in ("__sls__", "__id__", "name"): - ret[key] = low_data.get(key) + ret[key] = low.get(key) cmd_opts = {} if "shell" in self.opts["grains"]: cmd_opts["shell"] = self.opts["grains"].get("shell") - for entry in low_data["check_cmd"]: + for entry in low["check_cmd"]: cmd = self.functions["cmd.retcode"]( entry, ignore_retcode=True, python_shell=True, **cmd_opts ) @@ -1244,20 +1168,20 @@ class State: return ret return ret - def _run_check_creates(self, low_data): + def _run_check_creates(self, low: LowChunk) -> dict[str, Any]: """ Check that listed files exist """ - ret = {"result": False} + ret: dict[str, Any] = {"result": False} for key in ("__sls__", "__id__", "name"): - ret[key] = low_data.get(key) + ret[key] = low.get(key) - if isinstance(low_data["creates"], str) and os.path.exists(low_data["creates"]): - ret["comment"] = "{} exists".format(low_data["creates"]) + if isinstance(low["creates"], str) and os.path.exists(low["creates"]): + ret["comment"] = "{} exists".format(low["creates"]) ret["result"] = True ret["skip_watch"] = True - elif isinstance(low_data["creates"], list) and all( - [os.path.exists(path) for path in low_data["creates"]] + elif isinstance(low["creates"], list) and all( + [os.path.exists(path) for path in low["creates"]] ): ret["comment"] = "All files in creates exist" ret["result"] = True @@ -1334,7 +1258,7 @@ class State: context=self.state_con, ) - def module_refresh(self): + def module_refresh(self) -> None: """ Refresh all the modules """ @@ -1357,7 +1281,7 @@ class State: if not self.opts.get("local", False) and self.opts.get("multiprocessing", True): self.functions["saltutil.refresh_modules"]() - def check_refresh(self, data, ret): + def check_refresh(self, data: dict, ret: dict) -> None: """ Check to see if the modules for this state instance need to be updated, only update if the state is a file or a package and if it changed @@ -1398,7 +1322,7 @@ class State: elif data["state"] in ("pkg", "ports", "pip"): self.module_refresh() - def verify_data(self, data): + def verify_data(self, data: dict[str, Any]) -> list[str]: """ Verify the data, return an error statement if something is wrong """ @@ -1452,270 +1376,99 @@ class State: aspec.args[ind], full ) ) - # If this chunk has a recursive require, then it will cause a - # recursive loop when executing, check for it - reqdec = "" - if "require" in data: - reqdec = "require" - if "watch" in data: - # Check to see if the service has a mod_watch function, if it does - # not, then just require - # to just require 
extend the require statement with the contents - # of watch so that the mod_watch function is not called and the - # requisite capability is still used - if "{}.mod_watch".format(data["state"]) not in self.states: - if "require" in data: - data["require"].extend(data.pop("watch")) - else: - data["require"] = data.pop("watch") - reqdec = "require" - else: - reqdec = "watch" - if reqdec: - for req in data[reqdec]: - reqfirst = next(iter(req)) - if data["state"] == reqfirst: - if fnmatch.fnmatch(data["name"], req[reqfirst]) or fnmatch.fnmatch( - data["__id__"], req[reqfirst] - ): - errors.append( - "Recursive require detected in SLS {} for " - "require {} in ID {}".format( - data["__sls__"], req, data["__id__"] - ) - ) return errors - def verify_high(self, high): + def verify_high(self, high: dict) -> list[str]: """ Verify that the high data is viable and follows the data structure """ - errors = [] - if not isinstance(high, dict): - errors.append("High data is not a dictionary and is invalid") - reqs = HashableOrderedDict() - for name, body in high.items(): - try: - if name.startswith("__"): - continue - except AttributeError: - pass - if not isinstance(name, str): - errors.append( - "ID '{}' in SLS '{}' is not formed as a string, but " - "is a {}. It may need to be quoted.".format( - name, body["__sls__"], type(name).__name__ - ) - ) - if not isinstance(body, dict): - err = "The type {} in {} is not formatted as a dictionary".format( - name, body - ) - errors.append(err) - continue - for state in body: - if state.startswith("__"): - continue - if body[state] is None: - errors.append( - "ID '{}' in SLS '{}' contains a short declaration " - "({}) with a trailing colon. When not passing any " - "arguments to a state, the colon must be omitted.".format( - name, body["__sls__"], state - ) - ) - continue - if not isinstance(body[state], list): - errors.append( - "State '{}' in SLS '{}' is not formed as a list".format( - name, body["__sls__"] - ) - ) - else: - fun = 0 - if "." in state: - # This should not happen usually since `_handle_state_decls` - # is run on rendered templates - fun += 1 - for arg in body[state]: - if isinstance(arg, str): - fun += 1 - if " " in arg.strip(): - errors.append( - f'The function "{arg}" in state ' - f'"{name}" in SLS "{body["__sls__"]}" has ' - "whitespace, a function with whitespace is " - "not supported, perhaps this is an argument" - ' that is missing a ":"' - ) + return _verify_high(high) - elif isinstance(arg, dict): - # The arg is a dict, if the arg is require or - # watch, it must be a list. - # - # Add the requires to the reqs dict and check them - # all for recursive requisites. - argfirst = next(iter(arg)) - if argfirst == "names": - if not isinstance(arg[argfirst], list): - errors.append( - "The 'names' argument in state " - "'{}' in SLS '{}' needs to be " - "formed as a list".format(name, body["__sls__"]) - ) - if argfirst in ("require", "watch", "prereq", "onchanges"): - if not isinstance(arg[argfirst], list): - errors.append( - "The {} statement in state '{}' in " - "SLS '{}' needs to be formed as a " - "list".format(argfirst, name, body["__sls__"]) - ) - # It is a list, verify that the members of the - # list are all single key dicts. 
- else: - reqs[name] = HashableOrderedDict(state=state) - for req in arg[argfirst]: - if isinstance(req, str): - req = {"id": req} - if not isinstance(req, dict): - errors.append( - "Requisite declaration {} in SLS {} is" - " not formed as a single key dictionary".format( - req, body["__sls__"] - ) - ) - continue - req_key = next(iter(req)) - req_val = req[req_key] - if "." in req_key: - errors.append( - "Invalid requisite type '{}' " - "in state '{}', in SLS " - "'{}'. Requisite types must " - "not contain dots, did you " - "mean '{}'?".format( - req_key, - name, - body["__sls__"], - req_key[: req_key.find(".")], - ) - ) - if not ishashable(req_val): - errors.append( - 'Illegal requisite "{}", please check ' - "your syntax.\n".format(req_val) - ) - continue - - # Check for global recursive requisites - reqs[name][req_val] = req_key - # I am going beyond 80 chars on - # purpose, this is just too much - # of a pain to deal with otherwise - if req_val in reqs: - if name in reqs[req_val]: - if reqs[req_val][name] == state: - if ( - reqs[req_val]["state"] - == reqs[name][req_val] - ): - errors.append( - "A recursive requisite was" - ' found, SLS "{}" ID "{}"' - ' ID "{}"'.format( - body["__sls__"], - name, - req_val, - ) - ) - # Make sure that there is only one key in the - # dict - if len(list(arg)) != 1: - errors.append( - "Multiple dictionaries defined in " - "argument of state '{}' in SLS '{}'".format( - name, body["__sls__"] - ) - ) - if not fun: - if state == "require" or state == "watch": - continue - errors.append( - f"No function declared in state '{name}' in SLS " - f"'{body['__sls__']}'" - ) - elif fun > 1: - funs = [state.split(".", maxsplit=1)[1]] if "." in state else [] - funs.extend(arg for arg in body[state] if isinstance(arg, str)) - errors.append( - f"Too many functions declared in state '{name}' in " - f"SLS '{body['__sls__']}'. Please choose one of " - "the following: " + ", ".join(funs) - ) - return errors - - def verify_chunks(self, chunks): - """ - Verify the chunks in a list of low data structures - """ - err = [] - for chunk in chunks: - err.extend(self.verify_data(chunk)) - return err - - def order_chunks(self, chunks): + def order_chunks( + self, chunks: Iterable[LowChunk] + ) -> tuple[list[LowChunk], list[str]]: """ Sort the chunk list verifying that the chunks follow the order specified in the order options. 
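+
+        Requisite edges in the dependency graph always take precedence;
+        the ``order`` option only breaks ties between otherwise
+        unconstrained chunks. As an illustrative sketch (hypothetical
+        chunks), the effective order values become:
+
+        .. code-block:: python
+
+            # "first" -> 0, unset -> cap, "last" -> cap + 1000000
+            chunks = [
+                {"state": "pkg", "name": "a", "fun": "installed", "order": "first"},
+                {"state": "pkg", "name": "b", "fun": "installed"},
+                {"state": "pkg", "name": "c", "fun": "installed", "order": "last"},
+            ]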
+ + :return: a tuple of a list of the ordered chunks and a list of errors """ cap = 1 + errors = [] + disabled_reqs = self.opts.get("disabled_requisites", []) + if not isinstance(disabled_reqs, list): + disabled_reqs = [disabled_reqs] + if not self.dependency_dag.dag: + # if order_chunks was called without calling compile_high_data then + # we need to add the chunks to the dag + agg_opt = self.functions["config.option"]("state_aggregate") + for chunk in chunks: + self.dependency_dag.add_chunk( + chunk, self._allow_aggregate(chunk, agg_opt) + ) for chunk in chunks: - if "order" in chunk: + chunk["name"] = salt.utils.data.decode(chunk["name"]) + self._reconcile_watch_req(chunk) + error = self.dependency_dag.add_requisites(chunk, disabled_reqs) + if error: + errors.append(error) + elif "order" in chunk: if not isinstance(chunk["order"], int): continue chunk_order = chunk["order"] if chunk_order > cap - 1 and chunk_order > 0: cap = chunk_order + 100 - for chunk in chunks: - if "order" not in chunk: - chunk["order"] = cap - continue - if not isinstance(chunk["order"], (int, float)): - if chunk["order"] == "last": - chunk["order"] = cap + 1000000 - elif chunk["order"] == "first": - chunk["order"] = 0 - else: - chunk["order"] = cap - if "name_order" in chunk: - chunk["order"] = chunk["order"] + chunk.pop("name_order") / 10000.0 - if chunk["order"] < 0: - chunk["order"] = cap + 1000000 + chunk["order"] - chunks.sort( - key=lambda chunk: ( - chunk["order"], - "{0[state]}{0[name]}{0[fun]}".format(chunk), - ) - ) - return chunks + try: + # Get nodes in topological order also sorted by order attribute + sorted_chunks = self.dependency_dag.aggregate_and_order_chunks(cap) + except nx.NetworkXUnfeasible: + sorted_chunks = [] + cycle_edges = self.dependency_dag.get_cycles_str() + errors.append(f"Recursive requisites were found: {cycle_edges}") + return sorted_chunks, errors - def compile_high_data(self, high, orchestration_jid=None): + def _reconcile_watch_req(self, low: LowChunk): + """ + Change watch requisites to require if mod_watch is not available. 
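+
+        For example, assuming a hypothetical state module ``foo`` that
+        does not provide ``foo.mod_watch``:
+
+        .. code-block:: python
+
+            low = {"state": "foo", "watch": [{"pkg": "nginx"}]}
+            # after reconciliation:
+            # {"state": "foo", "require": [{"pkg": "nginx"}]}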
+ """ + if RequisiteType.WATCH in low: + if f"{low['state']}.mod_watch" not in self.states: + low.setdefault(RequisiteType.REQUIRE.value, []).extend( + low.pop(RequisiteType.WATCH) + ) + if RequisiteType.WATCH_ANY in low: + if f"{low['state']}.mod_watch" not in self.states: + low.setdefault(RequisiteType.REQUIRE_ANY.value, []).extend( + low.pop(RequisiteType.WATCH_ANY) + ) + + def compile_high_data( + self, high: dict[str, Any], orchestration_jid: Union[str, int, None] = None + ) -> tuple[list[LowChunk], list[str]]: """ "Compile" the high data as it is retrieved from the CLI or YAML into the individual state executor structures + + return a tuple of the LowChunk structures and a list of errors """ + self.dependency_dag = DependencyGraph() chunks = [] - for name, body in high.items(): - if name.startswith("__"): + disabled = {} + agg_opt = self.functions["config.option"]("state_aggregate") + for id_, body in high.items(): + if id_.startswith("__"): continue for state, run in body.items(): + # This should be a single value instead of a set + # because multiple functions of the same state + # type are not allowed in the same state funcs = set() names = [] if state.startswith("__"): continue - chunk = {"state": state, "name": name} + chunk = {"state": state, "name": id_} if orchestration_jid is not None: chunk["__orchestration_jid__"] = orchestration_jid if "__sls__" in body: @@ -1724,7 +1477,7 @@ class State: chunk["__env__"] = body["__env__"] if "__sls_included_from__" in body: chunk["__sls_included_from__"] = body["__sls_included_from__"] - chunk["__id__"] = name + chunk["__id__"] = id_ for arg in run: if isinstance(arg, str): funcs.add(arg) @@ -1740,7 +1493,7 @@ class State: continue elif key == "name" and not isinstance(val, str): # Invalid name, fall back to ID - chunk[key] = name + chunk[key] = id_ else: chunk[key] = val if names: @@ -1757,16 +1510,68 @@ class State: name_order += 1 for fun in funcs: live["fun"] = fun - chunks.append(live) + if not self._check_disabled(live, disabled): + self.dependency_dag.add_chunk( + live, self._allow_aggregate(live, agg_opt) + ) + chunks.append(live) + break else: live = copy.deepcopy(chunk) for fun in funcs: live["fun"] = fun - chunks.append(live) - chunks = self.order_chunks(chunks) - return chunks + if not self._check_disabled(live, disabled): + self.dependency_dag.add_chunk( + live, self._allow_aggregate(live, agg_opt) + ) + chunks.append(live) + break + chunks, errors = self.order_chunks(chunks) + self.disabled = disabled + return chunks, errors - def reconcile_extend(self, high, strict=False): + def _allow_aggregate(self, low: LowChunk, agg_opt: Any) -> bool: + if "aggregate" in low: + agg_opt = low["aggregate"] + check_fun = False + try: + if agg_opt is True or ( + not isinstance(agg_opt, str) and low["state"] in agg_opt + ): + check_fun = True + except TypeError: + pass + allow = False + if check_fun: + agg_fun = f"{low['state']}.mod_aggregate" + loader_cache = self.state_con.setdefault("loader_cache", {}) + if (allow := loader_cache.get(agg_fun)) is None: + allow = agg_fun in self.states + self.state_con["loader_cache"][agg_fun] = allow + return allow + + def _check_disabled(self, low: LowChunk, disabled: dict[str, Any]) -> bool: + if "state_runs_disabled" in self.opts["grains"]: + state_func = f"{low['state']}.{low['fun']}" + for pat in self.opts["grains"]["state_runs_disabled"]: + if fnmatch.fnmatch(state_func, pat): + comment = ( + f'The state function "{state_func}" is currently disabled by "{pat}", ' + f"to re-enable, run state.enable 
{pat}." + ) + _tag = _gen_tag(low) + disabled[_tag] = { + "changes": {}, + "result": False, + "comment": comment, + "__run_num__": self.__run_num, + "__sls__": low["__sls__"], + } + self.__run_num += 1 + return True + return False + + def reconcile_extend(self, high: HighData, strict=False): """ Pull the extend data and add it to the respective high data """ @@ -1856,47 +1661,14 @@ class State: high[name][state].append(arg) return high, errors - def apply_exclude(self, high): + def apply_exclude(self, high: HighData) -> HighData: """ Read in the __exclude__ list and remove all excluded objects from the high data """ - if "__exclude__" not in high: - return high - ex_sls = set() - ex_id = set() - exclude = high.pop("__exclude__") - for exc in exclude: - if isinstance(exc, str): - # The exclude statement is a string, assume it is an sls - ex_sls.add(exc) - if isinstance(exc, dict): - # Explicitly declared exclude - if len(exc) != 1: - continue - key = next(iter(exc.keys())) - if key == "sls": - ex_sls.add(exc["sls"]) - elif key == "id": - ex_id.add(exc["id"]) - # Now the excludes have been simplified, use them - if ex_sls: - # There are sls excludes, find the associated ids - for name, body in high.items(): - if name.startswith("__"): - continue - sls = body.get("__sls__", "") - if not sls: - continue - for ex_ in ex_sls: - if fnmatch.fnmatch(sls, ex_): - ex_id.add(name) - for id_ in ex_id: - if id_ in high: - high.pop(id_) - return high + return _apply_exclude(high) - def requisite_in(self, high): + def requisite_in(self, high: HighData): """ Extend the data reference with requisite_in arguments """ @@ -1907,11 +1679,17 @@ class State: "onchanges_in", "use", "use_in", - "prereq", "prereq_in", } req_in_all = req_in.union( - {"require", "watch", "onfail", "onfail_stop", "onchanges"} + { + RequisiteType.REQUIRE.value, + RequisiteType.WATCH.value, + RequisiteType.PREREQ.value, + RequisiteType.ONFAIL.value, + "onfail_stop", # onfail_stop is an undocumented poorly named req type + RequisiteType.ONCHANGES.value, + } ) extend = {} errors = [] @@ -1954,7 +1732,7 @@ class State: if "." in _state: errors.append( "Invalid requisite in {}: {} for " - "{}, in SLS '{}'. Requisites must " + "{} in SLS '{}'. Requisites must " "not contain dots, did you mean '{}'?".format( rkey, _state, @@ -2022,42 +1800,13 @@ class State: hinges.append((pname, pstate)) if "." in pstate: errors.append( - "Invalid requisite in {}: {} for " - "{}, in SLS '{}'. Requisites must " - "not contain dots, did you mean '{}'?".format( - rkey, - pstate, - pname, - body["__sls__"], - pstate[: pstate.find(".")], - ) + f"Invalid requisite in {rkey}: {pstate} for " + f"{pname}, in SLS '{body['__sls__']}'. Requisites must " + f"not contain dots, did you mean '{pstate[: pstate.find('.')]}'?" 
) pstate = pstate.split(".")[0] for tup in hinges: name, _state = tup - if key == "prereq_in": - # Add prerequired to origin - if id_ not in extend: - extend[id_] = HashableOrderedDict() - if state not in extend[id_]: - extend[id_][state] = [] - extend[id_][state].append( - {"prerequired": [{_state: name}]} - ) - if key == "prereq": - # Add prerequired to prereqs - ext_ids = find_name( - name, _state, high, strict=True - ) - for ext_id, _req_state in ext_ids: - if ext_id not in extend: - extend[ext_id] = HashableOrderedDict() - if _req_state not in extend[ext_id]: - extend[ext_id][_req_state] = [] - extend[ext_id][_req_state].append( - {"prerequired": [{state: id_}]} - ) - continue if key == "use_in": # Add the running states args to the # use_in states @@ -2137,9 +1886,7 @@ class State: continue # The rkey is not present yet, create it extend[name][_state].append({rkey: [{state: id_}]}) - high["__extend__"] = [] - for key, val in extend.items(): - high["__extend__"].append({key: val}) + high["__extend__"] = [{key: val} for key, val in extend.items()] req_in_high, req_in_errors = self.reconcile_extend(high, strict=True) errors.extend(req_in_errors) return req_in_high, errors @@ -2255,7 +2002,7 @@ class State: with salt.utils.files.fopen(tfile, "wb+") as fp_: fp_.write(msgpack_serialize(ret)) - def call_parallel(self, cdata, low): + def call_parallel(self, cdata: dict[str, Any], low: LowChunk): """ Call the state defined in the given cdata in parallel """ @@ -2289,7 +2036,13 @@ class State: return ret @salt.utils.decorators.state.OutputUnifier("content_check", "unify") - def call(self, low, chunks=None, running=None, retries=1): + def call( + self, + low: LowChunk, + chunks: Optional[Sequence[LowChunk]] = None, + running: Optional[dict[str, dict]] = None, + retries: int = 1, + ): """ Call a state directly with the low data structure, verify data before processing. @@ -2298,16 +2051,18 @@ class State: local_start_time = utc_start_time - ( datetime.datetime.utcnow() - datetime.datetime.now() ) + low_name = low.get("name") + log_low_name = low_name.strip() if isinstance(low_name, str) else low_name log.info( "Running state [%s] at time %s", - low["name"].strip() if isinstance(low["name"], str) else low["name"], + log_low_name, local_start_time.time().isoformat(), ) errors = self.verify_data(low) if errors: ret = { "result": False, - "name": low["name"], + "name": low_name, "changes": {}, "comment": "", } @@ -2333,7 +2088,7 @@ class State: "Executing state %s.%s for [%s]", low["state"], low["fun"], - low["name"].strip() if isinstance(low["name"], str) else low["name"], + log_low_name, ) if "provider" in low: @@ -2705,35 +2460,21 @@ class State: validated_retry_data = retry_defaults return validated_retry_data - def call_chunks(self, chunks): + def call_chunks( + self, + chunks: Sequence[LowChunk], + disabled_states: Optional[dict[str, dict[str, Any]]] = None, + ) -> dict[str, Any]: """ Iterate over a list of chunks and call them, checking for requires. 
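+
+        A minimal usage sketch (hypothetical variable names; ``high`` is
+        assumed to be already rendered high data):
+
+        .. code-block:: python
+
+            chunks, errors = self.compile_high_data(high)
+            if not errors:
+                running = self.call_chunks(chunks, disabled_states=self.disabled)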
""" - # Check for any disabled states - disabled = {} - if "state_runs_disabled" in self.opts["grains"]: - for low in chunks[:]: - state_ = "{}.{}".format(low["state"], low["fun"]) - for pat in self.opts["grains"]["state_runs_disabled"]: - if fnmatch.fnmatch(state_, pat): - comment = ( - 'The state function "{0}" is currently disabled by "{1}", ' - "to re-enable, run state.enable {1}.".format( - state_, - pat, - ) - ) - _tag = _gen_tag(low) - disabled[_tag] = { - "changes": {}, - "result": False, - "comment": comment, - "__run_num__": self.__run_num, - "__sls__": low["__sls__"], - } - self.__run_num += 1 - chunks.remove(low) - break + if disabled_states is None: + # Check for any disabled states + disabled = {} + for chunk in chunks: + self._check_disabled(chunk, disabled) + else: + disabled = disabled_states running = {} for low in chunks: if "__FAILHARD__" in running: @@ -2748,7 +2489,6 @@ class State: running = self.call_chunk(low, running, chunks) if self.check_failhard(low, running): return running - self.active = set() while True: if self.reconcile_procs(running): break @@ -2756,7 +2496,7 @@ class State: ret = dict(list(disabled.items()) + list(running.items())) return ret - def check_failhard(self, low, running): + def check_failhard(self, low: LowChunk, running: dict[str, dict]): """ Check if the low data chunk should send a failhard signal """ @@ -2769,7 +2509,7 @@ class State: return not running[tag]["result"] return False - def check_pause(self, low): + def check_pause(self, low: LowChunk) -> Optional[str]: """ Check to see if this low chunk has been paused """ @@ -2816,7 +2556,7 @@ class State: return "run" return "run" - def reconcile_procs(self, running): + def reconcile_procs(self, running: dict) -> bool: """ Check the running dict for processes and resolve them """ @@ -2853,121 +2593,19 @@ class State: retset.add(False) return False not in retset - def check_requisite(self, low, running, chunks, pre=False): + def _check_requisites(self, low: LowChunk, running: dict[str, dict[str, Any]]): """ Look into the running data to check the status of all requisite - states + states. 
""" - disabled_reqs = self.opts.get("disabled_requisites", []) - if not isinstance(disabled_reqs, list): - disabled_reqs = [disabled_reqs] - present = False - # If mod_watch is not available make it a require - if "watch" in low: - if "{}.mod_watch".format(low["state"]) not in self.states: - if "require" in low: - low["require"].extend(low.pop("watch")) - else: - low["require"] = low.pop("watch") - else: - present = True - if "watch_any" in low: - if "{}.mod_watch".format(low["state"]) not in self.states: - if "require_any" in low: - low["require_any"].extend(low.pop("watch_any")) - else: - low["require_any"] = low.pop("watch_any") - else: - present = True - if "require" in low: - present = True - if "require_any" in low: - present = True - if "prerequired" in low: - present = True - if "prereq" in low: - present = True - if "onfail" in low: - present = True - if "onfail_any" in low: - present = True - if "onfail_all" in low: - present = True - if "onchanges" in low: - present = True - if "onchanges_any" in low: - present = True - if not present: - return "met", () - self.reconcile_procs(running) - reqs = { - "require": [], - "require_any": [], - "watch": [], - "watch_any": [], - "prereq": [], - "onfail": [], - "onfail_any": [], - "onfail_all": [], - "onchanges": [], - "onchanges_any": [], - } - if pre: - reqs["prerequired"] = [] - for r_state in reqs: - if r_state in low and low[r_state] is not None: - if r_state in disabled_reqs: - log.warning( - "The %s requisite has been disabled, Ignoring.", r_state - ) - continue - for req in low[r_state]: - if isinstance(req, str): - req = {"id": req} - req = trim_req(req) - found = False - req_key = next(iter(req)) - req_val = req[req_key] - if req_val is None: - continue - for chunk in chunks: - if req_key == "sls": - # Allow requisite tracking of entire sls files - if fnmatch.fnmatch( - chunk["__sls__"], req_val - ) or req_val in chunk.get("__sls_included_from__", []): - found = True - reqs[r_state].append(chunk) - continue - try: - if isinstance(req_val, str): - if fnmatch.fnmatch( - chunk["name"], req_val - ) or fnmatch.fnmatch(chunk["__id__"], req_val): - if req_key == "id" or chunk["state"] == req_key: - found = True - reqs[r_state].append(chunk) - else: - raise KeyError - except KeyError as exc: - raise SaltRenderError( - "Could not locate requisite of [{}] present in state" - " with name [{}]".format(req_key, chunk["name"]) - ) - except TypeError: - # On Python 2, the above req_val, being an HashableOrderedDict, will raise a KeyError, - # however on Python 3 it will raise a TypeError - # This was found when running tests.unit.test_state.StateCompilerTestCase.test_render_error_on_invalid_requisite - raise SaltRenderError( - "Could not locate requisite of [{}] present in state" - " with name [{}]".format(req_key, chunk["name"]) - ) - if not found: - return "unmet", () + reqs = {} + for req_type, chunk in self.dependency_dag.get_dependencies(low): + reqs.setdefault(req_type, []).append(chunk) fun_stats = set() - for r_state, chunks in reqs.items(): + for r_type, chunks in reqs.items(): req_stats = set() - if r_state.startswith("prereq") and not r_state.startswith("prerequired"): + r_type_base = re.sub(r"_any$|_all$", "", r_type) + if r_type_base == RequisiteType.PREREQ.value: run_dict = self.pre else: run_dict = running @@ -2996,7 +2634,7 @@ class State: # change or running mod_watch if run_dict[tag].get("skip_req"): req_stats.add("skip_req") - if r_state.startswith("onfail"): + if r_type_base == RequisiteType.ONFAIL.value: if 
run_dict[tag]["result"] is True: req_stats.add("onfail") # At least one state is OK continue @@ -3004,25 +2642,27 @@ class State: if run_dict[tag]["result"] is False: req_stats.add("fail") continue - if r_state.startswith("onchanges"): + if r_type_base == RequisiteType.ONCHANGES.value: if not run_dict[tag]["changes"]: req_stats.add("onchanges") else: req_stats.add("onchangesmet") continue - if r_state.startswith("watch") and run_dict[tag]["changes"]: + if ( + r_type_base == RequisiteType.WATCH.value + and run_dict[tag]["changes"] + ): req_stats.add("change") continue - if r_state.startswith("prereq") and run_dict[tag]["result"] is None: - if not r_state.startswith("prerequired"): + if r_type_base == RequisiteType.PREREQ.value: + if run_dict[tag]["result"] is None: req_stats.add("premet") - if r_state.startswith("prereq") and not run_dict[tag]["result"] is None: - if not r_state.startswith("prerequired"): + else: req_stats.add("pre") else: if run_dict[tag].get("__state_ran__", True): req_stats.add("met") - if r_state.endswith("_any") or r_state == "onfail": + if r_type.endswith("_any") or r_type == RequisiteType.ONFAIL.value: if "met" in req_stats or "change" in req_stats: if "fail" in req_stats: req_stats.remove("fail") @@ -3035,7 +2675,7 @@ class State: # a met requisite in this case implies a success if "met" in req_stats: req_stats.remove("onfail") - if r_state.endswith("_all"): + if r_type.endswith("_all") and "onfail" in req_stats: if "onfail" in req_stats: # a met requisite in this case implies a failure if "met" in req_stats: @@ -3068,7 +2708,9 @@ class State: return status, reqs - def event(self, chunk_ret, length, fire_event=False): + def event( + self, chunk_ret: dict, length: int, fire_event: Union[bool, str] = False + ) -> None: """ Fire an event on the master bus @@ -3097,7 +2739,7 @@ class State: else: ev_func = self.functions["event.fire_master"] - ret = {"ret": chunk_ret} + ret: dict[str, Any] = {"ret": chunk_ret} if fire_event is True: tag = salt.utils.event.tagify( [self.jid, self.opts["id"], str(chunk_ret["name"])], @@ -3117,167 +2759,29 @@ class State: preload = {"jid": self.jid} ev_func(ret, tag, preload=preload) - def call_chunk(self, low, running, chunks, depth=0): + def call_chunk( + self, + low: LowChunk, + running: dict[str, dict], + chunks: Sequence[LowChunk], + depth: int = 0, + ) -> dict[str, dict]: """ - Check if a chunk has any requires, execute the requires and then - the chunk + Execute the chunk if the requisites did not fail """ - low = self._mod_aggregate(low, running, chunks) + if self.dependency_dag.dag: + low = self._mod_aggregate(low, running) + elif chunks: + raise SaltRenderError( + "call_chunk called with multiple chunks but order_chunks was not called." + " order_chunks must be called first when there are multiple chunks." 
+ ) self._mod_init(low) tag = _gen_tag(low) - if not low.get("prerequired"): - self.active.add(tag) - requisites = [ - "require", - "require_any", - "watch", - "watch_any", - "prereq", - "onfail", - "onfail_any", - "onchanges", - "onchanges_any", - ] - if not low.get("__prereq__"): - requisites.append("prerequired") - status, reqs = self.check_requisite(low, running, chunks, pre=True) - else: - status, reqs = self.check_requisite(low, running, chunks) + + status, reqs = self._check_requisites(low, running) if status == "unmet": - lost = {} - reqs = [] - for requisite in requisites: - lost[requisite] = [] - if requisite not in low: - continue - for req in low[requisite]: - if isinstance(req, str): - req = {"id": req} - req = trim_req(req) - found = False - req_key = next(iter(req)) - req_val = req[req_key] - for chunk in chunks: - if req_val is None: - continue - if req_key == "sls": - # Allow requisite tracking of entire sls files - if fnmatch.fnmatch( - chunk["__sls__"], req_val - ) or req_val in chunk.get("__sls_included_from__", []): - if requisite == "prereq": - chunk["__prereq__"] = True - reqs.append(chunk) - found = True - continue - if fnmatch.fnmatch(chunk["name"], req_val) or fnmatch.fnmatch( - chunk["__id__"], req_val - ): - if req_key == "id" or chunk["state"] == req_key: - if requisite == "prereq": - chunk["__prereq__"] = True - elif requisite == "prerequired": - chunk["__prerequired__"] = True - reqs.append(chunk) - found = True - if not found: - lost[requisite].append(req) - if ( - lost["require"] - or lost["watch"] - or lost["prereq"] - or lost["onfail"] - or lost["onchanges"] - or lost["require_any"] - or lost["watch_any"] - or lost["onfail_any"] - or lost["onchanges_any"] - or lost.get("prerequired") - ): - comment = "The following requisites were not found:\n" - for requisite, lreqs in lost.items(): - if not lreqs: - continue - comment += "{}{}:\n".format(" " * 19, requisite) - for lreq in lreqs: - req_key = next(iter(lreq)) - req_val = lreq[req_key] - comment += "{}{}: {}\n".format(" " * 23, req_key, req_val) - if low.get("__prereq__"): - run_dict = self.pre - else: - run_dict = running - start_time, duration = _calculate_fake_duration() - run_dict[tag] = { - "changes": {}, - "result": False, - "duration": duration, - "start_time": start_time, - "comment": comment, - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - run_dict[tag][key] = low.get(key) - self.__run_num += 1 - self.event(run_dict[tag], len(chunks), fire_event=low.get("fire_event")) - return running - for chunk in reqs: - # Check to see if the chunk has been run, only run it if - # it has not been run already - ctag = _gen_tag(chunk) - if ctag not in running: - if ctag in self.active: - if chunk.get("__prerequired__"): - # Prereq recusive, run this chunk with prereq on - if tag not in self.pre: - low["__prereq__"] = True - self.pre[ctag] = self.call(low, chunks, running) - return running - else: - return running - elif ctag not in running: - log.error("Recursive requisite found") - running[tag] = { - "changes": {}, - "result": False, - "comment": "Recursive requisite found", - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - running[tag][key] = low.get(key) - self.__run_num += 1 - self.event( - running[tag], len(chunks), fire_event=low.get("fire_event") - ) - return running - running = self.call_chunk(chunk, running, chunks) - if self.check_failhard(chunk, running): - running["__FAILHARD__"] = True - return running - if low.get("__prereq__"): - 
status, reqs = self.check_requisite(low, running, chunks) - self.pre[tag] = self.call(low, chunks, running) - if not self.pre[tag]["changes"] and status == "change": - self.pre[tag]["changes"] = {"watch": "watch"} - self.pre[tag]["result"] = None - else: - depth += 1 - # even this depth is being generous. This shouldn't exceed 1 no - # matter how the loops are happening - if depth >= 20: - log.error("Recursive requisite found") - running[tag] = { - "changes": {}, - "result": False, - "comment": "Recursive requisite found", - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - running[tag][key] = low.get(key) - else: - running = self.call_chunk(low, running, chunks, depth) - if self.check_failhard(chunk, running): - running["__FAILHARD__"] = True + if self._call_unmet_requisites(low, running, chunks, tag, depth): return running elif status == "met": if low.get("__prereq__"): @@ -3285,14 +2789,13 @@ class State: else: running[tag] = self.call(low, chunks, running) elif status == "skip_req": - running[tag] = { - "changes": {}, - "result": True, - "comment": "State was not run because requisites were skipped by another state", - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - running[tag][key] = low.get(key) + self._assign_not_run_result_dict( + low=low, + tag=tag, + result=True, + comment="State was not run because requisites were skipped by another state", + running=running, + ) elif status == "skip_watch" and not low.get("__prereq__"): ret = self.call(low, chunks, running) ret[ @@ -3314,9 +2817,10 @@ class State: running[tag]["__run_num__"] = self.__run_num for key in ("__sls__", "__id__", "name"): running[tag][key] = low.get(key) + self.__run_num += 1 # otherwise the failure was due to a requisite down the chain else: - # determine what the requisite failures where, and return + # determine what the requisite failures were, and return # a nice error message failed_requisites = set() # look at all requisite types for a failure @@ -3339,19 +2843,13 @@ class State: _cmt = "One or more requisite failed: {}".format( ", ".join(str(i) for i in failed_requisites) ) - start_time, duration = _calculate_fake_duration() - running[tag] = { - "changes": {}, - "result": False, - "duration": duration, - "start_time": start_time, - "comment": _cmt, - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - running[tag][key] = low.get(key) - self.pre[tag] = running[tag] - self.__run_num += 1 + self._assign_not_run_result_dict( + low=low, + tag=tag, + result=False, + comment=_cmt, + running=running, + ) elif status == "change" and not low.get("__prereq__"): ret = self.call(low, chunks, running) if not ret["changes"] and not ret.get("skip_watch", False): @@ -3362,57 +2860,38 @@ class State: ret = self.call(low, chunks, running) running[tag] = ret elif status == "pre": - start_time, duration = _calculate_fake_duration() - pre_ret = { - "changes": {}, - "result": True, - "duration": duration, - "start_time": start_time, - "comment": "No changes detected", - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - pre_ret[key] = low.get(key) - running[tag] = pre_ret - self.pre[tag] = pre_ret - self.__run_num += 1 + self._assign_not_run_result_dict( + low=low, + tag=tag, + result=True, + comment="No changes detected", + running=running, + ) elif status == "onfail": - start_time, duration = _calculate_fake_duration() - running[tag] = { - "changes": {}, - "result": True, - "duration": duration, - 
"start_time": start_time, - "comment": "State was not run because onfail req did not change", - "__state_ran__": False, - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - running[tag][key] = low.get(key) - self.__run_num += 1 + self._assign_not_run_result_dict( + low=low, + tag=tag, + result=True, + comment="State was not run because onfail req did not change", + running=running, + ) elif status == "onchanges": - start_time, duration = _calculate_fake_duration() - running[tag] = { - "changes": {}, - "result": True, - "duration": duration, - "start_time": start_time, - "comment": ( - "State was not run because none of the onchanges reqs changed" - ), - "__state_ran__": False, - "__run_num__": self.__run_num, - } - for key in ("__sls__", "__id__", "name"): - running[tag][key] = low.get(key) - self.__run_num += 1 + self._assign_not_run_result_dict( + low=low, + tag=tag, + result=True, + comment="State was not run because none of the onchanges reqs changed", + running=running, + ) else: if low.get("__prereq__"): self.pre[tag] = self.call(low, chunks, running) else: running[tag] = self.call(low, chunks, running) if tag in running: - self.event(running[tag], len(chunks), fire_event=low.get("fire_event")) + self.event( + running[tag], len(chunks), fire_event=low.get("fire_event", False) + ) for sub_state_data in running[tag].pop("sub_state_run", ()): start_time, duration = _calculate_fake_duration() @@ -3433,32 +2912,93 @@ class State: return running - def call_beacons(self, chunks, running): + def _assign_not_run_result_dict( + self, + low: LowChunk, + tag: str, + result: bool, + comment: str, + running: dict[str, dict], + ) -> None: + start_time, duration = _calculate_fake_duration() + ret = { + "changes": {}, + "result": result, + "duration": duration, + "start_time": start_time, + "comment": comment, + "__state_ran__": False, + "__run_num__": self.__run_num, + } + for key in ("__sls__", "__id__", "name"): + ret[key] = low.get(key) + running[tag] = ret + if low.get("__prereq__"): + self.pre[tag] = ret + self.__run_num += 1 + + def _call_unmet_requisites( + self, + low: LowChunk, + running: dict[str, dict], + chunks: Sequence[LowChunk], + tag: str, + depth: int, + ) -> dict[str, dict]: + for _, chunk in self.dependency_dag.get_dependencies(low): + # Check to see if the chunk has been run, only run it if + # it has not been run already + ctag = _gen_tag(chunk) + if ctag not in running: + running = self.call_chunk(chunk, running, chunks) + if self.check_failhard(chunk, running): + running["__FAILHARD__"] = True + return running + if low.get("__prereq__"): + status, _ = self._check_requisites(low, running) + self.pre[tag] = self.call(low, chunks, running) + if not self.pre[tag]["changes"] and status == "change": + self.pre[tag]["changes"] = {"watch": "watch"} + self.pre[tag]["result"] = None + else: + depth += 1 + # even this depth is being generous. 
This shouldn't exceed 1 no + # matter how the loops are happening + if depth >= 20: + log.error("Recursive requisite found") + running[tag] = { + "changes": {}, + "result": False, + "comment": "Recursive requisite found", + "__run_num__": self.__run_num, + } + for key in ("__sls__", "__id__", "name"): + running[tag][key] = low.get(key) + else: + running = self.call_chunk(low, running, chunks, depth) + if self.check_failhard(low, running): + running["__FAILHARD__"] = True + return running + return {} + + def call_beacons(self, chunks: Iterable[LowChunk], running: dict) -> dict: """ Find all of the beacon routines and call the associated mod_beacon runs """ - beacons = [] - for chunk in chunks: - if "beacon" in chunk: - beacons.append(chunk) + beacons = (chunk for chunk in chunks if "beacon" in chunk) mod_beacons = [] - errors = {} for chunk in beacons: low = chunk.copy() low["sfun"] = chunk["fun"] low["fun"] = "mod_beacon" - low["__id__"] = "beacon_{}".format(low["__id__"]) + low["__id__"] = f"beacon_{low['__id__']}" mod_beacons.append(low) ret = self.call_chunks(mod_beacons) running.update(ret) - for err in errors: - errors[err]["__run_num__"] = self.__run_num - self.__run_num += 1 - running.update(errors) return running - def call_listen(self, chunks, running): + def call_listen(self, chunks: Iterable[LowChunk], running: dict) -> dict: """ Find all of the listen routines and call the associated mod_watch runs """ @@ -3552,6 +3092,9 @@ class State: for req in STATE_REQUISITE_KEYWORDS: if req in low: low.pop(req) + self.dependency_dag.add_chunk( + low, self._allow_aggregate(low, {}) + ) mod_watchers.append(low) ret = self.call_chunks(mod_watchers) running.update(ret) @@ -3561,7 +3104,9 @@ class State: running.update(errors) return running - def call_high(self, high, orchestration_jid=None): + def call_high( + self, high: HighData, orchestration_jid: Union[str, int, None] = None + ) -> Union[dict, list]: """ Process a high data call and ensure the defined states. """ @@ -3579,13 +3124,13 @@ class State: if errors: return errors # Compile and verify the raw chunks - chunks = self.compile_high_data(high, orchestration_jid) - - # If there are extensions in the highstate, process them and update - # the low data chunks + chunks, errors = self.compile_high_data(high, orchestration_jid) if errors: return errors - ret = self.call_chunks(chunks) + # If there are extensions in the highstate, process them and update + # the low data chunks + + ret = self.call_chunks(chunks, disabled_states=self.disabled_states) ret = self.call_listen(chunks, ret) ret = self.call_beacons(chunks, ret) @@ -3739,16 +3284,16 @@ class LazyAvailStates: The LazyAvailStates lazily loads the list of states of available environments. - This is particularly usefull when top_file_merging_strategy=same and there + This is particularly useful when top_file_merging_strategy=same and there are many environments. 
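+
+    Each environment's state list is fetched at most once, on first
+    access; e.g. ``avail["dev"]`` (hypothetical environment) results in
+    a single ``client.list_states("dev")`` call whose result is cached.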
""" - def __init__(self, hs): + def __init__(self, hs: BaseHighState): self._hs = hs - self._avail = {"base": None} + self._avail: dict[Hashable, Optional[list[str]]] = {"base": None} self._filled = False - def _fill(self): + def _fill(self) -> None: if self._filled: return for saltenv in self._hs._get_envs(): @@ -3756,24 +3301,23 @@ class LazyAvailStates: self._avail[saltenv] = None self._filled = True - def __contains__(self, saltenv): + def __contains__(self, saltenv: Hashable) -> bool: if saltenv == "base": return True self._fill() return saltenv in self._avail - def __getitem__(self, saltenv): + def __getitem__(self, saltenv: Hashable) -> list[str]: if saltenv != "base": self._fill() - if saltenv not in self._avail or self._avail[saltenv] is None: - self._avail[saltenv] = self._hs.client.list_states(saltenv) - return self._avail[saltenv] + if (states := self._avail.get(saltenv)) is None: + states = self._hs.client.list_states(saltenv) + self._avail[saltenv] = states + return states def items(self): self._fill() - ret = [] - for saltenv in self._avail: - ret.append((saltenv, self[saltenv])) + ret = [(saltenv, self[saltenv]) for saltenv, _ in self._avail.items()] return ret @@ -3786,6 +3330,10 @@ class BaseHighState: ``self.matcher`` should be instantiated and handled. """ + client: salt.fileclient.RemoteClient + matchers: Mapping[str, Callable] + state: State + def __init__(self, opts): self.opts = self.__gen_opts(opts) self.iorder = 10000 @@ -4749,9 +4297,7 @@ class BaseHighState: this_sls = f"SLS {sls_match} in saltenv" if this_sls in error: errors[i] = ( - "No matching sls found for '{}' in env '{}'".format( - sls_match, saltenv - ) + f"No matching sls found for '{sls_match}' in env '{saltenv}'" ) all_errors.extend(errors) @@ -4929,8 +4475,10 @@ class BaseHighState: def compile_low_chunks(self, context=None): """ - Compile the highstate but don't run it, return the low chunks to - see exactly what the highstate will execute + Compile the highstate but don't run it + + If there are errors return the errors otherwise return + the low chunks to see exactly what the highstate will execute """ top = self.get_top(context=context) matches = self.top_matches(top) @@ -4950,8 +4498,9 @@ class BaseHighState: return errors # Compile and verify the raw chunks - chunks = self.state.compile_high_data(high) - + chunks, errors = self.state.compile_high_data(high) + if errors: + return errors return chunks def compile_state_usage(self): diff --git a/salt/states/pkg.py b/salt/states/pkg.py index da31436376b..ff872153d23 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py @@ -3597,8 +3597,6 @@ def mod_aggregate(low, chunks, running): The mod_aggregate function which looks up all packages in the available low chunks and merges them into a single pkgs ref in the present low data """ - pkgs = [] - pkg_type = None agg_enabled = [ "installed", "latest", @@ -3607,6 +3605,9 @@ def mod_aggregate(low, chunks, running): ] if low.get("fun") not in agg_enabled: return low + is_sources = "sources" in low + # use a dict instead of a set to maintain insertion order + pkgs = {} for chunk in chunks: tag = __utils__["state.gen_tag"](chunk) if tag in running: @@ -3621,42 +3622,52 @@ def mod_aggregate(low, chunks, running): # Check for the same repo if chunk.get("fromrepo") != low.get("fromrepo"): continue + # If hold exists in the chunk, do not add to aggregation + # otherwise all packages will be held or unheld. + # setting a package to be held/unheld is not as + # time consuming as installing/uninstalling. 
+        if "hold" in chunk:
+            continue
         # Check first if 'sources' was passed so we don't aggregate pkgs
         # and sources together.
-        if "sources" in chunk:
-            if pkg_type is None:
-                pkg_type = "sources"
-            if pkg_type == "sources":
-                pkgs.extend(chunk["sources"])
+        if is_sources and "sources" in chunk:
+            _combine_pkgs(pkgs, chunk["sources"])
+            chunk["__agg__"] = True
+        elif not is_sources:
+            # Pull out the pkg names!
+            if "pkgs" in chunk:
+                _combine_pkgs(pkgs, chunk["pkgs"])
                 chunk["__agg__"] = True
-            else:
-                # If hold exists in the chunk, do not add to aggregation
-                # otherwise all packages will be held or unheld.
-                # setting a package to be held/unheld is not as
-                # time consuming as installing/uninstalling.
-                if "hold" not in chunk:
-                    if pkg_type is None:
-                        pkg_type = "pkgs"
-                    if pkg_type == "pkgs":
-                        # Pull out the pkg names!
-                        if "pkgs" in chunk:
-                            pkgs.extend(chunk["pkgs"])
-                            chunk["__agg__"] = True
-                        elif "name" in chunk:
-                            version = chunk.pop("version", None)
-                            if version is not None:
-                                pkgs.append({chunk["name"]: version})
-                            else:
-                                pkgs.append(chunk["name"])
-                            chunk["__agg__"] = True
-    if pkg_type is not None and pkgs:
-        if pkg_type in low:
-            low[pkg_type].extend(pkgs)
-        else:
-            low[pkg_type] = pkgs
+            elif "name" in chunk:
+                version = chunk.pop("version", None)
+                pkgs.setdefault(chunk["name"], set()).add(version)
+                chunk["__agg__"] = True
+    if pkgs:
+        pkg_type = "sources" if is_sources else "pkgs"
+        low_pkgs = {}
+        _combine_pkgs(low_pkgs, low.get(pkg_type, []))
+        for pkg, values in pkgs.items():
+            low_pkgs.setdefault(pkg, {None}).update(values)
+        # the value is the version for pkgs and
+        # the URI for sources
+        low_pkgs_list = [
+            name if value is None else {name: value}
+            for name, values in low_pkgs.items()
+            for value in values
+        ]
+        low[pkg_type] = low_pkgs_list
     return low
 
 
+def _combine_pkgs(pkgs_dict, additional_pkgs_list):
+    for item in additional_pkgs_list:
+        if isinstance(item, str):
+            pkgs_dict.setdefault(item, {None})
+        else:
+            # each non-string item is a single-key dict mapping the
+            # package name to a version (for pkgs) or a URI (for sources)
+            for pkg, version in item.items():
+                pkgs_dict.setdefault(pkg, {None}).add(version)
+
+
 def mod_watch(name, **kwargs):
     """
     Install/reinstall a package based on a watch requisite
diff --git a/salt/states/test.py b/salt/states/test.py
index bdded67a16e..4b1770983aa 100644
--- a/salt/states/test.py
+++ b/salt/states/test.py
@@ -352,7 +352,7 @@ def mod_watch(name, sfun=None, **kwargs):
     """
     has_changes = []
     if "__reqs__" in __low__:
-        for req in __low__["__reqs__"]["watch"]:
+        for req in __low__["__reqs__"].get("watch", []):
             tag = _gen_tag(req)
             if __running__[tag]["changes"]:
                 has_changes.append("{state}: {__id__}".format(**req))
diff --git a/salt/thorium/__init__.py b/salt/thorium/__init__.py
index 6dffc972a07..82351e44858 100644
--- a/salt/thorium/__init__.py
+++ b/salt/thorium/__init__.py
@@ -137,7 +137,10 @@ class ThorState(salt.state.HighState):
         err += self.state.verify_high(high)
         if err:
             raise SaltRenderError(err)
-        return self.state.compile_high_data(high)
+        chunks, errors = self.state.compile_high_data(high)
+        if errors:
+            raise SaltRenderError(errors)
+        return chunks
 
     def get_events(self):
         """
diff --git a/salt/utils/event.py b/salt/utils/event.py
index e3132f0b7c8..f99f83a0bf5 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -59,7 +59,7 @@ import hashlib
 import logging
 import os
 import time
-from collections.abc import MutableMapping
+from collections.abc import Iterable, MutableMapping
 
 import tornado.ioloop
 import tornado.iostream
@@ -186,17 +186,23 @@ def tagify(suffix="", prefix="", base=SALT):
     """
 
     parts = [base, TAGS.get(prefix, prefix)]
-    if hasattr(suffix, "append"):  # list so extend parts
+    if isinstance(suffix, Iterable) and not isinstance(
+        suffix, str
+    ):  # non-string iterable, so extend parts
         parts.extend(suffix)
     else:  # string so append
         parts.append(suffix)
 
-    for index, _ in enumerate(parts):
+    str_parts = []
+    for part in parts:
+        part_str = None
         try:
-            parts[index] = salt.utils.stringutils.to_str(parts[index])
+            part_str = salt.utils.stringutils.to_str(part)
         except TypeError:
-            parts[index] = str(parts[index])
-    return TAGPARTER.join([part for part in parts if part])
+            part_str = str(part)
+        if part_str:
+            str_parts.append(part_str)
+    return TAGPARTER.join(str_parts)
 
 
 class SaltEvent:
diff --git a/salt/utils/reactor.py b/salt/utils/reactor.py
index 0229738ec3c..5b9138636b2 100644
--- a/salt/utils/reactor.py
+++ b/salt/utils/reactor.py
@@ -188,7 +188,16 @@ class Reactor(salt.utils.process.SignalHandlingProcess, salt.state.Compiler):
                     reactors,
                 )
                 return []  # We'll return nothing since there was an error
-            chunks = self.order_chunks(self.compile_high_data(high))
+            chunks, errors = self.compile_high_data(high)
+            if errors:
+                log.error(
+                    "Unable to render reactions for event %s due to "
+                    "errors (%s) in one or more of the sls files (%s)",
+                    tag,
+                    errors,
+                    reactors,
+                )
+                return []  # We'll return nothing since there was an error
         except Exception as exc:  # pylint: disable=broad-except
             log.exception("Exception encountered while compiling reactions")
diff --git a/salt/utils/requisite.py b/salt/utils/requisite.py
new file mode 100644
index 00000000000..b353793cf69
--- /dev/null
+++ b/salt/utils/requisite.py
@@ -0,0 +1,469 @@
+"""
+The classes and functions in this module are related to requisite
+handling and ordering of chunks by the State compiler
+"""
+
+from __future__ import annotations
+
+import fnmatch
+import logging
+import sys
+from collections import defaultdict
+from collections.abc import Generator, Iterable, Sequence
+from enum import Enum, auto
+from typing import TYPE_CHECKING, Any
+
+import networkx as nx  # pylint: disable=3rd-party-module-not-gated
+
+log = logging.getLogger(__name__)
+
+# See https://docs.saltproject.io/en/latest/ref/states/layers.html for details on the naming
+LowChunk = dict[str, Any]
+
+if TYPE_CHECKING or sys.version_info >= (3, 10):
+    staticmethod_hack = staticmethod
+else:
+    # Python < 3.10 does not support calling static methods directly from the class body
+    # as is the case with enum _generate_next_value_.
+    # Since @staticmethod is only added for static type checking, substitute a dummy decorator.
+    def staticmethod_hack(f):
+        return f
+
+
+def _gen_tag(low: LowChunk) -> str:
+    """
+    Generate a unique identifier tag string from the low data structure
+    """
+    return "{0[state]}_|-{0[__id__]}_|-{0[name]}_|-{0[fun]}".format(low)
+
+
+def trim_req(req: dict[str, Any]) -> dict[str, Any]:
+    """
+    Trim any function off of a requisite reference
+    """
+    reqfirst, valfirst = next(iter(req.items()))
+    if "." 
in reqfirst: + return {reqfirst.split(".", maxsplit=1)[0]: valfirst} + return req + + +class RequisiteType(str, Enum): + """Types of direct requisites""" + + # Once salt no longer needs to support python < 3.10, + # remove this hack and use @staticmethod + @staticmethod_hack + def _generate_next_value_( + name: str, start: int, count: int, last_values: list[Any] + ) -> tuple[str, int]: + return name.lower(), count + + def __new__(cls, value, weight): + member = str.__new__(cls, value) + member._value_ = value + member.weight = weight + return member + + def __init__(self, value, weight): + super().__init__() + self._value_ = value + self.weight = weight + + # The items here are listed in order of precedence for determining + # the order of execution, so do not change the order unless you + # are intentionally changing the precedence + ONFAIL = auto() + ONFAIL_ANY = auto() + ONFAIL_ALL = auto() + REQUIRE = auto() + REQUIRE_ANY = auto() + ONCHANGES = auto() + ONCHANGES_ANY = auto() + WATCH = auto() + WATCH_ANY = auto() + PREREQ = auto() + PREREQUIRED = auto() + LISTEN = auto() + + +class DependencyGraph: + """ + Class used to track dependencies (requisites) among salt states. + + This class utilizes a Directed Acyclic Graph to determine the + ordering of states. The nodes represent the individual states that + can be depended on and edges represent the types of requisites + between the states. + """ + + def __init__(self) -> None: + self.dag = nx.MultiDiGraph() + # a mapping to node_id to be able to find nodes with + # specific state type (module name), names, and/or IDs + self.nodes_lookup_map: dict[tuple[str, str], set[str]] = {} + self.sls_to_nodes: dict[str, set[str]] = {} + + def _add_prereq(self, node_tag: str, req_tag: str): + # the prerequiring chunk is the state declaring the prereq + # requisite; the prereq/prerequired state is the one that is + # declared in the requisite prereq statement + self.dag.nodes[node_tag]["chunk"]["__prerequiring__"] = True + prereq_chunk = self.dag.nodes[req_tag]["chunk"] + # set __prereq__ true to run the state in test mode + prereq_chunk["__prereq__"] = True + prereq_check_node = self._get_prereq_node_tag(req_tag) + if not self.dag.nodes.get(prereq_check_node): + self.dag.add_node( + prereq_check_node, chunk=prereq_chunk, state=prereq_chunk["state"] + ) + # all the dependencies of the node for the prerequired + # chunk also need to be applied to its prereq check node + for dependency_node, _, req_type, data in self.dag.in_edges( + req_tag, data=True, keys=True + ): + if req_type != RequisiteType.PREREQ: + self.dag.add_edge( + dependency_node, prereq_check_node, req_type, **data + ) + self.dag.add_edge(prereq_check_node, node_tag, RequisiteType.PREREQ) + self.dag.add_edge(node_tag, req_tag, RequisiteType.REQUIRE) + + def _add_reqs( + self, + node_tag: str, + has_preq_node: bool, + req_type: RequisiteType, + req_tags: Iterable[str], + ) -> None: + for req_tag in req_tags: + if req_type == RequisiteType.PREREQ: + self._add_prereq(node_tag, req_tag) + else: + if has_preq_node: + # if the low chunk is set to run in test mode for a + # prereq check then also add the requisites to the + # prereq node. 
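+                    # (the prereq node is the test-mode twin of this
+                    # chunk that _add_prereq creates so it can be
+                    # dry-run before its prerequiring state)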
+ prereq_node_tag = self._get_prereq_node_tag(node_tag) + self.dag.add_edge(req_tag, prereq_node_tag, key=req_type) + self.dag.add_edge(req_tag, node_tag, key=req_type) + + def _copy_edges(self, source: str, dest: str) -> None: + """Add the edges from source node to dest node""" + for dependency, _, req_type, data in self.dag.in_edges( + source, data=True, keys=True + ): + self.dag.add_edge(dependency, dest, req_type, **data) + for _, dependent, req_type, data in self.dag.out_edges( + source, data=True, keys=True + ): + self.dag.add_edge(dest, dependent, req_type, **data) + + def _get_chunk_order(self, cap: int, node: str) -> tuple[int | float, int | float]: + dag = self.dag + stack: list[tuple[str, bool, int | float, int | float]] = [ + # node, is_processing_children, child_min, req_order + (node, False, float("inf"), float("-inf")) + ] + order = cap + while stack: + node, is_processing_children, child_min, req_order = stack[-1] + node_data = dag.nodes[node] + chunk = node_data.get("chunk", {}) + if not is_processing_children: # initial stage + order = chunk.get("order") + if order is None or not isinstance(order, (int, float)): + if order == "last": + order = cap + 1000000 + elif order == "first": + order = 0 + else: + order = cap + chunk["order"] = order + name_order = chunk.pop("name_order", 0) + if name_order: + order += name_order / 10000.0 + chunk["order"] = order + if order < 0: + order += cap + 1000000 + chunk["order"] = order + stack.pop() + # update stage + stack.append((node, True, child_min, req_order)) + else: # after processing node + child_min_node = node_data.get("child_min") + if child_min_node is None: + for _, child, req_type in dag.out_edges(node, keys=True): + if req_order <= req_type.weight: + req_order = req_type.weight + child_order = ( + dag.nodes[child] + .get("chunk", {}) + .get("order", float("inf")) + ) + child_min = min(child_min, child_order) + node_data["child_min"] = child_min + if order > child_min: + order = child_min + stack.pop() + return (order, chunk["order"]) + + def _get_prereq_node_tag(self, low_tag: str): + return f"{low_tag}_|-__prereq_test__" + + def _is_fnmatch_pattern(self, value: str) -> bool: + return any(char in value for char in ("*", "?", "[", "]")) + + def _chunk_str(self, chunk: LowChunk) -> str: + node_dict = { + "SLS": chunk["__sls__"], + "ID": chunk["__id__"], + } + if chunk["__id__"] != chunk["name"]: + node_dict["NAME"] = chunk["name"] + return str(node_dict) + + def add_chunk(self, low: LowChunk, allow_aggregate: bool) -> None: + node_id = _gen_tag(low) + self.dag.add_node( + node_id, allow_aggregate=allow_aggregate, chunk=low, state=low["state"] + ) + self.nodes_lookup_map.setdefault((low["state"], low["name"]), set()).add( + node_id + ) + self.nodes_lookup_map.setdefault((low["state"], low["__id__"]), set()).add( + node_id + ) + self.nodes_lookup_map.setdefault(("id", low["__id__"]), set()).add(node_id) + self.nodes_lookup_map.setdefault(("id", low["name"]), set()).add(node_id) + if sls := low.get("__sls__"): + self.sls_to_nodes.setdefault(sls, set()).add(node_id) + if sls_included_from := low.get("__sls_included_from__"): + for sls in sls_included_from: + self.sls_to_nodes.setdefault(sls, set()).add(node_id) + + def add_dependency( + self, low: LowChunk, req_type: RequisiteType, req_key: str, req_val: str + ) -> bool: + found = False + prereq_tag = None + has_prereq_node = low.get("__prereq__", False) + if req_key == "sls": + # Allow requisite tracking of entire sls files + if self._is_fnmatch_pattern(req_val): + found = True 
+                node_tag = _gen_tag(low)
+                for sls, req_tags in self.sls_to_nodes.items():
+                    if fnmatch.fnmatch(sls, req_val):
+                        found = True
+                        self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
+            else:
+                node_tag = _gen_tag(low)
+                if req_tags := self.sls_to_nodes.get(req_val, []):
+                    found = True
+                    self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
+        elif self._is_fnmatch_pattern(req_val):
+            # This iterates over every chunk to check
+            # if any match instead of doing a look up since
+            # it has to support wildcard matching.
+            node_tag = _gen_tag(low)
+            for (state_type, name_or_id), req_tags in self.nodes_lookup_map.items():
+                if req_key == state_type and (fnmatch.fnmatch(name_or_id, req_val)):
+                    found = True
+                    self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
+        elif req_tags := self.nodes_lookup_map.get((req_key, req_val)):
+            found = True
+            node_tag = _gen_tag(low)
+            self._add_reqs(node_tag, has_prereq_node, req_type, req_tags)
+        return found
+
+    def add_requisites(self, low: LowChunk, disabled_reqs: Sequence[str]) -> str | None:
+        """
+        Add all the dependency requisites of the low chunk as edges to the DAG
+
+        :return: an error string if there was an error, otherwise None
+        """
+        present = False
+        for req_type in RequisiteType:
+            if req_type.value in low:
+                present = True
+                break
+        if not present:
+            return None
+        reqs = {
+            rtype: []
+            for rtype in (
+                RequisiteType.REQUIRE,
+                RequisiteType.REQUIRE_ANY,
+                RequisiteType.WATCH,
+                RequisiteType.WATCH_ANY,
+                RequisiteType.PREREQ,
+                RequisiteType.ONFAIL,
+                RequisiteType.ONFAIL_ANY,
+                RequisiteType.ONFAIL_ALL,
+                RequisiteType.ONCHANGES,
+                RequisiteType.ONCHANGES_ANY,
+            )
+        }
+        for r_type in reqs:
+            if low_reqs := low.get(r_type.value):
+                if r_type in disabled_reqs:
+                    log.warning("The %s requisite has been disabled, ignoring.", r_type)
+                    continue
+                for req_ref in low_reqs:
+                    if isinstance(req_ref, str):
+                        req_ref = {"id": req_ref}
+                    req_ref = trim_req(req_ref)
+                    # req_key: match state module name
+                    # req_val: match state id or name
+                    req_key, req_val = next(iter(req_ref.items()))
+                    if req_val is None:
+                        continue
+                    if not isinstance(req_val, str):
+                        return (
+                            f"Requisite [{r_type}: {req_key}] in state"
+                            f" [{low['name']}] in SLS [{low.get('__sls__')}]"
+                            " must have a string as the value"
+                        )
+                    found = self.add_dependency(low, r_type, req_key, req_val)
+                    if not found:
+                        return (
+                            "Referenced state does not exist"
+                            f" for requisite [{r_type}: ({req_key}: {req_val})] in state"
+                            f" [{low['name']}] in SLS [{low.get('__sls__')}]"
+                        )
+        return None
+
+    def aggregate_and_order_chunks(self, cap: int) -> list[LowChunk]:
+        """
+        Aggregate eligible nodes in the dependency graph.
+
+        Return a list of the chunks in the sorted order in which the
+        chunks should be executed.
+        Nodes are eligible for aggregation if the state functions in the
+        chunks match and aggregation is enabled in the configuration for
+        the state function.
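+
+        For example, several ``pkg.installed`` chunks with no requisite
+        path between them are merged behind a synthetic
+        ``__aggregate_pkg_0__`` node so one ``pkg.mod_aggregate`` pass
+        can combine their package lists.
+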
+        :param cap: the maximum order value configured in the states
+        :return: the ordered chunks
+        """
+        dag: nx.MultiDiGraph = self.dag
+        # dict for tracking topo order and for mapping each node that
+        # was aggregated to the aggregated node that replaces it
+        topo_order = {}
+
+        max_group_size = 500
+        groups_by_type = defaultdict(list)
+
+        def _get_order(node):
+            chunk = dag.nodes[node].get("chunk", {})
+            chunk_label = "{0[state]}{0[name]}{0[fun]}".format(chunk) if chunk else ""
+            chunk_order = self._get_chunk_order(cap, node)
+            return (chunk_order, chunk_label)
+
+        # Iterate over the nodes in topological order to get the correct
+        # ordering which takes requisites into account
+        for node in nx.lexicographical_topological_sort(dag, key=_get_order):
+            topo_order[node] = None
+            data = dag.nodes[node]
+            if not data.get("allow_aggregate"):
+                continue
+
+            node_type = data["state"]
+            added = False
+            for idx, group in enumerate(groups_by_type[node_type]):
+                if len(group) >= max_group_size:
+                    continue
+                # Check if the node can be reached from any node in the group
+                first_node = next(iter(group))
+                agg_node = topo_order.get(first_node)
+                # Since we are iterating in topological order we know
+                # that there is no path from the current node to the
+                # node in the group; so we only need to check the path
+                # from the group node to the current node
+                reachable = nx.has_path(dag, agg_node or first_node, node)
+                if not reachable:
+                    # No path, so the node can join the group
+                    if agg_node is None:
+                        # there is now more than one node for this
+                        # group so aggregate them
+                        agg_node = f"__aggregate_{node_type}_{idx}__"
+                        dag.add_node(
+                            agg_node, state=node_type, aggregated_nodes=group.keys()
+                        )
+                        # add the edges of the first node in the group to
+                        # the aggregate
+                        self._copy_edges(first_node, agg_node)
+                        dag.nodes[first_node]["aggregate"] = agg_node
+                        topo_order[first_node] = agg_node
+
+                    self._copy_edges(node, agg_node)
+                    dag.nodes[node]["aggregate"] = agg_node
+                    topo_order[node] = agg_node
+                    group[node] = None
+                    added = True
+                    break
+
+            # If the node was not added to any group, start a new one
+            if not added:
+                # use a dict instead of set to retain insertion ordering
+                groups_by_type[node_type].append({node: None})
+
+        ordered_chunks = [dag.nodes[node].get("chunk", {}) for node in topo_order]
+        return ordered_chunks
+
+    def find_cycle_edges(self) -> list[tuple[LowChunk, RequisiteType, LowChunk]]:
+        """
+        Find the edges of a cycle if the graph is not a directed acyclic graph
+        """
+        dag = self.dag
+        try:
+            cycle_edges = []
+            for dependency, dependent, req_type in nx.find_cycle(dag):
+                dependency_chunk = self.dag.nodes[dependency]["chunk"]
+                dependent_chunk = self.dag.nodes[dependent]["chunk"]
+                if (
+                    req_type not in dependent_chunk
+                    and req_type == RequisiteType.REQUIRE
+                ):
+                    # show the original prereq requisite for the require
+                    # edges that were added for the prereq
+                    req_type = RequisiteType.PREREQ
+                cycle_edges.append((dependent_chunk, req_type, dependency_chunk))
+            return cycle_edges
+        except nx.NetworkXNoCycle:
+            # If the graph is a DAG, return an empty list
+            return []
+
+    def get_aggregate_chunks(self, low: LowChunk) -> list[LowChunk]:
+        """
+        Get the chunks that were set to be valid for aggregation with
+        this low chunk.
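+        An empty list is returned if the chunk was not aggregated.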
+ """ + low_tag = _gen_tag(low) + if aggregate_node := self.dag.nodes[low_tag].get("aggregate"): + return [ + self.dag.nodes[node]["chunk"] + for node in self.dag.nodes[aggregate_node]["aggregated_nodes"] + ] + return [] + + def get_cycles_str(self) -> str: + cycle_edges = [ + f"({self._chunk_str(dependency)}, '{req_type.value}', {self._chunk_str(dependent)})" + for dependency, req_type, dependent in self.find_cycle_edges() + ] + return ", ".join(cycle_edges) + + def get_dependencies( + self, low: LowChunk + ) -> Generator[tuple[RequisiteType, LowChunk], None, None]: + """Get the requisite type and low chunk for each dependency of low""" + low_tag = _gen_tag(low) + if low.get("__prereq__"): + # if the low chunk is set to run in test mode for a + # prereq check then return the reqs for prereq test node. + low_tag = self._get_prereq_node_tag(low_tag) + for req_id, _, req_type in self.dag.in_edges(low_tag, keys=True): + if chunk := self.dag.nodes[req_id].get("chunk"): + yield req_type, chunk + else: + for node in self.dag.nodes[req_id]["aggregated_nodes"]: + yield req_type, self.dag.nodes[node].get("chunk") diff --git a/salt/utils/thin.py b/salt/utils/thin.py index 8596253749e..691f44076d1 100644 --- a/salt/utils/thin.py +++ b/salt/utils/thin.py @@ -23,6 +23,7 @@ import distro import jinja2 import looseversion import msgpack +import networkx import packaging import tornado import yaml @@ -280,6 +281,7 @@ def get_tops_python(py_ver, exclude=None, ext_py_ver=None): "yaml", "tornado", "msgpack", + "networkx", "certifi", "singledispatch", "concurrent", @@ -330,7 +332,7 @@ def get_ext_tops(config): """ config = copy.deepcopy(config) or {} alternatives = {} - required = ["jinja2", "yaml", "tornado", "msgpack"] + required = ["jinja2", "yaml", "tornado", "msgpack", "networkx"] tops = [] for ns, cfg in config.items(): alternatives[ns] = cfg @@ -429,6 +431,7 @@ def get_tops(extra_mods="", so_mods=""): yaml, tornado, msgpack, + networkx, certifi, singledispatch, concurrent, @@ -1035,6 +1038,7 @@ def gen_min( "salt/utils/process.py", "salt/utils/jinja.py", "salt/utils/rsax931.py", + "salt/utils/requisite.py", "salt/utils/context.py", "salt/utils/minion.py", "salt/utils/error.py", diff --git a/tests/pytests/functional/modules/state/requisites/test_aggregate.py b/tests/pytests/functional/modules/state/requisites/test_aggregate.py new file mode 100644 index 00000000000..08131202aff --- /dev/null +++ b/tests/pytests/functional/modules/state/requisites/test_aggregate.py @@ -0,0 +1,123 @@ +import textwrap + +import pytest + + +@pytest.fixture(scope="module") +def minion_config_overrides(): + return { + "file_client": "local", + "master_type": "disable", + "state_aggregate": True, + } + + +@pytest.fixture(scope="module", autouse=True) +def nop_aggregate_mod(loaders, state_tree): + mod_contents = textwrap.dedent( + """ + __virtualname__ = "aggr" + + + def __virtual__(): + return __virtualname__ + + + def test(name, aggrs=None, **kwargs): + return { + "name": name, + "result": True, + "comment": "", + "changes": { + "aggrs": aggrs or [name] + }, + } + + + def mod_aggregate(low, chunks, running): + # modeled after the pkg state module + aggrs = [] + for chunk in chunks: + tag = __utils__["state.gen_tag"](chunk) + if tag in running: + # Already ran + continue + if chunk.get("state") == "aggr": + if "__agg__" in chunk: + continue + # Check for the same function + if chunk.get("fun") != low.get("fun"): + continue + + if "aggrs" in chunk: + aggrs.extend(chunk["aggrs"]) + chunk["__agg__"] = True + elif "name" in chunk: 
+ aggrs.append(chunk["name"]) + chunk["__agg__"] = True + if aggrs: + if "aggrs" in low: + low["aggrs"].extend(aggrs) + else: + low["aggrs"] = aggrs + return low + """ + ) + with pytest.helpers.temp_file("aggrs.py", mod_contents, state_tree / "_states"): + res = loaders.modules.saltutil.sync_all() + assert "states" in res + assert "states.aggrs" in res["states"] + loaders.reload_all() + assert hasattr(loaders.states, "aggr") + yield + loaders.modules.saltutil.sync_all() + loaders.reload_all() + + +def test_aggregate_requisites(state_tree, modules): + """Test to ensure that aggregated states honor requisites""" + sls_name = "requisite_aggregate_test" + sls_contents = """ + "packages 1": + aggr.test: + - aggrs: + - hello + "listen to packages 2": + test.succeed_with_changes: + - listen: + - "packages 2" + "packages 2": + aggr: + - test + - aggrs: + - cowsay + - fortune-mod + - require: + - "requirement" + "packages 3": + aggr.test: + - name: cowsay + - require: + - "test": "requirement" + "requirement": + test.nop: + - name: "requirement_name" + """ + sls_tempfile = pytest.helpers.temp_file(f"{sls_name}.sls", sls_contents, state_tree) + with sls_tempfile: + # Apply the state file + ret = modules.state.apply(sls_name) + + # Check the results + assert not ret.failed + expected_order = [ + "aggr_|-packages 1_|-packages 1_|-test", + "test_|-listen to packages 2_|-listen to packages 2_|-succeed_with_changes", + "test_|-requirement_|-requirement_name_|-nop", + "aggr_|-packages 2_|-packages 2_|-test", + "aggr_|-packages 3_|-cowsay_|-test", + "test_|-listener_listen to packages 2_|-listen to packages 2_|-mod_watch", + ] + for index, state_run in enumerate(ret): + assert state_run.result is True + assert expected_order[index] in state_run.raw diff --git a/tests/pytests/functional/modules/state/requisites/test_mixed.py b/tests/pytests/functional/modules/state/requisites/test_mixed.py index 54aab4dcded..8a723051083 100644 --- a/tests/pytests/functional/modules/state/requisites/test_mixed.py +++ b/tests/pytests/functional/modules/state/requisites/test_mixed.py @@ -1,7 +1,5 @@ import pytest -from . 
import normalize_ret - pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.core_test, @@ -93,13 +91,21 @@ def test_requisites_mixed_require_prereq_use_1(state, state_tree): - prereq: - cmd: B """ + expected_result = [ + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'require', " + "{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}), " + "({'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'}, 'prereq', " + "{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'prereq', " + "{'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'})" + ] with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - result = normalize_ret(ret.raw) - assert result == expected_simple_result + assert ret.failed + assert ret.errors == expected_result -@pytest.mark.skip("Undetected infinite loops prevents this test from running...") def test_requisites_mixed_require_prereq_use_2(state, state_tree): sls_contents = """ # Complex require/require_in/prereq/preqreq_in graph @@ -153,47 +159,21 @@ def test_requisites_mixed_require_prereq_use_2(state, state_tree): - require_in: - cmd: A """ - expected_result = { - "cmd_|-A_|-echo A fifth_|-run": { - "__run_num__": 4, - "comment": 'Command "echo A fifth" run', - "result": True, - "changes": True, - }, - "cmd_|-B_|-echo B third_|-run": { - "__run_num__": 2, - "comment": 'Command "echo B third" run', - "result": True, - "changes": True, - }, - "cmd_|-C_|-echo C second_|-run": { - "__run_num__": 1, - "comment": 'Command "echo C second" run', - "result": True, - "changes": True, - }, - "cmd_|-D_|-echo D first_|-run": { - "__run_num__": 0, - "comment": 'Command "echo D first" run', - "result": True, - "changes": True, - }, - "cmd_|-E_|-echo E fourth_|-run": { - "__run_num__": 3, - "comment": 'Command "echo E fourth" run', - "result": True, - "changes": True, - }, - } - # undetected infinite loops prevents this test from running... 
- # TODO: this is actually failing badly + expected_result = [ + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A fifth'}, 'require', " + "{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B third'}), " + "({'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C second'}, 'prereq', " + "{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A fifth'}), " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B third'}, 'require', " + "{'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C second'})" + ] with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - result = normalize_ret(ret.raw) - assert result == expected_result + assert ret.failed + assert ret.errors == expected_result -@pytest.mark.skip("Undetected infinite loops prevents this test from running...") def test_requisites_mixed_require_prereq_use_3(state, state_tree): # test Traceback recursion prereq+require #8785 sls_contents = """ @@ -217,15 +197,19 @@ def test_requisites_mixed_require_prereq_use_3(state, state_tree): - prereq: - cmd: A """ - expected_result = ['A recursive requisite was found, SLS "requisite" ID "B" ID "A"'] - # TODO: this is actually failing badly + expected_result = [ + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'require', " + "{'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'prereq', " + "{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})" + ] with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert isinstance(ret, list) # Error - assert ret == expected_result + assert ret.failed + assert ret.errors == expected_result -@pytest.mark.skip("Undetected infinite loops prevents this test from running...") def test_requisites_mixed_require_prereq_use_4(state, state_tree): # test Infinite recursion prereq+require #8785 v2 sls_contents = """ @@ -260,15 +244,19 @@ def test_requisites_mixed_require_prereq_use_4(state, state_tree): - prereq: - cmd: B """ - expected_result = ['A recursive requisite was found, SLS "requisite" ID "B" ID "A"'] - # TODO: this is actually failing badly + expected_result = [ + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'}, 'require', " + "{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}), " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'prereq', " + "{'SLS': 'requisite', 'ID': 'C', 'NAME': 'echo C'})" + ] with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert isinstance(ret, list) # Error - assert ret == expected_result + assert ret.failed + assert ret.errors == expected_result -@pytest.mark.skip("Undetected infinite loops prevents this test from running...") def test_requisites_mixed_require_prereq_use_5(state, state_tree): # test Infinite recursion prereq+require #8785 v3 sls_contents = """ @@ -300,12 +288,17 @@ def test_requisites_mixed_require_prereq_use_5(state, state_tree): - require_in: - cmd: A """ - expected_result = ['A recursive requisite was found, SLS "requisite" ID "B" ID "A"'] - # TODO: this is actually failing badly, and expected result is maybe not a recursion + expected_result = [ + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'require', " + "{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}), " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, 'prereq', " + "{'SLS': 'requisite', 'ID': 'A', 'NAME': 
'echo A'})" + ] with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert isinstance(ret, list) # Error - assert ret == expected_result + assert ret.failed + assert ret.errors == expected_result def test_issue_46762_prereqs_on_a_state_with_unfulfilled_requirements( @@ -451,4 +444,31 @@ def test_requisites_mixed_illegal_req(state_tree): """ with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state_mod.sls("requisite") - assert ret == ["Illegal requisite \"['A']\", please check your syntax.\n"] + assert ret == [ + 'Illegal requisite "[\'A\']" in SLS "requisite", please check your syntax.\n' + ] + + +def test_many_requisites(state, state_tree): + """Test to make sure that many requisites does not take too long""" + + sls_name = "many_aggregates_test" + sls_contents = """ + {%- for i in range(1000) %} + nop-{{ i }}: + test.nop: + {%- if i > 0 %} + - require: + - test: nop-{{ i - 1 }} + {%- else %} + - require: [] + {%- endif %} + {%- endfor %} + """ + with pytest.helpers.temp_file(f"{sls_name}.sls", sls_contents, state_tree): + ret = state.sls(sls_name) + # Check the results + assert not ret.failed + for index, state_run in enumerate(ret): + expected_tag = f"test_|-nop-{index}_|-nop-{index}_|-nop" + assert expected_tag in state_run.raw diff --git a/tests/pytests/functional/modules/state/requisites/test_onchanges.py b/tests/pytests/functional/modules/state/requisites/test_onchanges.py index 5c3f0c3e390..c55792e7a3f 100644 --- a/tests/pytests/functional/modules/state/requisites/test_onchanges.py +++ b/tests/pytests/functional/modules/state/requisites/test_onchanges.py @@ -301,12 +301,23 @@ def test_onchanges_any_recursive_error_issues_50811(state, state_tree): test that onchanges_any does not causes a recursive error """ sls_contents = """ - command-test: - cmd.run: - - name: ls + unchanged_A: + test.succeed_without_changes + + unchanged_B: + test.succeed_without_changes + + prereq_on_test_on_changes_any: + test.succeed_with_changes: + - prereq: + - test_on_changes_any + + test_on_changes_any: + test.succeed_without_changes: - onchanges_any: - - file: /tmp/an-unfollowed-file + - unchanged_A + - unchanged_B """ with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert ret["command-test"].result is False + assert ret["prereq_on_test_on_changes_any"].result is True diff --git a/tests/pytests/functional/modules/state/requisites/test_prereq.py b/tests/pytests/functional/modules/state/requisites/test_prereq.py index 546df6f9909..a94d10c11d9 100644 --- a/tests/pytests/functional/modules/state/requisites/test_prereq.py +++ b/tests/pytests/functional/modules/state/requisites/test_prereq.py @@ -91,17 +91,13 @@ def test_requisites_prereq_simple_ordering_and_errors_1(state, state_tree): cmd.run: - name: echo C second - # will fail with "The following requisites were not found" + # will fail I: - cmd.run: + test.fail_without_changes: - name: echo I - - prereq: - - cmd: Z J: - cmd.run: + test.fail_without_changes: - name: echo J - - prereq: - - foobar: A """ expected_result = { "cmd_|-A_|-echo A third_|-run": { @@ -122,19 +118,15 @@ def test_requisites_prereq_simple_ordering_and_errors_1(state, state_tree): "result": True, "changes": True, }, - "cmd_|-I_|-echo I_|-run": { + "test_|-I_|-echo I_|-fail_without_changes": { "__run_num__": 3, - "comment": "The following requisites were not found:\n" - + " prereq:\n" - + " cmd: Z\n", + "comment": "Failure!", "result": False, 
"changes": False, }, - "cmd_|-J_|-echo J_|-run": { + "test_|-J_|-echo J_|-fail_without_changes": { "__run_num__": 4, - "comment": "The following requisites were not found:\n" - + " prereq:\n" - + " foobar: A\n", + "comment": "Failure!", "result": False, "changes": False, }, @@ -224,12 +216,10 @@ def test_requisites_prereq_simple_ordering_and_errors_3(state, state_tree): cmd.run: - name: echo C second - # will fail with "The following requisites were not found" + # will fail I: - cmd.run: + test.fail_without_changes: - name: echo I - - prereq: - - Z """ expected_result = { "cmd_|-A_|-echo A third_|-run": { @@ -250,11 +240,9 @@ def test_requisites_prereq_simple_ordering_and_errors_3(state, state_tree): "result": True, "changes": True, }, - "cmd_|-I_|-echo I_|-run": { + "test_|-I_|-echo I_|-fail_without_changes": { "__run_num__": 3, - "comment": "The following requisites were not found:\n" - + " prereq:\n" - + " id: Z\n", + "comment": "Failure!", "result": False, "changes": False, }, @@ -270,7 +258,7 @@ def test_requisites_prereq_simple_ordering_and_errors_4(state, state_tree): """ Call sls file containing several prereq_in and prereq. - Ensure that some of them are failing and that the order is right. + Ensure that the order is right. """ sls_contents = """ # Theory: @@ -427,13 +415,14 @@ def test_requisites_prereq_simple_ordering_and_errors_6(state, state_tree): sls_contents = """ # issue #8211 # expected rank - # B --+ 1 - # | - # C <-+ ----+ 2/3 - # | - # D ---+ | 3/2 - # | | - # A <--+ <--+ 4 + # + # D --+ -------+ 1 + # | + # B --+ | 2 + # | | + # C <-+ --+ | 3 + # | | + # A <-----+ <--+ 4 # # resulting rank # D --+ @@ -489,19 +478,19 @@ def test_requisites_prereq_simple_ordering_and_errors_6(state, state_tree): "changes": True, }, "cmd_|-B_|-echo B first_|-run": { - "__run_num__": 0, + "__run_num__": 1, "comment": 'Command "echo B first" run', "result": True, "changes": True, }, "cmd_|-C_|-echo C second_|-run": { - "__run_num__": 1, + "__run_num__": 2, "comment": 'Command "echo C second" run', "result": True, "changes": True, }, "cmd_|-D_|-echo D third_|-run": { - "__run_num__": 2, + "__run_num__": 0, "comment": 'Command "echo D third" run', "result": True, "changes": True, @@ -522,7 +511,7 @@ def test_requisites_prereq_simple_ordering_and_errors_7(state, state_tree): """ sls_contents = """ # will fail with 'Cannot extend ID Z (...) not part of the high state.' 
- # and not "The following requisites were not found" like in yaml list syntax + # and not "Referenced state does not exist for requisite" like in yaml list syntax I: cmd.run: - name: echo I @@ -530,13 +519,14 @@ def test_requisites_prereq_simple_ordering_and_errors_7(state, state_tree): - cmd: Z """ errmsg = ( - "The following requisites were not found:\n" - " prereq:\n" - " cmd: Z\n" + "Referenced state does not exist for requisite " + "[prereq: (cmd: Z)] in state " + "[echo I] in SLS [requisite]" ) with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert ret["cmd_|-I_|-echo I_|-run"].comment == errmsg + assert ret.failed + assert ret.errors == [errmsg] def test_requisites_prereq_simple_ordering_and_errors_8(state, state_tree): @@ -557,13 +547,14 @@ def test_requisites_prereq_simple_ordering_and_errors_8(state, state_tree): - foobar: A """ errmsg = ( - "The following requisites were not found:\n" - " prereq:\n" - " foobar: A\n" + "Referenced state does not exist for requisite " + "[prereq: (foobar: A)] in state " + "[echo B] in SLS [requisite]" ) with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert ret["cmd_|-B_|-echo B_|-run"].comment == errmsg + assert ret.failed + assert ret.errors == [errmsg] def test_requisites_prereq_simple_ordering_and_errors_9(state, state_tree): @@ -584,21 +575,52 @@ def test_requisites_prereq_simple_ordering_and_errors_9(state, state_tree): - foobar: C """ errmsg = ( - "The following requisites were not found:\n" - " prereq:\n" - " foobar: C\n" + "Referenced state does not exist for requisite " + "[prereq: (foobar: C)] in state " + "[echo B] in SLS [requisite]" ) with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - assert ret["cmd_|-B_|-echo B_|-run"].comment == errmsg + assert ret.failed + assert ret.errors == [errmsg] -@pytest.mark.skip("issue #8210 : prereq recursion undetected") def test_requisites_prereq_simple_ordering_and_errors_10(state, state_tree): """ - Call sls file containing several prereq_in and prereq. + Call sls file containing several prereq. - Ensure that some of them are failing and that the order is right. + Ensure a recursive requisite error occurs. + """ + sls_contents = """ + A: + cmd.run: + - name: echo A + - prereq: + - cmd: B + B: + cmd.run: + - name: echo B + - prereq: + - cmd: A + """ + errmsg = ( + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, " + "'prereq', {'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, " + "'prereq', {'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})" + ) + with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): + ret = state.sls("requisite") + assert ret.failed + assert ret.errors == [errmsg] + + +def test_requisites_prereq_in_simple_ordering_and_errors(state, state_tree): + """ + Call sls file containing several prereq_in. + + Ensure a recursive requisite error occurs. 
""" sls_contents = """ A: @@ -613,8 +635,11 @@ def test_requisites_prereq_simple_ordering_and_errors_10(state, state_tree): - cmd: A """ errmsg = ( - 'A recursive requisite was found, SLS "requisites.prereq_recursion_error" ID' - ' "B" ID "A"' + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, " + "'prereq', {'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, " + "'prereq', {'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})" ) with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") diff --git a/tests/pytests/functional/modules/state/requisites/test_require.py b/tests/pytests/functional/modules/state/requisites/test_require.py index 5c041630573..ce2a3fd0109 100644 --- a/tests/pytests/functional/modules/state/requisites/test_require.py +++ b/tests/pytests/functional/modules/state/requisites/test_require.py @@ -61,9 +61,9 @@ def test_requisites_full_sls_require(state, state_tree): def test_requisites_require_no_state_module(state, state_tree): """ - Call sls file containing several require_in and require. + Call sls file containing several require_in and require with a missing req. - Ensure that some of them are failing and that the order is right. + Ensure an error is given. """ sls_contents = """ # Complex require/require_in graph @@ -111,135 +111,42 @@ def test_requisites_require_no_state_module(state, state_tree): - require_in: - A - # will fail with "The following requisites were not found" + # will fail with "Referenced state does not exist for requisite" G: cmd.run: - name: echo G - require: - Z - # will fail with "The following requisites were not found" + # will fail with "Referenced state does not exist for requisite" H: cmd.run: - name: echo H - require: - Z """ - expected_result = { - "cmd_|-A_|-echo A fifth_|-run": { - "__run_num__": 4, - "comment": 'Command "echo A fifth" run', - "result": True, - "changes": True, - }, - "cmd_|-B_|-echo B second_|-run": { - "__run_num__": 1, - "comment": 'Command "echo B second" run', - "result": True, - "changes": True, - }, - "cmd_|-C_|-echo C third_|-run": { - "__run_num__": 2, - "comment": 'Command "echo C third" run', - "result": True, - "changes": True, - }, - "cmd_|-D_|-echo D first_|-run": { - "__run_num__": 0, - "comment": 'Command "echo D first" run', - "result": True, - "changes": True, - }, - "cmd_|-E_|-echo E fourth_|-run": { - "__run_num__": 3, - "comment": 'Command "echo E fourth" run', - "result": True, - "changes": True, - }, - "cmd_|-G_|-echo G_|-run": { - "__run_num__": 5, - "comment": "The following requisites were not found:\n" - + " require:\n" - + " id: Z\n", - "result": False, - "changes": False, - }, - "cmd_|-H_|-echo H_|-run": { - "__run_num__": 6, - "comment": "The following requisites were not found:\n" - + " require:\n" - + " id: Z\n", - "result": False, - "changes": False, - }, - } + errmsgs = [ + ( + "Referenced state does not exist for requisite [require: (id: Z)]" + " in state [echo G] in SLS [requisite]" + ), + ( + "Referenced state does not exist for requisite [require: (id: Z)]" + " in state [echo H] in SLS [requisite]" + ), + ] + with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - result = normalize_ret(ret.raw) - assert result == expected_result + assert ret.failed + assert ret.errors == errmsgs def test_requisites_require_ordering_and_errors_1(state, state_tree): """ Call sls file containing several require_in 
and require. - Ensure that some of them are failing and that the order is right. + Ensure there are errors due to requisites. """ - expected_result = { - "cmd_|-A_|-echo A fifth_|-run": { - "__run_num__": 4, - "comment": 'Command "echo A fifth" run', - "result": True, - "changes": True, - }, - "cmd_|-B_|-echo B second_|-run": { - "__run_num__": 1, - "comment": 'Command "echo B second" run', - "result": True, - "changes": True, - }, - "cmd_|-C_|-echo C third_|-run": { - "__run_num__": 2, - "comment": 'Command "echo C third" run', - "result": True, - "changes": True, - }, - "cmd_|-D_|-echo D first_|-run": { - "__run_num__": 0, - "comment": 'Command "echo D first" run', - "result": True, - "changes": True, - }, - "cmd_|-E_|-echo E fourth_|-run": { - "__run_num__": 3, - "comment": 'Command "echo E fourth" run', - "result": True, - "changes": True, - }, - "cmd_|-F_|-echo F_|-run": { - "__run_num__": 5, - "comment": "The following requisites were not found:\n" - + " require:\n" - + " foobar: A\n", - "result": False, - "changes": False, - }, - "cmd_|-G_|-echo G_|-run": { - "__run_num__": 6, - "comment": "The following requisites were not found:\n" - + " require:\n" - + " cmd: Z\n", - "result": False, - "changes": False, - }, - "cmd_|-H_|-echo H_|-run": { - "__run_num__": 7, - "comment": "The following requisites were not found:\n" - + " require:\n" - + " cmd: Z\n", - "result": False, - "changes": False, - }, - } sls_contents = """ # Complex require/require_in graph # @@ -286,29 +193,44 @@ def test_requisites_require_ordering_and_errors_1(state, state_tree): - require_in: - cmd: A - # will fail with "The following requisites were not found" + # will fail with "Referenced state does not exist for requisite" F: cmd.run: - name: echo F - require: - foobar: A - # will fail with "The following requisites were not found" + # will fail with "Referenced state does not exist for requisite" G: cmd.run: - name: echo G - require: - cmd: Z - # will fail with "The following requisites were not found" + # will fail with "Referenced state does not exist for requisite" H: cmd.run: - name: echo H - require: - cmd: Z """ + errmsgs = [ + ( + "Referenced state does not exist for requisite [require: (foobar: A)]" + " in state [echo F] in SLS [requisite]" + ), + ( + "Referenced state does not exist for requisite [require: (cmd: Z)]" + " in state [echo G] in SLS [requisite]" + ), + ( + "Referenced state does not exist for requisite [require: (cmd: Z)]" + " in state [echo H] in SLS [requisite]" + ), + ] + with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") - result = normalize_ret(ret.raw) - assert result == expected_result + assert ret.failed + assert ret.errors == errmsgs def test_requisites_require_ordering_and_errors_2(state, state_tree): @@ -425,11 +347,13 @@ def test_requisites_require_ordering_and_errors_5(state, state_tree): - require: - cmd: A """ - # issue #8235 - # FIXME: Why is require enforcing list syntax while require_in does not? - # And why preventing it? 
- # Currently this state fails, should return C/B/A - errmsg = 'A recursive requisite was found, SLS "requisite" ID "B" ID "A"' + errmsg = ( + "Recursive requisites were found: " + "({'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'}, " + "'require', {'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}), " + "({'SLS': 'requisite', 'ID': 'A', 'NAME': 'echo A'}, 'require', " + "{'SLS': 'requisite', 'ID': 'B', 'NAME': 'echo B'})" + ) with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): ret = state.sls("requisite") assert ret.failed @@ -438,9 +362,9 @@ def test_requisites_require_ordering_and_errors_5(state, state_tree): def test_requisites_require_any(state, state_tree): """ - Call sls file containing several require_in and require. + Call sls file containing require_any. - Ensure that some of them are failing and that the order is right. + Ensure that the order is right. """ sls_contents = """ # Complex require/require_in graph @@ -512,9 +436,9 @@ def test_requisites_require_any(state, state_tree): def test_requisites_require_any_fail(state, state_tree): """ - Call sls file containing several require_in and require. + Call sls file containing require_any. - Ensure that some of them are failing and that the order is right. + Ensure that the order is right. """ sls_contents = """ # D should fail since both E & F fail diff --git a/tests/pytests/functional/modules/state/test_state.py b/tests/pytests/functional/modules/state/test_state.py index 7497892e807..32b811be1dd 100644 --- a/tests/pytests/functional/modules/state/test_state.py +++ b/tests/pytests/functional/modules/state/test_state.py @@ -130,7 +130,11 @@ def test_catch_recurse(state, state_tree): ret = state.sls("recurse-fail") assert ret.failed assert ( - 'A recursive requisite was found, SLS "recurse-fail" ID "/etc/mysql/my.cnf" ID "mysql"' + "Recursive requisites were found: " + "({'SLS': 'recurse-fail', 'ID': '/etc/mysql/my.cnf'}, " + "'require', {'SLS': 'recurse-fail', 'ID': 'mysql'}), " + "({'SLS': 'recurse-fail', 'ID': 'mysql'}, " + "'require', {'SLS': 'recurse-fail', 'ID': '/etc/mysql/my.cnf'})" in ret.errors ) diff --git a/tests/pytests/functional/states/file/test_replace.py b/tests/pytests/functional/states/file/test_replace.py index 8ef1003ddd4..e644e14d176 100644 --- a/tests/pytests/functional/states/file/test_replace.py +++ b/tests/pytests/functional/states/file/test_replace.py @@ -367,11 +367,10 @@ def test_file_replace_prerequired_issues_55775(modules, state_tree, tmp_path): test changes: test.succeed_with_changes: - name: changes - - require: - - test: test no changes """ with pytest.helpers.temp_file("file-replace.sls", sls_contents, state_tree): ret = modules.state.sls("file-replace") + assert not ret.failed for state_run in ret: assert state_run.result is True diff --git a/tests/pytests/unit/modules/state/test_state.py b/tests/pytests/unit/modules/state/test_state.py index 50f58fe487d..c8fbafb6c1b 100644 --- a/tests/pytests/unit/modules/state/test_state.py +++ b/tests/pytests/unit/modules/state/test_state.py @@ -91,16 +91,16 @@ class MockState: Mock verify_high method """ if self.flag: - return True + return ["verify_high_error"] else: - return -1 + return [] @staticmethod def compile_high_data(data): """ Mock compile_high_data """ - return [{"__id__": "ABC"}] + return [{"__id__": "ABC"}], [] @staticmethod def call_chunk(data, data1, data2): @@ -123,6 +123,9 @@ class MockState: """ return True + def order_chunks(self, data): + return data, [] + def requisite_in(self, data): # pylint: 
disable=unused-argument return data, [] @@ -143,9 +146,9 @@ class MockState: Mock render_state method """ if self.flag: - return {}, True + return {}, ["render_state_error"] else: - return {}, False + return {}, [] @staticmethod def get_top(): @@ -210,9 +213,9 @@ class MockState: Mock render_highstate method """ if self.flag: - return ["a", "b"], True + return ["a", "b"], ["render_highstate_error"] else: - return ["a", "b"], False + return ["a", "b"], [] @staticmethod def call_highstate( @@ -612,9 +615,13 @@ def test_sls_id(): with patch.object(salt.utils.args, "test_mode", mock): MockState.State.flag = True MockState.HighState.flag = True - assert state.sls_id("apache", "http") == 2 + assert state.sls_id("apache", "http") == [ + "render_highstate_error", + "verify_high_error", + ] MockState.State.flag = False + MockState.HighState.flag = False assert state.sls_id("ABC", "http") == {"": "ABC"} pytest.raises(SaltInvocationError, state.sls_id, "DEF", "http") @@ -632,9 +639,13 @@ def test_show_low_sls(): with patch.object(salt.utils.state, "get_sls_opts", mock): MockState.State.flag = True MockState.HighState.flag = True - assert state.show_low_sls("foo") == 2 + assert state.show_low_sls("foo") == [ + "render_highstate_error", + "verify_high_error", + ] MockState.State.flag = False + MockState.HighState.flag = False assert state.show_low_sls("foo") == [{"__id__": "ABC"}] @@ -656,7 +667,7 @@ def test_show_sls(): ) MockState.State.flag = True - assert state.show_sls("foo") == 2 + assert state.show_sls("foo") == ["verify_high_error"] MockState.State.flag = False assert state.show_sls("foo") == ["a", "b"] diff --git a/tests/pytests/unit/modules/test_chroot.py b/tests/pytests/unit/modules/test_chroot.py index b1e9beb674e..6e6beea73ac 100644 --- a/tests/pytests/unit/modules/test_chroot.py +++ b/tests/pytests/unit/modules/test_chroot.py @@ -289,6 +289,7 @@ def test_sls(): ) as _create_and_execute_salt_state: SSHHighState.return_value = SSHHighState SSHHighState.render_highstate.return_value = (None, []) + SSHHighState.state.compile_high_data.return_value = ([], []) SSHHighState.state.reconcile_extend.return_value = (None, []) SSHHighState.state.requisite_in.return_value = (None, []) SSHHighState.state.verify_high.return_value = [] diff --git a/tests/pytests/unit/state/test_reactor_compiler.py b/tests/pytests/unit/state/test_reactor_compiler.py index d0f03fbfdb7..e1902d7c5b0 100644 --- a/tests/pytests/unit/state/test_reactor_compiler.py +++ b/tests/pytests/unit/state/test_reactor_compiler.py @@ -166,7 +166,7 @@ def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path): } }, [ - "ID '1234' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a int. It may need to be quoted" + "ID '1234' in SLS '/srv/reactor/start.sls' is not formed as a string, but is type int. It may need to be quoted." ], ), ( @@ -177,7 +177,7 @@ def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path): } }, [ - "ID 'b'test'' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bytes. It may need to be quoted" + "ID 'b'test'' in SLS '/srv/reactor/start.sls' is not formed as a string, but is type bytes. It may need to be quoted." ], ), ( @@ -188,7 +188,7 @@ def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path): } }, [ - "ID 'True' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bool. It may need to be quoted" + "ID 'True' in SLS '/srv/reactor/start.sls' is not formed as a string, but is type bool. It may need to be quoted." 
], ), ( @@ -463,7 +463,7 @@ def test_compiler_verify_high_short_sls(minion_opts, tmp_path, high, exp): ), }, [ - "Requisite declaration ('local', 'add_test_1') in SLS /srv/reactor/start.sls is not formed as a single key dictionary" + "Requisite declaration ('local', 'add_test_1') in state add_test_2 in SLS /srv/reactor/start.sls is not formed as a single key dictionary" ], ), ( @@ -529,69 +529,8 @@ def test_compiler_verify_high_short_sls(minion_opts, tmp_path, high, exp): ] ), }, - ["Illegal requisite \"['add_test_1']\", is SLS /srv/reactor/start.sls\n"], - ), - ( - { - "add_test_1": OrderedDict( - [ - ( - "local", - [ - OrderedDict([("tgt", "poc-minion")]), - OrderedDict( - [ - ( - "args", - [ - OrderedDict( - [("cmd", "touch /tmp/test1")] - ) - ], - ) - ] - ), - "cmd.run", - ], - ), - ("__sls__", "/srv/reactor/start.sls"), - ] - ), - "add_test_2": OrderedDict( - [ - ( - "local", - [ - OrderedDict([("tgt", "poc-minion")]), - OrderedDict( - [ - ( - "args", - [ - OrderedDict( - [("cmd", "touch /tmp/test2")] - ) - ], - ) - ] - ), - OrderedDict( - [ - ( - "require", - [OrderedDict([("local", "add_test_2")])], - ) - ] - ), - "cmd.run", - ], - ), - ("__sls__", "/srv/reactor/start.sls"), - ] - ), - }, [ - 'A recursive requisite was found, SLS "/srv/reactor/start.sls" ID "add_test_2" ID "add_test_2"' + 'Illegal requisite "[\'add_test_1\']" in SLS "/srv/reactor/start.sls", please check your syntax.\n' ], ), ( diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index 4a546ce1dfe..fec7fb31f10 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -3,6 +3,7 @@ """ import logging +from typing import Any import pytest @@ -54,7 +55,7 @@ def test_render_error_on_invalid_requisite(minion_opts): exception when a requisite cannot be resolved """ with patch("salt.state.State._gather_pillar"): - high_data = { + high_data: dict[str, Any] = { "git": salt.state.HashableOrderedDict( [ ( @@ -88,12 +89,16 @@ def test_render_error_on_invalid_requisite(minion_opts): ] ) } + expected_result = [ + "Requisite [require: file] in state [git] in SLS" + " [issue_35226] must have a string as the value" + ] minion_opts["pillar"] = { "git": salt.state.HashableOrderedDict([("test1", "test")]) } state_obj = salt.state.State(minion_opts) - with pytest.raises(salt.exceptions.SaltRenderError): - state_obj.call_high(high_data) + return_result = state_obj.call_high(high_data) + assert expected_result == return_result def test_verify_onlyif_parse(minion_opts): @@ -756,6 +761,7 @@ def test_render_requisite_require_disabled(minion_opts): minion_opts["disabled_requisites"] = ["require"] state_obj = salt.state.State(minion_opts) ret = state_obj.call_high(high_data) + assert isinstance(ret, dict) run_num = ret["test_|-step_one_|-step_one_|-succeed_with_changes"][ "__run_num__" ] @@ -804,6 +810,7 @@ def test_render_requisite_require_in_disabled(minion_opts): minion_opts["disabled_requisites"] = ["require_in"] state_obj = salt.state.State(minion_opts) ret = state_obj.call_high(high_data) + assert isinstance(ret, dict) run_num = ret["test_|-step_one_|-step_one_|-succeed_with_changes"][ "__run_num__" ] @@ -846,7 +853,7 @@ def test_call_chunk_sub_state_run(minion_opts): with patch("salt.state.State.call", return_value=mock_call_return): minion_opts["disabled_requisites"] = ["require"] state_obj = salt.state.State(minion_opts) - ret = state_obj.call_chunk(low_data, {}, {}) + ret = state_obj.call_chunk(low_data, {}, []) 
sub_state = ret.get(expected_sub_state_tag) assert sub_state assert sub_state["__run_num__"] == 1 @@ -855,128 +862,6 @@ def test_call_chunk_sub_state_run(minion_opts): assert sub_state["__sls__"] == "external" -def test_aggregate_requisites(minion_opts): - """ - Test to ensure that the requisites are included in the aggregated low state. - """ - # The low that is returned from _mod_aggregrate - low = { - "state": "pkg", - "name": "other_pkgs", - "__sls__": "47628", - "__env__": "base", - "__id__": "other_pkgs", - "pkgs": ["byobu", "vim", "tmux", "google-cloud-sdk"], - "aggregate": True, - "order": 10002, - "fun": "installed", - "__agg__": True, - } - - # Chunks that have been processed through the pkg mod_aggregate function - chunks = [ - { - "state": "file", - "name": "/tmp/install-vim", - "__sls__": "47628", - "__env__": "base", - "__id__": "/tmp/install-vim", - "order": 10000, - "fun": "managed", - }, - { - "state": "file", - "name": "/tmp/install-tmux", - "__sls__": "47628", - "__env__": "base", - "__id__": "/tmp/install-tmux", - "order": 10001, - "fun": "managed", - }, - { - "state": "pkg", - "name": "other_pkgs", - "__sls__": "47628", - "__env __": "base", - "__id__": "other_pkgs", - "pkgs": ["byobu"], - "aggregate": True, - "order": 10002, - "fun": "installed", - }, - { - "state": "pkg", - "name": "bc", - "__sls__": "47628", - "__env__": "base", - "__id__": "bc", - "hold": True, - "__agg__": True, - "order": 10003, - "fun": "installed", - }, - { - "state": "pkg", - "name": "vim", - "__sls__": "47628", - "__env__": "base", - "__agg__": True, - "__id__": "vim", - "require": ["/tmp/install-vim"], - "order": 10004, - "fun": "installed", - }, - { - "state": "pkg", - "name": "tmux", - "__sls__": "47628", - "__env__": "base", - "__agg__": True, - "__id__": "tmux", - "require": ["/tmp/install-tmux"], - "order": 10005, - "fun": "installed", - }, - { - "state": "pkgrepo", - "name": "deb https://packages.cloud.google.com/apt cloud-sdk main", - "__sls__": "47628", - "__env__": "base", - "__id__": "google-cloud-repo", - "humanname": "Google Cloud SDK", - "file": "/etc/apt/sources.list.d/google-cloud-sdk.list", - "key_url": "https://packages.cloud.google.com/apt/doc/apt-key.gpg", - "order": 10006, - "fun": "managed", - }, - { - "state": "pkg", - "name": "google-cloud-sdk", - "__sls__": "47628", - "__env__": "base", - "__agg__": True, - "__id__": "google-cloud-sdk", - "require": ["google-cloud-repo"], - "order": 10007, - "fun": "installed", - }, - ] - - with patch("salt.state.State._gather_pillar"): - state_obj = salt.state.State(minion_opts) - low_ret = state_obj._aggregate_requisites(low, chunks) - - # Ensure the low returned contains require - assert "require" in low_ret - - # Ensure all the requires from pkg states are in low - assert low_ret["require"] == [ - "/tmp/install-vim", - "/tmp/install-tmux", - "google-cloud-repo", - ] - - def test_mod_aggregate(minion_opts): """ Test to ensure that the requisites are included in the aggregated low state. 
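For context: the hunk below switches test_mod_aggregate() to the new flow in which ordering is computed up front and _mod_aggregate() no longer receives the chunks. Under the hood the DependencyGraph in salt/utils/requisite.py does the work; a minimal sketch of its call sequence, condensed from the unit tests added later in this patch (the chunk values here are illustrative, not taken from the patch):

    import salt.utils.requisite

    chunks = [
        {"__id__": "pkgs-a", "name": "pkgs-a", "state": "pkg",
         "fun": "installed", "__env__": "base", "__sls__": "demo"},
        {"__id__": "pkgs-b", "name": "pkgs-b", "state": "pkg",
         "fun": "installed", "__env__": "base", "__sls__": "demo",
         "require": [{"pkg": "pkgs-a"}]},
    ]
    graph = salt.utils.requisite.DependencyGraph()
    for low in chunks:
        # register every chunk before wiring requisites so lookups succeed
        graph.add_chunk(low, allow_aggregate=True)
    for low in chunks:
        # add_requisites returns an error string when a reference is missing
        assert graph.add_requisites(low, []) is None
    # chunks come back topologically sorted; eligible ones are aggregated
    ordered = graph.aggregate_and_order_chunks(100)
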
@@ -1030,6 +915,16 @@
             "aggregate": True,
             "fun": "installed",
         },
+        {
+            "state": "pkg",
+            "name": "hello",
+            "__sls__": "test.62439",
+            "__env__": "base",
+            "__id__": "hello",
+            "order": 10003,
+            "aggregate": True,
+            "fun": "installed",
+        },
     ]
     running = {}
@@ -1044,7 +939,7 @@
         "order": 10002,
         "fun": "installed",
         "__agg__": True,
-        "pkgs": ["figlet", "sl"],
+        "pkgs": ["sl", "hello"],
     }
     with patch("salt.state.State._gather_pillar"):
@@ -1053,7 +948,8 @@
             state_obj.states,
             {"pkg.mod_aggregate": MagicMock(return_value=mock_pkg_mod_aggregate)},
         ):
-            low_ret = state_obj._mod_aggregate(low, running, chunks)
+            state_obj.order_chunks(chunks)
+            low_ret = state_obj._mod_aggregate(low, running)
             # Ensure the low returned contains require
             assert "require_in" in low_ret
@@ -1068,7 +964,7 @@
             assert "require" in low_ret
             # Ensure pkgs were aggregated
-            assert low_ret["pkgs"] == ["figlet", "sl"]
+            assert low_ret["pkgs"] == ["sl", "hello"]


 def test_mod_aggregate_order(minion_opts):
diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py
index a7b6b048f88..e494047f535 100644
--- a/tests/pytests/unit/states/test_pkg.py
+++ b/tests/pytests/unit/states/test_pkg.py
@@ -545,7 +545,7 @@ def test_mod_aggregate():
     }
     expected = {
-        "pkgs": ["byobu", "byobu", "vim", "tmux", "google-cloud-sdk"],
+        "pkgs": ["byobu", "vim", "tmux", "google-cloud-sdk"],
         "name": "other_pkgs",
         "fun": "installed",
         "aggregate": True,
diff --git a/tests/pytests/unit/utils/requisite/test_dependency_graph.py b/tests/pytests/unit/utils/requisite/test_dependency_graph.py
new file mode 100644
index 00000000000..dc4bbbc1d0f
--- /dev/null
+++ b/tests/pytests/unit/utils/requisite/test_dependency_graph.py
@@ -0,0 +1,485 @@
+"""
+Tests for the DependencyGraph in salt/utils/requisite.py
+"""
+
+import pytest
+
+import salt.utils.requisite
+
+pytestmark = [
+    pytest.mark.core_test,
+]
+
+
+def test_ordering():
+    """
+    Test that ordering chunks results in the expected order, honoring
+    requisites and order
+    """
+    sls = "test"
+    env = "base"
+    chunks = [
+        {
+            "__id__": "success-6",
+            "name": "success-6",
+            "state": "test",
+            "fun": "succeed_with_changes",
+        },
+        {
+            "__id__": "fail-0",
+            "name": "fail-0",
+            "state": "test",
+            "fun": "fail_without_changes",
+        },
+        {
+            "__id__": "fail-1",
+            "name": "fail-1",
+            "state": "test",
+            "fun": "fail_without_changes",
+        },
+        {
+            "__id__": "req-fails",
+            "name": "req-fails",
+            "state": "test",
+            "fun": "succeed_with_changes",
+            "require": ["fail-0", "fail-1"],
+        },
+        {
+            "__id__": "success-4",
+            "name": "success-4",
+            "state": "test",
+            "fun": "succeed_with_changes",
+            "order": 4,
+        },
+        {
+            "__id__": "success-1",
+            "name": "success-1",
+            "state": "test",
+            "fun": "succeed_without_changes",
+            "order": 1,
+        },
+        {
+            "__id__": "success-2",
+            "name": "success-2",
+            "state": "test",
+            "fun": "succeed_without_changes",
+            "order": 2,
+        },
+        {
+            "__id__": "success-d",
+            "name": "success-d",
+            "state": "test",
+            "fun": "succeed_without_changes",
+        },
+        {
+            "__id__": "success-c",
+            "name": "success-c",
+            "state": "test",
+            "fun": "succeed_without_changes",
+        },
+        {
+            "__id__": "success-b",
+            "name": "success-b",
+            "state": "test",
+            "fun": "succeed_without_changes",
+        },
+        {
+            "__id__": "success-a",
+            "name": "success-a",
+            "state": "test",
+            "fun": "succeed_without_changes",
+        },
+        {
+            "__id__": "success-3",
+            "name": "success-3",
+            "state": 
"test", + "fun": "succeed_without_changes", + "order": 3, + "require": [{"test": "success-a"}], + "watch": [{"test": "success-c"}], + "onchanges": [{"test": "success-b"}], + "listen": [{"test": "success-d"}], + }, + { + "__id__": "success-5", + "name": "success-5", + "state": "test", + "fun": "succeed_without_changes", + "listen": [{"test": "success-6"}], + }, + ] + depend_graph = salt.utils.requisite.DependencyGraph() + for low in chunks: + low.update( + { + "__env__": env, + "__sls__": sls, + } + ) + depend_graph.add_chunk(low, allow_aggregate=False) + for low in chunks: + depend_graph.add_requisites(low, []) + ordered_chunk_ids = [ + chunk["__id__"] for chunk in depend_graph.aggregate_and_order_chunks(100) + ] + expected_order = [ + "success-1", + "success-2", + "success-a", + "success-b", + "success-c", + "success-3", + "success-4", + "fail-0", + "fail-1", + "req-fails", + "success-5", + "success-6", + "success-d", + ] + assert expected_order == ordered_chunk_ids + + +def test_find_cycle_edges(): + sls = "test" + env = "base" + chunks = [ + { + "__id__": "state-1", + "name": "state-1", + "state": "test", + "fun": "succeed_with_changes", + "require": [{"test": "state-2"}], + }, + { + "__id__": "state-2", + "name": "state-2", + "state": "test", + "fun": "succeed_with_changes", + "require": [{"test": "state-3"}], + }, + { + "__id__": "state-3", + "name": "state-3", + "state": "test", + "fun": "succeed_with_changes", + "require": [{"test": "state-1"}], + }, + ] + depend_graph = salt.utils.requisite.DependencyGraph() + for low in chunks: + low.update( + { + "__env__": env, + "__sls__": sls, + } + ) + depend_graph.add_chunk(low, allow_aggregate=False) + for low in chunks: + depend_graph.add_requisites(low, []) + expected_cycle_edges = [ + ( + { + "__env__": "base", + "__id__": "state-3", + "__sls__": "test", + "fun": "succeed_with_changes", + "name": "state-3", + "require": [{"test": "state-1"}], + "state": "test", + }, + "require", + { + "__env__": "base", + "__id__": "state-1", + "__sls__": "test", + "fun": "succeed_with_changes", + "name": "state-1", + "require": [{"test": "state-2"}], + "state": "test", + }, + ), + ( + { + "__env__": "base", + "__id__": "state-2", + "__sls__": "test", + "fun": "succeed_with_changes", + "name": "state-2", + "require": [{"test": "state-3"}], + "state": "test", + }, + "require", + { + "__env__": "base", + "__id__": "state-3", + "__sls__": "test", + "fun": "succeed_with_changes", + "name": "state-3", + "require": [{"test": "state-1"}], + "state": "test", + }, + ), + ( + { + "__env__": "base", + "__id__": "state-1", + "__sls__": "test", + "fun": "succeed_with_changes", + "name": "state-1", + "require": [{"test": "state-2"}], + "state": "test", + }, + "require", + { + "__env__": "base", + "__id__": "state-2", + "__sls__": "test", + "fun": "succeed_with_changes", + "name": "state-2", + "require": [{"test": "state-3"}], + "state": "test", + }, + ), + ] + cycle_edges = depend_graph.find_cycle_edges() + assert expected_cycle_edges == cycle_edges + + +def test_get_aggregate_chunks(): + sls = "test" + env = "base" + chunks = [ + { + "__id__": "packages-1", + "name": "packages-1", + "state": "pkg", + "fun": "installed", + "pkgs": ["hello"], + }, + { + "__id__": "packages-2", + "name": "packages-2", + "state": "pkg", + "fun": "installed", + "pkgs": ["cowsay", "fortune-mod"], + "require": ["requirement"], + }, + { + "__id__": "packages-3", + "name": "packages-3", + "state": "pkg", + "fun": "installed", + "pkgs": ["figlet"], + "require": ["requirement"], + }, + { + 
"__id__": "requirement", + "name": "requirement", + "state": "test", + "fun": "nop", + }, + { + "__id__": "packages-4", + "name": "packages-4", + "state": "pkg", + "fun": "installed", + "pkgs": ["cowsay"], + }, + { + "__id__": "packages-5", + "name": "packages-5", + "state": "pkg", + "fun": "installed", + "pkgs": ["sl"], + "require": ["packages-4"], + }, + ] + depend_graph = salt.utils.requisite.DependencyGraph() + for low in chunks: + low.update( + { + "__env__": env, + "__sls__": sls, + } + ) + depend_graph.add_chunk(low, allow_aggregate=True) + for low in chunks: + depend_graph.add_requisites(low, []) + depend_graph.aggregate_and_order_chunks(100) + expected_aggregates = [ + (chunks[0], ["packages-1", "packages-4", "packages-2", "packages-3"]), + (chunks[1], ["packages-1", "packages-4", "packages-2", "packages-3"]), + (chunks[2], ["packages-1", "packages-4", "packages-2", "packages-3"]), + (chunks[3], []), + (chunks[4], ["packages-1", "packages-4", "packages-2", "packages-3"]), + (chunks[5], []), + ] + for low, expected_aggregate_ids in expected_aggregates: + aggregated_ids = [ + chunk["__id__"] for chunk in depend_graph.get_aggregate_chunks(low) + ] + assert expected_aggregate_ids == aggregated_ids + + +def test_get_dependencies(): + sls = "test" + env = "base" + chunks = [ + { + "__id__": "packages-1", + "name": "packages-1", + "state": "pkg", + "fun": "installed", + "pkgs": ["hello"], + }, + { + "__id__": "packages-2", + "name": "packages-2", + "state": "pkg", + "fun": "installed", + "pkgs": ["cowsay", "fortune-mod"], + "require": ["requirement"], + }, + { + "__id__": "packages-3", + "name": "packages-3", + "state": "pkg", + "fun": "installed", + "pkgs": ["figlet"], + "require": ["requirement"], + }, + { + "__id__": "requirement", + "name": "requirement", + "state": "test", + "fun": "nop", + }, + { + "__id__": "packages-4", + "name": "packages-4", + "state": "pkg", + "fun": "installed", + "pkgs": ["cowsay"], + }, + { + "__id__": "packages-5", + "name": "packages-5", + "state": "pkg", + "fun": "installed", + "pkgs": ["sl"], + "require": ["packages-4"], + }, + ] + depend_graph = salt.utils.requisite.DependencyGraph() + for low in chunks: + low.update( + { + "__env__": env, + "__sls__": sls, + } + ) + depend_graph.add_chunk(low, allow_aggregate=False) + for low in chunks: + depend_graph.add_requisites(low, []) + depend_graph.aggregate_and_order_chunks(100) + expected_aggregates = [ + (chunks[0], []), + (chunks[1], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]), + (chunks[2], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]), + (chunks[3], []), + (chunks[4], []), + (chunks[5], [(salt.utils.requisite.RequisiteType.REQUIRE, "packages-4")]), + ] + for low, expected_dependency_tuples in expected_aggregates: + depend_tuples = [ + (req_type, chunk["__id__"]) + for (req_type, chunk) in depend_graph.get_dependencies(low) + ] + assert expected_dependency_tuples == depend_tuples + + +def test_get_dependencies_when_aggregated(): + sls = "test" + env = "base" + chunks = [ + { + "__id__": "packages-1", + "name": "packages-1", + "state": "pkg", + "fun": "installed", + "pkgs": ["hello"], + }, + { + "__id__": "packages-2", + "name": "packages-2", + "state": "pkg", + "fun": "installed", + "pkgs": ["cowsay", "fortune-mod"], + "require": ["requirement"], + }, + { + "__id__": "packages-3", + "name": "packages-3", + "state": "pkg", + "fun": "installed", + "pkgs": ["figlet"], + "require": ["requirement"], + }, + { + "__id__": "requirement", + "name": "requirement", + "state": 
"test", + "fun": "nop", + }, + { + "__id__": "packages-4", + "name": "packages-4", + "state": "pkg", + "fun": "installed", + "pkgs": ["cowsay"], + }, + { + "__id__": "packages-5", + "name": "packages-5", + "state": "pkg", + "fun": "installed", + "pkgs": ["sl"], + "require": ["packages-4"], + }, + ] + depend_graph = salt.utils.requisite.DependencyGraph() + for low in chunks: + low.update( + { + "__env__": env, + "__sls__": sls, + } + ) + depend_graph.add_chunk(low, allow_aggregate=True) + for low in chunks: + depend_graph.add_requisites(low, []) + depend_graph.aggregate_and_order_chunks(100) + expected_aggregates = [ + (chunks[0], []), + (chunks[1], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]), + (chunks[2], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]), + (chunks[3], []), + (chunks[4], []), + ( + chunks[5], + [ + (salt.utils.requisite.RequisiteType.REQUIRE, "packages-4"), + (salt.utils.requisite.RequisiteType.REQUIRE, "packages-1"), + (salt.utils.requisite.RequisiteType.REQUIRE, "packages-4"), + (salt.utils.requisite.RequisiteType.REQUIRE, "packages-2"), + (salt.utils.requisite.RequisiteType.REQUIRE, "packages-3"), + ], + ), + ] + for low, expected_dependency_tuples in expected_aggregates: + depend_tuples = [ + (req_type, chunk["__id__"]) + for (req_type, chunk) in depend_graph.get_dependencies(low) + ] + assert expected_dependency_tuples == depend_tuples diff --git a/tests/pytests/unit/utils/test_http.py b/tests/pytests/unit/utils/test_http.py index 492086051fd..e62a9c02bbf 100644 --- a/tests/pytests/unit/utils/test_http.py +++ b/tests/pytests/unit/utils/test_http.py @@ -165,7 +165,8 @@ def test_query_error_handling(): ret = http.query("http://127.0.0.1:0") assert isinstance(ret, dict) assert isinstance(ret.get("error", None), str) - ret = http.query("http://myfoobardomainthatnotexist") + # use RFC6761 invalid domain that does not exist + ret = http.query("http://myfoobardomainthatnotexist.invalid") assert isinstance(ret, dict) assert isinstance(ret.get("error", None), str) diff --git a/tests/pytests/unit/utils/test_thin.py b/tests/pytests/unit/utils/test_thin.py index 8398dfea02d..dcb75322860 100644 --- a/tests/pytests/unit/utils/test_thin.py +++ b/tests/pytests/unit/utils/test_thin.py @@ -58,7 +58,6 @@ def test_get_ext_tops(version): python3 = False if tuple(version) >= (3, 0): python3 = True - cfg = { "namespace": { "path": "/foo", @@ -68,6 +67,7 @@ def test_get_ext_tops(version): "yaml": "/yaml/", "tornado": "/tornado/tornado.py", "msgpack": "msgpack.py", + "networkx": "/networkx/networkx.py", }, } } diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py index 23e5939c117..f2369c70b00 100644 --- a/tests/unit/utils/test_thin.py +++ b/tests/unit/utils/test_thin.py @@ -39,6 +39,15 @@ def patch_if(condition, *args, **kwargs): return inner +class FakeSaltSystemExit(Exception): + """ + Fake SaltSystemExit so the process does not actually die + """ + + def __init__(self, code=-1, msg=None): + super().__init__(msg or code) + + class SSHThinTestCase(TestCase): """ TestCase for SaltSSH-related parts. 
+    sls = "test"
+    env = "base"
+    chunks = [
+        {
+            "__id__": "packages-1",
+            "name": "packages-1",
+            "state": "pkg",
+            "fun": "installed",
+            "pkgs": ["hello"],
+        },
+        {
+            "__id__": "packages-2",
+            "name": "packages-2",
+            "state": "pkg",
+            "fun": "installed",
+            "pkgs": ["cowsay", "fortune-mod"],
+            "require": ["requirement"],
+        },
+        {
+            "__id__": "packages-3",
+            "name": "packages-3",
+            "state": "pkg",
+            "fun": "installed",
+            "pkgs": ["figlet"],
+            "require": ["requirement"],
+        },
+        {
+            "__id__": "requirement",
+            "name": "requirement",
+            "state": "test",
+            "fun": "nop",
+        },
+        {
+            "__id__": "packages-4",
+            "name": "packages-4",
+            "state": "pkg",
+            "fun": "installed",
+            "pkgs": ["cowsay"],
+        },
+        {
+            "__id__": "packages-5",
+            "name": "packages-5",
+            "state": "pkg",
+            "fun": "installed",
+            "pkgs": ["sl"],
+            "require": ["packages-4"],
+        },
+    ]
+    depend_graph = salt.utils.requisite.DependencyGraph()
+    for low in chunks:
+        low.update(
+            {
+                "__env__": env,
+                "__sls__": sls,
+            }
+        )
+        depend_graph.add_chunk(low, allow_aggregate=True)
+    for low in chunks:
+        depend_graph.add_requisites(low, [])
+    depend_graph.aggregate_and_order_chunks(100)
+    expected_aggregates = [
+        (chunks[0], []),
+        (chunks[1], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]),
+        (chunks[2], [(salt.utils.requisite.RequisiteType.REQUIRE, "requirement")]),
+        (chunks[3], []),
+        (chunks[4], []),
+        (
+            chunks[5],
+            [
+                (salt.utils.requisite.RequisiteType.REQUIRE, "packages-4"),
+                (salt.utils.requisite.RequisiteType.REQUIRE, "packages-1"),
+                (salt.utils.requisite.RequisiteType.REQUIRE, "packages-4"),
+                (salt.utils.requisite.RequisiteType.REQUIRE, "packages-2"),
+                (salt.utils.requisite.RequisiteType.REQUIRE, "packages-3"),
+            ],
+        ),
+    ]
+    for low, expected_dependency_tuples in expected_aggregates:
+        depend_tuples = [
+            (req_type, chunk["__id__"])
+            for (req_type, chunk) in depend_graph.get_dependencies(low)
+        ]
+        assert expected_dependency_tuples == depend_tuples
diff --git a/tests/pytests/unit/utils/test_http.py b/tests/pytests/unit/utils/test_http.py
index 492086051fd..e62a9c02bbf 100644
--- a/tests/pytests/unit/utils/test_http.py
+++ b/tests/pytests/unit/utils/test_http.py
@@ -165,7 +165,8 @@ def test_query_error_handling():
     ret = http.query("http://127.0.0.1:0")
     assert isinstance(ret, dict)
     assert isinstance(ret.get("error", None), str)
-    ret = http.query("http://myfoobardomainthatnotexist")
+    # use an RFC 6761 ".invalid" domain, which is guaranteed not to exist
+    ret = http.query("http://myfoobardomainthatnotexist.invalid")
     assert isinstance(ret, dict)
     assert isinstance(ret.get("error", None), str)
 
diff --git a/tests/pytests/unit/utils/test_thin.py b/tests/pytests/unit/utils/test_thin.py
index 8398dfea02d..dcb75322860 100644
--- a/tests/pytests/unit/utils/test_thin.py
+++ b/tests/pytests/unit/utils/test_thin.py
@@ -58,7 +58,6 @@ def test_get_ext_tops(version):
     python3 = False
     if tuple(version) >= (3, 0):
         python3 = True
-
     cfg = {
         "namespace": {
             "path": "/foo",
@@ -68,6 +67,7 @@ def test_get_ext_tops(version):
                 "yaml": "/yaml/",
                 "tornado": "/tornado/tornado.py",
                 "msgpack": "msgpack.py",
+                "networkx": "/networkx/networkx.py",
             },
         }
     }
diff --git a/tests/unit/utils/test_thin.py b/tests/unit/utils/test_thin.py
index 23e5939c117..f2369c70b00 100644
--- a/tests/unit/utils/test_thin.py
+++ b/tests/unit/utils/test_thin.py
@@ -39,6 +39,15 @@ def patch_if(condition, *args, **kwargs):
     return inner
 
 
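+# Stand-in for salt.exceptions.SaltSystemExit used with @patch below: it keeps
+# the real exception's (code, msg) constructor shape while remaining a plain
+# Exception, so a raised instance can be asserted on without killing the
+# test process.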
"certifi", "sdp", "sdp_hlp", @@ -627,6 +650,10 @@ class SSHThinTestCase(TestCase): "salt.utils.thin.msgpack", type("msgpack", (), {"__file__": "/site-packages/msgpack"}), ) + @patch( + "salt.utils.thin.networkx", + type("networkx", (), {"__file__": "/site-packages/networkx"}), + ) @patch( "salt.utils.thin.certifi", type("certifi", (), {"__file__": "/site-packages/certifi"}), @@ -685,6 +712,7 @@ class SSHThinTestCase(TestCase): "yaml", "tornado", "msgpack", + "networkx", "certifi", "sdp", "sdp_hlp", @@ -754,7 +782,7 @@ class SSHThinTestCase(TestCase): assert form == "sha256" @patch("salt.utils.thin.sys.version_info", (2, 5)) - @patch("salt.exceptions.SaltSystemExit", Exception) + @patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit) def test_gen_thin_fails_ancient_python_version(self): """ Test thin.gen_thin function raises an exception @@ -770,7 +798,7 @@ class SSHThinTestCase(TestCase): str(err.value), ) - @patch("salt.exceptions.SaltSystemExit", Exception) + @patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit) @patch("salt.utils.thin.log", MagicMock()) @patch("salt.utils.thin.os.makedirs", MagicMock()) @patch("salt.utils.files.fopen", MagicMock()) @@ -826,7 +854,7 @@ class SSHThinTestCase(TestCase): thin.zipfile.ZipFile.assert_not_called() thin.tarfile.open.assert_called() - @patch("salt.exceptions.SaltSystemExit", Exception) + @patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit) @patch("salt.utils.thin.log", MagicMock()) @patch("salt.utils.thin.os.makedirs", MagicMock()) @patch("salt.utils.files.fopen", MagicMock()) @@ -880,7 +908,7 @@ class SSHThinTestCase(TestCase): self.assertEqual(name, fname) thin.tarfile.open().close.assert_called() - @patch("salt.exceptions.SaltSystemExit", Exception) + @patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit) @patch("salt.utils.thin.log", MagicMock()) @patch("salt.utils.thin.os.makedirs", MagicMock()) @patch("salt.utils.files.fopen", MagicMock()) @@ -948,7 +976,7 @@ class SSHThinTestCase(TestCase): files.pop(files.index(arcname)) self.assertFalse(files) - @patch("salt.exceptions.SaltSystemExit", Exception) + @patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit) @patch("salt.utils.thin.log", MagicMock()) @patch("salt.utils.thin.os.makedirs", MagicMock()) @patch("salt.utils.files.fopen", MagicMock()) @@ -1076,7 +1104,7 @@ class SSHThinTestCase(TestCase): for t_line in ["second-system-effect:2:7", "solar-interference:2:6"]: self.assertIn(t_line, out) - @patch("salt.exceptions.SaltSystemExit", Exception) + @patch("salt.exceptions.SaltSystemExit", FakeSaltSystemExit) @patch("salt.utils.thin.log", MagicMock()) @patch("salt.utils.thin.os.makedirs", MagicMock()) @patch("salt.utils.files.fopen", MagicMock()) @@ -1148,6 +1176,7 @@ class SSHThinTestCase(TestCase): (bts("yaml/__init__.py"), bts("")), (bts("tornado/__init__.py"), bts("")), (bts("msgpack/__init__.py"), bts("")), + (bts("networkx/__init__.py"), bts("")), (bts("certifi/__init__.py"), bts("")), (bts("singledispatch.py"), bts("")), (bts(""), bts("")), @@ -1190,6 +1219,7 @@ class SSHThinTestCase(TestCase): side_effect=[ (bts("tornado/__init__.py"), bts("")), (bts("msgpack/__init__.py"), bts("")), + (bts("networkx/__init__.py"), bts("")), (bts("certifi/__init__.py"), bts("")), (bts("singledispatch.py"), bts("")), (bts(""), bts("")), @@ -1235,6 +1265,7 @@ class SSHThinTestCase(TestCase): (bts(self.fake_libs["yaml"]), bts("")), (bts(self.fake_libs["tornado"]), bts("")), (bts(self.fake_libs["msgpack"]), bts("")), + (bts(self.fake_libs["networkx"]), bts("")), (bts(""), 
bts("")), (bts(""), bts("")), (bts(""), bts("")), @@ -1263,6 +1294,7 @@ class SSHThinTestCase(TestCase): os.path.join("yaml", "__init__.py"), os.path.join("tornado", "__init__.py"), os.path.join("msgpack", "__init__.py"), + os.path.join("networkx", "__init__.py"), ] ) @@ -1363,6 +1395,7 @@ class SSHThinTestCase(TestCase): os.path.join("yaml", "__init__.py"), os.path.join("tornado", "__init__.py"), os.path.join("msgpack", "__init__.py"), + os.path.join("networkx", "__init__.py"), ] ) with patch_tops_py: